use clap::{Parser, Subcommand, ValueEnum};
use ignore::{DirEntry, WalkBuilder};
use rand::{distr::Alphanumeric, Rng};
use serde_json::{json, Value};
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::process::Command as Proc;
use std::time::{Duration, Instant};
use std::{
collections::HashMap,
fs,
fs::File,
io,
path::{Path, PathBuf},
};
use blake3;
use chacha20poly1305::{
aead::{Aead, KeyInit},
XChaCha20Poly1305, XNonce,
};
// File-format magic/version header for encrypted env containers, and the
// sentinel comment that marks a git hook as eenv-managed (safe to overwrite).
const MAGIC: &[u8; 5] = b"EENV1"; const HOOK_MARKER: &str = "# managed-by-eenv";
/// Resolves the repository's hooks directory via `git rev-parse --git-path hooks`
/// (this honors `core.hooksPath` and worktree layouts).
///
/// Errors when git exits unsuccessfully, e.g. outside a repository.
fn git_hooks_dir(repo_root: &Path) -> io::Result<PathBuf> {
    let output = Proc::new("git")
        .arg("-C")
        .arg(repo_root)
        .args(["rev-parse", "--git-path", "hooks"])
        .output()?;
    if !output.status.success() {
        return Err(io::Error::new(io::ErrorKind::Other, "git rev-parse failed"));
    }
    let dir = String::from_utf8_lossy(&output.stdout).trim().to_string();
    Ok(PathBuf::from(dir))
}
/// Installs (or refreshes) the repository's pre-commit hooks: a bash script
/// and a PowerShell sibling, both delegating to this executable's
/// `pre-commit --write` subcommand.
///
/// A pre-existing hook that does NOT contain `HOOK_MARKER` is left untouched
/// unless `force` is set, in which case it is backed up first.
fn install_git_hook(repo_root: &Path, force: bool) -> io::Result<()> {
    // Bail out early when repo_root is not inside a git repository.
    let status = Proc::new("git").arg("-C").arg(repo_root).arg("rev-parse").arg("--git-dir").status()?;
    if !status.success() {
        return Err(io::Error::new(io::ErrorKind::NotFound, "not a git repo"));
    }
    let hooks_dir = git_hooks_dir(repo_root)?;
    fs::create_dir_all(&hooks_dir)?;
    let sh_path = hooks_dir.join("pre-commit");
    let ps1_path = hooks_dir.join("pre-commit.ps1");
    // The hook scripts hard-code the absolute path of the currently running
    // binary; re-running the install refreshes them after the binary moves.
    let exe = std::env::current_exe()?;
    let exe_str = exe.to_string_lossy();
    let sh_content = format!(r#"#!/usr/bin/env bash
{marker}
set -euo pipefail
exec "{exe}" pre-commit --write
"#, marker = HOOK_MARKER, exe = exe_str);
    let ps1_content = format!(r#"{marker}
$ErrorActionPreference = "Stop"
& "{exe}" pre-commit --write
exit $LASTEXITCODE
"#,
marker = HOOK_MARKER, exe = exe_str);
    // Writes `desired` to `path` unless a foreign (unmarked) hook exists and
    // `force` is false, or the content is already up to date. Returns whether
    // the file was (re)written. A foreign hook replaced under `force` is
    // backed up best-effort first.
    fn maybe_write(path: &Path, desired: &str, force: bool) -> io::Result<bool> {
        match fs::read_to_string(path) {
            Ok(existing) => {
                let ours = existing.contains(HOOK_MARKER);
                if !ours && !force {
                    // Never clobber a hook the user wrote themselves.
                    return Ok(false);
                }
                if existing == desired {
                    return Ok(false);
                }
                if !ours && force {
                    let bak = backup_path(path);
                    // Best-effort backup; copy errors are deliberately ignored.
                    fs::copy(path, &bak).ok();
                }
            }
            // Missing/unreadable file: fall through and write a fresh hook.
            Err(_) => { }
        }
        write_string_atomic(path, desired)?;
        Ok(true)
    }
    let _sh_changed = maybe_write(&sh_path, &sh_content, force)?;
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        // Git only runs executable hooks on unix; 0o755 = rwxr-xr-x.
        if sh_path.exists() {
            let mut perm = fs::metadata(&sh_path)?.permissions();
            perm.set_mode(0o755);
            fs::set_permissions(&sh_path, perm)?;
        }
    }
    let _ps1_changed = maybe_write(&ps1_path, &ps1_content, force)?;
    Ok(())
}
/// Derives a timestamped backup path by setting the extension to
/// `bak.<unix-seconds>`. Note `with_extension` replaces an existing
/// extension rather than appending to it.
fn backup_path(p: &Path) -> PathBuf {
    let secs = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    p.with_extension(format!("bak.{secs}"))
}
/// Removes the pre-commit hook scripts installed by eenv.
///
/// Without `force`, only files containing `HOOK_MARKER` are deleted so a
/// user-authored hook is never removed; with `force`, both hook files are
/// removed unconditionally. Deletion errors are deliberately ignored.
fn uninstall_git_hook(repo_root: &Path, force: bool) -> io::Result<()> {
    let hooks_dir = git_hooks_dir(repo_root)?;
    for name in ["pre-commit", "pre-commit.ps1"] {
        let path = hooks_dir.join(name);
        if !path.exists() {
            continue;
        }
        let ours = fs::read_to_string(&path)
            .map(|text| text.contains(HOOK_MARKER))
            .unwrap_or(false);
        if force || ours {
            let _ = fs::remove_file(&path);
        }
    }
    Ok(())
}
// Action argument of `eenv hook <action>`: install or remove the managed
// git hooks. (Plain `//` comments: a `///` doc comment would change clap's
// generated help text.)
#[derive(ValueEnum, Clone, Debug)]
enum HookAction { Install, Uninstall }
// Top-level CLI. With no subcommand given, `main` falls back to `Greet`.
// (Plain `//` comments throughout: `///` would alter clap's help output.)
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Option<Command>,
    // Name used by the `greet` demo subcommand.
    #[arg(short, long, default_value = "world")]
    name: String,
    // How many times `greet` prints its line.
    #[arg(short, long, default_value_t = 1)]
    count: u8,
}
// Subcommands. (Plain `//` comments: `///` would change clap help text.)
#[derive(Subcommand, Debug)]
enum Command {
    // NOTE(review): the lower-case variant keeps the CLI name `init` but
    // violates Rust naming; clap derives kebab-case names, so renaming to
    // `Init` would yield the same CLI name and drop the allow — requires a
    // coordinated change at the `Command::init` match arm in `main`.
    #[allow(non_camel_case_types)]
    init,
    // Pre-commit guard; with `--write` it also regenerates derived files
    // (examples, gitignore entries, encrypted blobs) and stages them.
    PreCommit {
        #[arg(long)]
        write: bool,
    },
    // Install or uninstall the managed git hooks.
    Hook {
        #[arg(value_enum)]
        action: HookAction,
        // Overwrite/remove even hooks not created by eenv.
        #[arg(long, default_value_t = false)]
        force: bool,
    },
    // Demo command: prints a greeting `count` times.
    Greet,
}
/// Presence flags computed by `compute_eenv_state` from a repository scan:
/// `enc` = any `.env*.enc` exists, `example` = any `.env*.example` exists,
/// `env` = any raw `.env*` exists, `eenvjson` = `eenv.config.json` parses
/// as an object with a non-empty `"key"`.
#[derive(Debug, Clone, Copy)]
pub struct EenvState {
    pub enc: bool, pub example: bool, pub env: bool, pub eenvjson: bool, }
/// Entry point: parses the CLI and dispatches; a bare invocation runs `Greet`.
fn main() -> io::Result<()> {
    let cli = Cli::parse();
    match cli.command.unwrap_or(Command::Greet) {
        Command::init => {
            let cwd = std::env::current_dir()?;
            let repo_root = find_repo_root(&cwd)?;
            // Hook installation is best-effort; init proceeds regardless.
            if let Err(e) = install_git_hook(&repo_root, false) {
                eprintln!("[hook] WARN: could not install pre-commit hook: {e}");
            }
            init(&repo_root)?;
        }
        Command::PreCommit { write } => {
            let cwd = std::env::current_dir()?;
            let repo_root = find_repo_root(&cwd)?;
            // Self-heal: keep the installed hook pointing at the current binary.
            if let Err(e) = install_git_hook(&repo_root, false) {
                eprintln!("[hook] WARN: could not ensure pre-commit hook: {e}");
            }
            // A non-zero exit code makes git abort the commit.
            if let Err(e) = pre_commit(&repo_root, write) {
                eprintln!("[pre-commit] {e}");
                std::process::exit(1); }
        }
        Command::Hook { action, force } => {
            let cwd = std::env::current_dir()?;
            let repo_root = find_repo_root(&cwd)?;
            match action {
                HookAction::Install => {
                    if let Err(e) = install_git_hook(&repo_root, force) {
                        eprintln!("[hook] ERROR: {e}");
                        std::process::exit(1);
                    }
                    println!("[hook] installed (force={force})");
                }
                HookAction::Uninstall => {
                    if let Err(e) = uninstall_git_hook(&repo_root, force) {
                        eprintln!("[hook] ERROR: {e}");
                        std::process::exit(1);
                    }
                    println!("[hook] uninstalled");
                }
            }
        }
        Command::Greet => {
            for _ in 0..cli.count {
                println!("Hello {}!", cli.name);
            }
        }
    }
    Ok(())
}
/// One-shot repository setup: report state, then (a) decrypt existing
/// `.env*.enc` files using the configured or prompted key, and (b) for raw
/// env files, generate `.example` skeletons, repair `.gitignore`, ensure a
/// key config exists, and produce `.enc` blobs.
fn init(repo_root: &Path) -> io::Result<()> {
    let state = compute_eenv_state(repo_root)?;
    println!("[state]");
    println!("enc = {}", state.enc);
    println!("example = {}", state.example);
    println!("env = {}", state.env);
    println!("eenvjson = {}", state.eenvjson);
    println!("-----------------");
    if state.enc {
        if state.eenvjson {
            // Config with key already present: just decrypt what we can.
            if let Err(e) = handle_enc_workflow(repo_root) {
                eprintln!("[enc] error: {e}");
            }
        } else {
            // Encrypted files but no (valid) config: ask for the key,
            // validate it against an .enc file, then persist it.
            match bootstrap_key_and_decrypt(repo_root) {
                Ok(()) => eprintln!("[enc] key accepted, config created, decrypted where possible."),
                Err(e) => {
                    eprintln!("[enc] could not bootstrap from key: {e}");
                    return Err(e);
                }
            }
        }
    }
    if state.env {
        let (files, _t_find) = time_result("find_env_files_recursive", || find_env_files_recursive(repo_root))?;
        let ((real, examples, encs), _t_split) = time_ok("split_env_files", move || split_env_files(files));
        println!("--- real env files ---"); for p in &real { println!("{}", p.display()); }
        println!("--- example env files ---"); for p in &examples { println!("{}", p.display()); }
        println!("--- encrypted env files ---");for p in &encs { println!("{}", p.display()); }
        // Unlike pre-commit, init only generates examples when NONE exist yet.
        if !state.example && !real.is_empty() {
            let skeletons = extract_env_skeletons(&real)?;
            if let Ok(actions) = ensure_env_examples_from_skeletons(&skeletons) {
                for (src, dst, action) in actions {
                    let label = match action {
                        ExampleAction::Created => "created",
                        ExampleAction::Overwritten => "overwritten",
                        ExampleAction::SourceIsExample => "skip",
                    };
                    println!("[env-example] {:<11} {} -> {}", label, src.display(), dst.display());
                }
            }
        }
        // Make sure raw env files (and the key config) are git-ignored while
        // .example/.enc artifacts are not.
        match fix_gitignore_from_found(repo_root, &real) {
            Ok(report) => {
                if report.changed {
                    println!("[gitignore] updated: {}\n + added: {:?}\n - removed: {:?}", report.path.display(), report.added, report.removed);
                } else {
                    println!("[gitignore] no changes needed ({})", report.path.display());
                }
            }
            Err(e) => eprintln!("[gitignore] error: {e}"),
        }
        // Create/repair the key config before encrypting with it below.
        match ensure_eenv_config(repo_root) {
            Ok(ConfigStatus::Created) => eprintln!("[config] created eenv.config.json"),
            Ok(ConfigStatus::FixedMissingKey) => eprintln!("[config] injected key into eenv.config.json"),
            Ok(ConfigStatus::RewrittenFromInvalid { backup }) => eprintln!("[config] repaired eenv.config.json (backup: {})", backup.display()),
            Ok(ConfigStatus::Valid) => {}
            Err(e) => eprintln!("[config] error: {e}"),
        }
        let produced = encrypt_envs_to_enc(repo_root, &real)?;
        for p in &produced {
            println!("[init] encrypted -> {}", p.display());
        }
    }
    Ok(())
}
/// Decrypts every discovered `.env*.enc` file next to itself (suffix
/// stripped), skipping any target that already exists so local plaintext is
/// never overwritten. Per-file failures are reported and do not abort.
fn handle_enc_workflow(repo_root: &Path) -> io::Result<()> {
    let key = read_eenv_key(repo_root)?;
    let aead = XChaCha20Poly1305::new((&key).into());
    let (_real, _examples, encs) = split_env_files(find_env_files_recursive(repo_root)?);
    for enc_path in encs {
        let dst = dec_output_path(&enc_path);
        if dst.exists() {
            eprintln!("[enc] skip decrypt (target exists): {}", dst.display());
        } else if let Err(e) = decrypt_file_from_enc(&aead, &enc_path, &dst) {
            eprintln!("[enc] WARN: could not decrypt {} ({})", enc_path.display(), e);
        } else {
            println!("[enc] decrypted {} -> {}", enc_path.display(), dst.display());
        }
    }
    Ok(())
}
/// Maps a plaintext env path to its ciphertext sibling by appending `.enc`
/// (e.g. `.env.local` -> `.env.local.enc`).
fn enc_output_path(input: &Path) -> PathBuf {
    let base = input.file_name().and_then(|s| s.to_str()).unwrap_or("");
    input.with_file_name(format!("{base}.enc"))
}
/// Inverse of `enc_output_path`: strips a trailing `.enc` from the file
/// name; a name without the suffix maps to itself.
fn dec_output_path(input_enc: &Path) -> PathBuf {
    let name = input_enc.file_name().and_then(|s| s.to_str()).unwrap_or("");
    let plain = name.strip_suffix(".enc").unwrap_or(name);
    input_enc.with_file_name(plain)
}
/// Encrypts `src` into the eenv container format and atomically writes `dst`.
///
/// Wire layout: MAGIC ("EENV1") || 24-byte random XChaCha20 nonce ||
/// ciphertext (the AEAD appends its 16-byte Poly1305 tag).
fn encrypt_file_to_enc(aead: &XChaCha20Poly1305, src: &Path, dst: &Path) -> io::Result<()> {
    let plaintext = fs::read(src)?;
    // Fresh random nonce per file; XChaCha's 192-bit nonce makes random
    // generation collision-safe in practice.
    let nonce_bytes: [u8; 24] = rand::rng().random();
    let ciphertext = aead
        .encrypt(XNonce::from_slice(&nonce_bytes), plaintext.as_ref())
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "encrypt failed"))?;
    let mut out = Vec::with_capacity(MAGIC.len() + nonce_bytes.len() + ciphertext.len());
    out.extend_from_slice(MAGIC);
    out.extend_from_slice(&nonce_bytes);
    out.extend_from_slice(&ciphertext);
    write_bytes_atomic(dst, &out)
}
/// Parses the eenv container (MAGIC || 24-byte nonce || ciphertext+tag),
/// decrypts with the given AEAD, and atomically writes the plaintext to `dst`.
fn decrypt_file_from_enc(aead: &XChaCha20Poly1305, src_enc: &Path, dst: &Path) -> io::Result<()> {
    const NONCE_LEN: usize = 24;
    const TAG_LEN: usize = 16;
    let data = fs::read(src_enc)?;
    // Shortest valid file: header + nonce + the tag of an empty plaintext.
    if data.len() < MAGIC.len() + NONCE_LEN + TAG_LEN {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "enc file too short"));
    }
    let (header, rest) = data.split_at(MAGIC.len());
    if header != &MAGIC[..] {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "bad magic/version"));
    }
    let (nonce_bytes, ciphertext) = rest.split_at(NONCE_LEN);
    let plaintext = aead
        .decrypt(XNonce::from_slice(nonce_bytes), ciphertext)
        .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "decrypt failed (wrong key?)"))?;
    write_bytes_atomic(dst, &plaintext)
}
/// Returns true when `eenv.config.json` exists, parses as a JSON object, and
/// carries a non-empty string `"key"` field; false otherwise (including a
/// missing file). I/O errors other than absence propagate.
fn validate_eenv_config(repo_root: &Path) -> io::Result<bool> {
    let path = eenv_config_path(repo_root);
    if !path.exists() {
        return Ok(false);
    }
    let text = fs::read_to_string(&path)?;
    let valid = match serde_json::from_str::<serde_json::Value>(&text) {
        Ok(v) if v.is_object() => {
            matches!(v.get("key"), Some(serde_json::Value::String(s)) if !s.is_empty())
        }
        _ => false,
    };
    Ok(valid)
}
/// Snapshots the repository's eenv-related state: which kinds of env files
/// exist (real / example / encrypted) and whether the key config is valid.
fn compute_eenv_state(repo_root: &Path) -> io::Result<EenvState> {
    let (real, examples, encs) = split_env_files(find_env_files_recursive(repo_root)?);
    Ok(EenvState {
        enc: !encs.is_empty(),
        example: !examples.is_empty(),
        env: !real.is_empty(),
        eenvjson: validate_eenv_config(repo_root)?,
    })
}
/// Location of the key config: `<repo_root>/eenv.config.json`.
fn eenv_config_path(repo_root: &Path) -> PathBuf {
    repo_root.join("eenv.config.json")
}
/// Generates a fresh random key: 44 ASCII-alphanumeric characters. The
/// alphanumeric alphabet also keeps the key safe to embed in the JSON config
/// without escaping.
fn generate_key() -> String {
    let mut rng = rand::rng();
    std::iter::repeat_with(|| char::from(rng.sample(Alphanumeric)))
        .take(44)
        .collect()
}
/// Prompts on stdout and reads one line from stdin as the key; the input is
/// trimmed, and an empty result is rejected with `InvalidInput`.
fn prompt_for_key() -> io::Result<String> {
    print!("eenv: existing eenv.config.json is invalid.\nEnter key to use: ");
    io::stdout().flush()?;
    let mut line = String::new();
    io::stdin().read_line(&mut line)?;
    let key = line.trim();
    if key.is_empty() {
        Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "empty key not allowed",
        ))
    } else {
        Ok(key.to_string())
    }
}
/// Atomically writes `contents` to `path`: write a sibling temp file, fsync,
/// then rename over the destination so readers never observe a partial file.
/// Parent directories are created as needed.
fn write_string_atomic(path: &Path, contents: &str) -> io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Build the temp name by appending to the whole file name. The previous
    // `with_extension("tmp~")` REPLACED an existing extension, so different
    // targets sharing a stem (e.g. ".env" and ".env.enc") collapsed onto the
    // same temp path — a collision hazard.
    let mut tmp_name = path.file_name().map(|s| s.to_os_string()).unwrap_or_default();
    tmp_name.push(".tmp~");
    let tmp = path.with_file_name(tmp_name);
    {
        let mut f = File::create(&tmp)?;
        f.write_all(contents.as_bytes())?;
        f.sync_all()?;
    }
    fs::rename(tmp, path)
}
/// Outcome of `ensure_eenv_config`: the config file was freshly created,
/// was already valid, had a key injected into an otherwise-valid object, or
/// was rebuilt from invalid content (original preserved at `backup`).
#[derive(Debug)]
enum ConfigStatus {
    Created, Valid, FixedMissingKey, RewrittenFromInvalid { backup: PathBuf }, }
/// Creates or repairs `eenv.config.json` so it holds a usable `"key"`.
///
/// Cases: missing file -> create with a generated key; parses but is not an
/// object -> back up and rewrite with a generated key; object without a
/// non-empty string key -> inject a generated key; unparseable JSON -> ask
/// the user for a key, back up the old content, and rewrite.
fn ensure_eenv_config(repo_root: &Path) -> io::Result<ConfigStatus> {
    let path = eenv_config_path(repo_root);
    if !path.exists() {
        let key = generate_key();
        // Generated keys are alphanumeric, so direct interpolation into the
        // JSON template is safe here.
        let pretty = format!("{{\n \"key\": \"{}\"\n}}\n", key);
        write_string_atomic(&path, &pretty)?;
        return Ok(ConfigStatus::Created);
    }
    let text = fs::read_to_string(&path)?;
    match serde_json::from_str::<Value>(&text) {
        Ok(mut v) => {
            if !v.is_object() {
                // Valid JSON but not an object (e.g. a bare string/array):
                // preserve it, then start over with a fresh key.
                let backup = backup_invalid_config(&path, &text)?;
                let key = generate_key();
                let pretty = format!("{{\n \"key\": \"{}\"\n}}\n", key);
                write_string_atomic(&path, &pretty)?;
                return Ok(ConfigStatus::RewrittenFromInvalid { backup });
            }
            // Key is "needed" when absent, non-string, or empty.
            let needs_key = match v.get("key") {
                Some(Value::String(s)) => s.is_empty(),
                _ => true,
            };
            if needs_key {
                let key = generate_key();
                v.as_object_mut()
                    .expect("object checked")
                    .insert("key".into(), Value::String(key));
                // Serialization of a Value should not fail; the fallback
                // builds a minimal config with a fresh key just in case.
                let mut pretty = serde_json::to_string_pretty(&v).unwrap_or_else(|_| {
                    let key = generate_key();
                    json!({ "key": key }).to_string()
                });
                if !pretty.ends_with('\n') {
                    pretty.push('\n');
                }
                write_string_atomic(&path, &pretty)?;
                Ok(ConfigStatus::FixedMissingKey)
            } else {
                Ok(ConfigStatus::Valid)
            }
        }
        Err(_) => {
            // Unparseable: the old key may still matter to the user, so ask
            // rather than silently generating a new one.
            // NOTE(review): the prompted key is embedded without JSON
            // escaping; a `"` or `\` in it would corrupt the file — consider
            // escaping before interpolation.
            let key = prompt_for_key()?; let backup = backup_invalid_config(&path, &text)?;
            let pretty = format!("{{\n \"key\": \"{}\"\n}}\n", key);
            write_string_atomic(&path, &pretty)?;
            Ok(ConfigStatus::RewrittenFromInvalid { backup })
        }
    }
}
/// Preserves unparseable config contents at `<path>.bak.<unix-seconds>`
/// (extension replaced) before the config gets rewritten; returns the
/// backup's path.
fn backup_invalid_config(path: &Path, contents: &str) -> io::Result<PathBuf> {
    use std::time::{SystemTime, UNIX_EPOCH};
    let ts = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
    let backup = path.with_extension(format!("bak.{ts}"));
    write_string_atomic(&backup, contents).map(|()| backup)
}
/// Recursively collects `.env*` files under `root`, sorted and deduplicated.
///
/// Standard walker filters (gitignore rules, hidden-file skipping, parent
/// ignores) are disabled so env files are always found even when ignored by
/// git; `.eenvignore` files are honored for opt-out. Hidden entries must not
/// be filtered: dotfiles like `.env` are exactly what we are looking for.
/// (The old code set `.hidden(true)` and then overrode it via
/// `.standard_filters(false)`, plus a `filter_entry` that always returned
/// true — both removed; behavior is unchanged.)
fn find_env_files_recursive(root: &Path) -> io::Result<Vec<PathBuf>> {
    let mut builder = WalkBuilder::new(root);
    builder
        .standard_filters(false)
        .hidden(false) // explicit: do NOT skip dotfiles
        .follow_links(false)
        .parents(false)
        .add_custom_ignore_filename(".eenvignore");
    let mut out = Vec::new();
    for result in builder.build() {
        let dent = match result {
            Ok(d) => d,
            Err(err) => {
                // Unreadable entries are reported but do not abort the walk.
                eprintln!("walk error: {err}");
                continue;
            }
        };
        if is_env_file(&dent) {
            // Canonicalize so differently-spelled paths dedup reliably;
            // fall back to the raw path when canonicalization fails.
            let abs = dent
                .path()
                .canonicalize()
                .unwrap_or_else(|_| dent.path().to_path_buf());
            out.push(abs);
        }
    }
    out.sort();
    out.dedup();
    Ok(out)
}
/// Sorts and dedups env paths, then buckets them by file-name suffix into
/// (real, `.example`, `.enc`). Paths without a UTF-8 file name are dropped.
fn split_env_files(mut files: Vec<PathBuf>) -> (Vec<PathBuf>, Vec<PathBuf>, Vec<PathBuf>) {
    files.sort();
    files.dedup();
    let (mut real, mut examples, mut encs) = (Vec::new(), Vec::new(), Vec::new());
    for path in files {
        let Some(name) = path.file_name().and_then(|s| s.to_str()) else {
            continue;
        };
        let bucket = if name.ends_with(".example") {
            &mut examples
        } else if name.ends_with(".enc") {
            &mut encs
        } else {
            &mut real
        };
        bucket.push(path);
    }
    (real, examples, encs)
}
/// A walk entry counts as an env file when it is a regular file whose name
/// starts with `.env` — this covers `.env`, `.env.local`, `.env.example`,
/// `.env.enc`, and friends.
fn is_env_file(d: &DirEntry) -> bool {
    let is_regular_file = d.file_type().map(|t| t.is_file()).unwrap_or(false);
    is_regular_file
        && d.path()
            .file_name()
            .and_then(|s| s.to_str())
            .map(|name| name.starts_with(".env"))
            .unwrap_or(false)
}
/// Builds a value-redacted "skeleton" for each env file, keyed by path:
/// blank lines become empty strings, comment lines are kept verbatim,
/// `KEY=value` lines are reduced to `KEY=`, and anything else passes
/// through unchanged.
fn extract_env_skeletons(files: &[PathBuf]) -> io::Result<HashMap<PathBuf, Vec<String>>> {
    let mut out: HashMap<PathBuf, Vec<String>> = HashMap::new();
    for path in files {
        let reader = BufReader::new(File::open(path)?);
        let mut skeleton = Vec::new();
        for line in reader.lines() {
            let line = line?;
            let trimmed = line.trim();
            let redacted = if trimmed.is_empty() {
                String::new()
            } else if trimmed.starts_with('#') {
                line
            } else if let Some((key, _value)) = line.split_once('=') {
                format!("{}=", key.trim())
            } else {
                line
            };
            skeleton.push(redacted);
        }
        out.insert(path.clone(), skeleton);
    }
    Ok(out)
}
/// Maps an env path to its `.example` sibling; a path whose name already
/// ends in `.example` is returned unchanged (idempotent).
fn example_path_for(path: &Path) -> PathBuf {
    let file_name = path.file_name().and_then(|s| s.to_str()).unwrap_or_default();
    if file_name.ends_with(".example") {
        path.to_path_buf()
    } else {
        path.with_file_name(format!("{file_name}.example"))
    }
}
/// What happened to a single `.example` target: freshly created, replaced an
/// existing file, or skipped because the source itself was an example file.
#[derive(Debug)]
enum ExampleAction {
    Created, Overwritten, SourceIsExample, }
fn ensure_env_examples_from_skeletons(
skeletons: &std::collections::HashMap<PathBuf, Vec<String>>,
) -> io::Result<Vec<(PathBuf, PathBuf, ExampleAction)>> {
let mut results = Vec::new();
for (real_path, lines) in skeletons {
let target = example_path_for(real_path);
if real_path == &target {
results.push((real_path.clone(), target, ExampleAction::SourceIsExample));
continue;
}
let existed = target.exists();
write_lines_atomic(&target, lines)?;
let action = if existed {
ExampleAction::Overwritten
} else {
ExampleAction::Created
};
results.push((real_path.clone(), target, action));
}
Ok(results)
}
/// Joins `lines` with `\n`, guarantees a trailing newline, and writes the
/// result to `path` atomically (sibling temp file + fsync + rename).
fn write_lines_atomic(path: &Path, lines: &[String]) -> io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    let mut buf = lines.join("\n");
    if !buf.ends_with('\n') {
        buf.push('\n');
    }
    let tmp = path.with_extension("example.tmp~");
    let mut f = File::create(&tmp)?;
    f.write_all(buf.as_bytes())?;
    f.sync_all()?;
    drop(f);
    fs::rename(tmp, path)
}
/// Walks upward from `start` (canonicalized) until a directory containing a
/// `.git` entry is found. Reaching the filesystem root without finding one
/// falls back to returning `start` itself (non-canonicalized).
fn find_repo_root(start: &Path) -> io::Result<PathBuf> {
    let mut dir = start.canonicalize()?;
    loop {
        if dir.join(".git").exists() {
            break Ok(dir);
        }
        match dir.parent() {
            Some(parent) => dir = parent.to_path_buf(),
            None => break Ok(start.to_path_buf()),
        }
    }
}
/// Gitignore patterns that would hide `.example`/`.enc` artifacts from git.
/// Those artifacts are exactly the files eenv WANTS committed, so
/// `fix_gitignore_from_found` strips any line matching one of these from
/// `.gitignore`.
fn banned_env_ignores() -> &'static [&'static str] {
    &[
        ".env.example",
        ".env*.example",
        ".env.*.example",
        "*.env.example",
        ".env.enc",
        ".env*.enc",
        ".env.*.enc",
        "*.env.enc",
    ]
}
/// Extracts the effective pattern from a gitignore-style line: everything
/// before the first `#`, whitespace-trimmed. Note: this treats even a
/// mid-line `#` as starting a comment, which is how the rest of this file
/// consistently interprets patterns.
fn pattern_core(line: &str) -> &str {
    match line.split_once('#') {
        Some((before, _comment)) => before.trim(),
        None => line.trim(),
    }
}
/// Report of a `.gitignore` rewrite: the file's path, the lines appended,
/// the banned lines removed, and whether the file actually changed on disk.
#[derive(Debug)]
pub struct GitignoreEdit {
    pub path: PathBuf,
    pub added: Vec<String>,
    pub removed: Vec<String>,
    pub changed: bool,
}
/// Converts an absolute file path into a gitignore pattern relative to
/// `root`, normalizing to forward slashes and escaping spaces. Returns
/// `None` when `abs` is not under `root`; an empty relative path (abs ==
/// root) yields "/".
fn to_gitignore_rel_pattern(abs: &std::path::Path, root: &std::path::Path) -> Option<String> {
    let rel = abs.strip_prefix(root).ok()?;
    let pattern = rel.to_string_lossy().replace('\\', "/").replace(' ', r"\ ");
    if pattern.is_empty() {
        Some(String::from("/"))
    } else {
        Some(pattern)
    }
}
/// Rewrites the repo-root `.gitignore` so that (a) none of the banned
/// patterns hiding `.example`/`.enc` artifacts remain, and (b) every real
/// env file plus `eenv.config.json` is ignored. Returns what was added and
/// removed; writes atomically and only when the text actually changed.
fn fix_gitignore_from_found(
    project_root: &std::path::Path,
    real_env_files: &[std::path::PathBuf],
) -> std::io::Result<GitignoreEdit> {
    let root = find_repo_root(project_root)?;
    let path = root.join(".gitignore");
    let original = if path.exists() {
        std::fs::read_to_string(&path)?
    } else {
        String::new()
    };
    let mut lines: Vec<String> = if original.is_empty() {
        Vec::new()
    } else {
        original.lines().map(|s| s.to_string()).collect()
    };
    use std::collections::{BTreeSet, HashSet};
    // Step 1: drop lines whose effective pattern is on the banned list.
    let banned: HashSet<&'static str> = banned_env_ignores().iter().copied().collect();
    let mut removed = Vec::new();
    lines.retain(|line| {
        let core = pattern_core(line);
        if !core.is_empty() && banned.contains(core) {
            removed.push(line.clone());
            false
        } else {
            true
        }
    });
    // Step 2: build the set of patterns that MUST be present — every real
    // env file (relative to the repo root) plus the key config. BTreeSet
    // gives deterministic (sorted) output order.
    let mut required: BTreeSet<String> = BTreeSet::new(); for abs in real_env_files {
        let Some(fname) = abs.file_name().and_then(|s| s.to_str()) else { continue; };
        if fname.ends_with(".example") || fname.ends_with(".enc") {
            continue; }
        if let Some(pat) = to_gitignore_rel_pattern(abs, &root) {
            required.insert(pat);
        }
    }
    required.insert("eenv.config.json".to_string());
    // Step 3: append the missing patterns under a marker comment, separated
    // from existing content by a blank line.
    let existing: HashSet<String> = lines.iter().map(|l| pattern_core(l).to_string()).collect();
    let mut added = Vec::new();
    let missing: Vec<String> = required
        .into_iter()
        .filter(|r| !existing.contains(r))
        .collect();
    if !missing.is_empty() {
        if !lines.is_empty() && !lines.last().unwrap().trim().is_empty() {
            lines.push(String::new());
        }
        lines.push("# added by eenv".to_string());
        for m in &missing {
            lines.push(m.clone());
        }
        added.extend(missing);
    }
    let new_text = {
        let mut s = lines.join("\n");
        if !s.ends_with('\n') {
            s.push('\n');
        }
        s
    };
    // Step 4: atomic write, but only when the content actually differs.
    let changed = new_text != original;
    if changed {
        let tmp = path.with_extension("tmp~");
        {
            let mut f = std::fs::File::create(&tmp)?;
            use std::io::Write;
            f.write_all(new_text.as_bytes())?;
            f.sync_all()?;
        }
        std::fs::rename(tmp, &path)?;
    }
    Ok(GitignoreEdit {
        path,
        added,
        removed,
        changed,
    })
}
/// Pre-commit guard. Always refuses a commit that stages a raw `.env*` file
/// (neither `.example` nor `.enc`). With `write` set, additionally
/// regenerates derived artifacts — example skeletons, `.gitignore` entries,
/// the key config, and `.enc` blobs — and stages them so they ride along
/// with the commit.
fn pre_commit(repo_root: &Path, write: bool) -> io::Result<()> {
    let staged = staged_files(repo_root)?;
    let mut offenders: Vec<PathBuf> = Vec::new();
    for p in &staged {
        if let Some(name) = p.file_name().and_then(|s| s.to_str()) {
            // Raw env file: name starts with ".env" but is not a derived
            // (.example/.enc) artifact.
            if name.starts_with(".env") && !name.ends_with(".example") && !name.ends_with(".enc") {
                offenders.push(p.clone());
            }
        }
    }
    if !offenders.is_empty() {
        eprintln!("[pre-commit] ❌ refusing to commit raw .env files:");
        for p in offenders {
            eprintln!(" - {}", p.display());
        }
        eprintln!("Hint: encrypt them to .env*.enc or add them to .gitignore.");
        // Returning an error makes main exit non-zero, aborting the commit.
        return Err(io::Error::new(io::ErrorKind::Other, "raw .env staged"));
    }
    let (files, _t_find) = time_result("find_env_files_recursive", || {
        find_env_files_recursive(repo_root)
    })?;
    let ((real, examples, encs), _t_split) = time_ok("split_env_files", || split_env_files(files));
    // Regenerate and stage .example skeletons (unlike init, this always
    // overwrites existing examples to keep them in sync).
    if write && !real.is_empty() {
        let skeletons = extract_env_skeletons(&real)?;
        match ensure_env_examples_from_skeletons(&skeletons) {
            Ok(actions) => {
                let mut to_add: Vec<PathBuf> = Vec::new();
                for (_src, dst, action) in actions {
                    match action {
                        ExampleAction::Created | ExampleAction::Overwritten => to_add.push(dst),
                        ExampleAction::SourceIsExample => {}
                    }
                }
                if !to_add.is_empty() {
                    git_add(repo_root, &to_add)?;
                }
            }
            Err(e) => eprintln!("[pre-commit] example gen error: {e}"),
        }
    }
    // Keep .gitignore consistent and stage it when it changed.
    if write && !real.is_empty() {
        match fix_gitignore_from_found(repo_root, &real) {
            Ok(report) => {
                if report.changed {
                    git_add(repo_root, &[report.path])?;
                }
            }
            Err(e) => eprintln!("[pre-commit] gitignore fix error: {e}"),
        }
    }
    // Ensure a key config exists, then encrypt and stage the .enc blobs.
    if write && !real.is_empty() {
        match ensure_eenv_config(repo_root) {
            Ok(ConfigStatus::Created) => eprintln!("[config] created eenv.config.json"),
            Ok(ConfigStatus::FixedMissingKey) => eprintln!("[config] injected key into eenv.config.json"),
            Ok(ConfigStatus::RewrittenFromInvalid { backup }) => eprintln!("[config] repaired eenv.config.json (backup: {})", backup.display()),
            Ok(ConfigStatus::Valid) => {}
            Err(e) => eprintln!("[config] error: {e}"),
        }
        let produced = encrypt_envs_to_enc(repo_root, &real)?;
        if !produced.is_empty() {
            git_add(repo_root, &produced)?;
        }
    }
    Ok(())
}
/// Builds the XChaCha20-Poly1305 AEAD from an arbitrary key string by
/// deriving a 256-bit cipher key as BLAKE3(key_str). Must stay in sync with
/// `read_eenv_key`, which performs the same derivation.
fn aead_from_key_str(key_str: &str) -> io::Result<XChaCha20Poly1305> {
    if key_str.trim().is_empty() {
        return Err(io::Error::new(io::ErrorKind::InvalidInput, "empty key"));
    }
    let digest = blake3::hash(key_str.as_bytes());
    Ok(XChaCha20Poly1305::new(digest.as_bytes().into()))
}
/// Writes `eenv.config.json` containing the given key string.
///
/// The key may come from user input (`prompt_for_key`), so `\` and `"` are
/// JSON-escaped before being embedded — previously a quote in the key would
/// have produced an unparseable config. Control characters are not expected:
/// the key is a trimmed single line.
fn write_eenv_config_with_key(repo_root: &Path, key_str: &str) -> io::Result<()> {
    let path = eenv_config_path(repo_root);
    // Escape backslashes first so the quote escapes are not double-escaped.
    let escaped = key_str.replace('\\', "\\\\").replace('"', "\\\"");
    let pretty = format!("{{\n \"key\": \"{}\"\n}}\n", escaped);
    write_string_atomic(&path, &pretty)
}
/// Ensures the repo-root `.gitignore` ignores `eenv.config.json` (the file
/// holding the secret key). Appends it under an "# added by eenv" marker and
/// rewrites the file atomically; no-op when the entry is already present.
fn ensure_gitignore_has_config(repo_root: &Path) -> io::Result<()> {
    let root = find_repo_root(repo_root)?;
    let path = root.join(".gitignore");
    let original = if path.exists() {
        fs::read_to_string(&path)?
    } else {
        String::new()
    };
    let mut lines: Vec<String> = if original.is_empty() {
        Vec::new()
    } else {
        original.lines().map(String::from).collect()
    };
    if lines.iter().any(|l| pattern_core(l) == "eenv.config.json") {
        return Ok(());
    }
    // Separate our section from existing content with one blank line.
    if !lines.is_empty() && !lines.last().unwrap().trim().is_empty() {
        lines.push(String::new());
    }
    lines.push("# added by eenv".to_string());
    lines.push("eenv.config.json".to_string());
    let mut text = lines.join("\n");
    if !text.ends_with('\n') {
        text.push('\n');
    }
    let tmp = path.with_extension("tmp~");
    {
        let mut f = File::create(&tmp)?;
        f.write_all(text.as_bytes())?;
        f.sync_all()?;
    }
    fs::rename(tmp, &path)
}
/// Recovery path for a repo with `.env*.enc` files but no valid config:
/// prompt the user for the key, prove it works by decrypting at least one
/// `.enc` file, and only then persist the config, gitignore entry, and run
/// the full decrypt pass.
fn bootstrap_key_and_decrypt(repo_root: &Path) -> io::Result<()> {
    let key_str = prompt_for_key()?;
    let aead = aead_from_key_str(&key_str)?;
    let files = find_env_files_recursive(repo_root)?;
    let (_real, _examples, encs) = split_env_files(files);
    if encs.is_empty() {
        return Err(io::Error::new(io::ErrorKind::NotFound, "no .env*.enc files found"));
    }
    let mut validated = false;
    for enc_path in &encs {
        let dst = dec_output_path(enc_path);
        if dst.exists() {
            // Plaintext already present: trial-decrypt into a scratch file
            // so we never overwrite it, then discard the scratch either way.
            let tmp = dst.with_extension("validate.tmp~");
            match decrypt_file_from_enc(&aead, enc_path, &tmp) {
                Ok(()) => {
                    let _ = fs::remove_file(&tmp);
                    validated = true;
                    break;
                }
                Err(_) => {
                    let _ = fs::remove_file(&tmp);
                    continue;
                }
            }
        } else {
            // No plaintext yet: a successful decrypt both validates the key
            // and usefully leaves the decrypted file in place.
            if decrypt_file_from_enc(&aead, enc_path, &dst).is_ok() {
                validated = true;
                break;
            } else {
                // Best-effort cleanup; decrypt failures normally occur
                // before `dst` is created, so this may remove nothing.
                let _ = fs::remove_file(&dst);
            }
        }
    }
    if !validated {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "provided key did not decrypt any .env*.enc"));
    }
    // Key proven good: persist it, ignore the config file, decrypt the rest.
    write_eenv_config_with_key(repo_root, &key_str)?;
    ensure_gitignore_has_config(repo_root)?;
    handle_enc_workflow(repo_root)
}
/// Lists currently staged paths via `git diff --cached --name-only -z`,
/// joined onto `repo_root`. The NUL-separated `-z` output is robust to
/// spaces and unusual characters in file names.
fn staged_files(repo_root: &Path) -> io::Result<Vec<PathBuf>> {
    let out = Proc::new("git")
        .arg("-C")
        .arg(repo_root)
        .args(["diff", "--name-only", "--cached", "-z"])
        .output()?;
    if !out.status.success() {
        return Err(io::Error::new(io::ErrorKind::Other, "git diff failed"));
    }
    let files = out
        .stdout
        .split(|b| *b == 0u8)
        .filter(|name| !name.is_empty())
        .map(|name| repo_root.join(String::from_utf8_lossy(name).as_ref()))
        .collect();
    Ok(files)
}
/// Stages the given paths with `git add --` (the `--` guards against paths
/// that look like options). No-op for an empty list.
fn git_add(repo_root: &Path, paths: &[PathBuf]) -> io::Result<()> {
    if paths.is_empty() {
        return Ok(());
    }
    let status = Proc::new("git")
        .arg("-C")
        .arg(repo_root)
        .arg("add")
        .arg("--")
        .args(paths)
        .status()?;
    if status.success() {
        Ok(())
    } else {
        Err(io::Error::new(io::ErrorKind::Other, "git add failed"))
    }
}
/// Encrypts every real env file to a `.enc` sibling using the configured
/// key, returning the produced paths. `.example`/`.enc` inputs are skipped
/// defensively even though callers already filter them out.
fn encrypt_envs_to_enc(repo_root: &Path, real_envs: &[PathBuf]) -> io::Result<Vec<PathBuf>> {
    let key = read_eenv_key(repo_root)?;
    let aead = XChaCha20Poly1305::new((&key).into());
    let mut produced = Vec::new();
    for src in real_envs {
        let name = match src.file_name().and_then(|s| s.to_str()) {
            Some(n) => n,
            None => continue,
        };
        if name.ends_with(".example") || name.ends_with(".enc") {
            continue;
        }
        let dst = enc_output_path(src);
        encrypt_file_to_enc(&aead, src, &dst)?;
        println!("[enc] wrote {}", dst.display());
        produced.push(dst);
    }
    Ok(produced)
}
/// Reads the key string from `eenv.config.json` and derives the 32-byte
/// cipher key as BLAKE3(trimmed key). Must stay in sync with
/// `aead_from_key_str`, which performs the same derivation.
fn read_eenv_key(repo_root: &Path) -> io::Result<[u8; 32]> {
    let cfg_path = eenv_config_path(repo_root);
    let text = fs::read_to_string(&cfg_path)?;
    let v: serde_json::Value = serde_json::from_str(&text)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, format!("bad eenv.config.json: {e}")))?;
    let key_str = v
        .get("key")
        .and_then(|x| x.as_str())
        .map(str::trim)
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "eenv.config.json missing non-empty \"key\""))?;
    if key_str.is_empty() {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "empty key"));
    }
    Ok(*blake3::hash(key_str.as_bytes()).as_bytes())
}
/// Atomically writes `bytes` to `path`: write a sibling temp file, fsync,
/// then rename over the destination so readers never observe a partial file.
/// Parent directories are created as needed.
fn write_bytes_atomic(path: &Path, bytes: &[u8]) -> io::Result<()> {
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?;
    }
    // Append ".tmp~" to the whole file name. The previous
    // `with_extension("tmp~")` REPLACED an existing extension, so the temp
    // file for "dir/.env.enc" was "dir/.env.tmp~" — the same temp path used
    // when writing "dir/.env", a collision hazard between unrelated writes.
    let mut tmp_name = path.file_name().map(|s| s.to_os_string()).unwrap_or_default();
    tmp_name.push(".tmp~");
    let tmp = path.with_file_name(tmp_name);
    {
        let mut f = File::create(&tmp)?;
        f.write_all(bytes)?;
        f.sync_all()?;
    }
    fs::rename(tmp, path)
}
/// Runs a fallible closure and reports its wall-clock duration on stderr,
/// returning the success value paired with the elapsed time. On error the
/// `?` propagates immediately and no timing line is printed.
fn time_result<F, T, E>(label: &str, f: F) -> Result<(T, Duration), E>
where
    F: FnOnce() -> Result<T, E>,
{
    let started = Instant::now();
    let value = f()?;
    let elapsed = started.elapsed();
    eprintln!("[time] {label}: {:.3} ms", elapsed.as_secs_f64() * 1000.0);
    Ok((value, elapsed))
}
/// Runs an infallible closure and reports its wall-clock duration on
/// stderr, returning the result paired with the elapsed time.
fn time_ok<F, T>(label: &str, f: F) -> (T, Duration)
where
    F: FnOnce() -> T,
{
    let started = Instant::now();
    let value = f();
    let elapsed = started.elapsed();
    eprintln!("[time] {label}: {:.3} ms", elapsed.as_secs_f64() * 1000.0);
    (value, elapsed)
}