use regex::Regex;
use std::collections::{HashMap, HashSet};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::OnceLock;
use walkdir::{DirEntry, WalkDir};
/// Emission order of category sections in the generated `.env.example`.
/// Iterated in `generate_env_example`; "Other" (the fallback bucket used by
/// `categorize_env_vars` for unmatched prefixes) is intentionally last.
/// Categories produced elsewhere but missing from this list are appended
/// after the ordered pass, in hash-map iteration order.
const CATEGORY_ORDER: &[&str] = &[
"Next.js Configuration",
"Node.js Configuration",
"Application Configuration",
"Database Configuration",
"Supabase Configuration",
"Authentication",
"Better Auth Configuration",
"Email Configuration",
"MinIO Storage Configuration",
"S3 Storage Configuration",
"DigitalOcean Configuration",
"AWS Configuration",
"Stripe Configuration",
"Mollie Payment Configuration",
"Google Services",
"Apple Services",
"Microsoft Services",
"GitHub Integration",
"API Configuration",
"Redis Configuration",
"Webhook Configuration",
"Typesense Configuration",
"Passkey Configuration",
"PDF Configuration",
"Presigned URL Configuration",
"Other",
];
/// Maps a variable-name prefix to its category heading.
/// Matching happens in `categorize_env_vars`, which sorts these by prefix
/// length (longest first) before testing, so more specific prefixes like
/// `NEXT_PUBLIC_` win over `NEXT_` regardless of their order here.
/// Variables matching no prefix fall into the "Other" category.
const CATEGORY_PREFIXES: &[(&str, &str)] = &[
("NEXT_PUBLIC_", "Next.js Configuration"),
("NEXT_", "Next.js Configuration"),
("NODE_", "Node.js Configuration"),
("DATABASE_", "Database Configuration"),
("SUPABASE_", "Supabase Configuration"),
("AUTH_", "Authentication"),
("BETTER_AUTH_", "Better Auth Configuration"),
("SMTP_", "Email Configuration"),
("EMAIL_", "Email Configuration"),
("RESEND_", "Email Configuration"),
("AWS_", "AWS Configuration"),
("S3_", "S3 Storage Configuration"),
("MINIO_", "MinIO Storage Configuration"),
("DIGITALOCEAN_", "DigitalOcean Configuration"),
("STRIPE_", "Stripe Configuration"),
("MOLLIE_", "Mollie Payment Configuration"),
("GOOGLE_", "Google Services"),
("APPLE_", "Apple Services"),
("MICROSOFT_", "Microsoft Services"),
("GITHUB_", "GitHub Integration"),
("API_", "API Configuration"),
("APP_", "Application Configuration"),
("REDIS_", "Redis Configuration"),
("WEBHOOK_", "Webhook Configuration"),
("TYPESENSE_", "Typesense Configuration"),
("PASSKEY_", "Passkey Configuration"),
("PDF_", "PDF Configuration"),
("PRESIGNED_", "Presigned URL Configuration"),
];
/// Scans the project tree for environment-variable usages and writes a
/// `KEY=` skeleton file (one line per key, sorted alphabetically).
///
/// `output` overrides the destination; a relative path is resolved against
/// `project_root`, and `None` defaults to `<project_root>/.env.default`.
/// Returns an error string if the scan or the final write fails; prints a
/// notice and succeeds when no variables are found.
pub fn generate_env_default(project_root: &Path, output: Option<&str>) -> Result<(), String> {
    let keys = get_required_env_vars(project_root)?;
    if keys.is_empty() {
        println!("No environment variables detected during scanning.");
        return Ok(());
    }
    // Resolve the target path: relative outputs are anchored at the project root.
    let file_path = match output {
        Some(raw) => {
            let candidate = PathBuf::from(raw);
            if candidate.is_relative() {
                project_root.join(candidate)
            } else {
                candidate
            }
        }
        None => project_root.join(".env.default"),
    };
    let mut sorted_keys: Vec<String> = keys.into_iter().collect();
    sorted_keys.sort();
    // Build the file body: "KEY=\n" per variable.
    let mut content = String::new();
    for key in &sorted_keys {
        content.push_str(key);
        content.push_str("=\n");
    }
    fs::write(&file_path, content)
        .map_err(|e| format!("Failed to write {}: {}", file_path.display(), e))?;
    println!(
        "Generated {} key(s) into {}",
        sorted_keys.len(),
        file_path.display()
    );
    Ok(())
}
/// Walks `project_root`, skipping ignored directories, and collects the set
/// of environment-variable names referenced by any analyzable file.
///
/// Files that cannot be read as UTF-8 text are silently skipped; a directory
/// traversal failure aborts with a formatted error string.
pub fn get_required_env_vars(project_root: &Path) -> Result<HashSet<String>, String> {
    let mut vars = HashSet::new();
    let entries = WalkDir::new(project_root)
        .into_iter()
        .filter_entry(|e| !should_ignore(e));
    for entry in entries {
        let entry = entry.map_err(|e| {
            let shown = e
                .path()
                .map_or_else(|| "unknown".to_string(), |p| p.display().to_string());
            format!("Failed to traverse {}: {}", shown, e)
        })?;
        if !should_analyze(&entry) {
            continue;
        }
        // Non-text (e.g. binary) files fail read_to_string and are skipped.
        if let Ok(text) = fs::read_to_string(entry.path()) {
            vars.extend(capture_candidates(&text));
        }
    }
    Ok(vars)
}
/// Returns true for directories the scanner should not descend into
/// (VCS metadata, dependency caches, build output).
/// Files always return false here; they are filtered later by `should_analyze`.
fn should_ignore(entry: &DirEntry) -> bool {
    if !entry.file_type().is_dir() {
        return false;
    }
    const SKIP_DIRS: &[&str] = &[
        ".git",
        ".github",
        "node_modules",
        "target",
        "dist",
        "build",
        ".xbp",
        "vendor",
    ];
    let name = entry.file_name().to_string_lossy();
    SKIP_DIRS.contains(&name.as_ref())
}
/// Decides whether a directory entry is a source/config file worth scanning
/// for environment-variable references.
///
/// Accepts Dockerfiles and docker-compose files by name (they have no useful
/// extension), otherwise matches against a whitelist of file extensions.
fn should_analyze(entry: &DirEntry) -> bool {
    if entry.file_type().is_dir() {
        return false;
    }
    let name = entry.file_name().to_string_lossy().to_lowercase();
    // Dockerfiles carry ENV/ARG declarations but are identified by name only.
    if name == "dockerfile" || name.starts_with("docker-compose") {
        return true;
    }
    entry
        .path()
        .extension()
        .and_then(|e| e.to_str())
        .map(|ext| {
            matches!(
                ext.to_lowercase().as_str(),
                "rs" | "py"
                    | "ts"
                    | "tsx"
                    | "js"
                    | "jsx"
                    | "json"
                    | "yaml"
                    | "yml"
                    | "toml"
                    | "env"
                    | "sh"
                    | "bash"
                    | "zsh"
            )
        })
        .unwrap_or(false)
}
/// Extracts every environment-variable name (capture group 1 of each pattern
/// in `regexes()`) found in `text`, deduplicated into a set.
fn capture_candidates(text: &str) -> HashSet<String> {
    regexes()
        .iter()
        .flat_map(|re| re.captures_iter(text))
        .filter_map(|cap| cap.get(1).map(|m| m.as_str().to_string()))
        .collect()
}
fn regexes() -> &'static [Regex] {
static REGEXES: OnceLock<Vec<Regex>> = OnceLock::new();
REGEXES.get_or_init(|| {
vec["']\s*\)"#).unwrap(),
Regex::new(r#"process\.env\.([A-Z0-9_]+)"#).unwrap(),
Regex::new(r#"process\.env\[\s*["']([A-Z0-9_]+)["']\s*\]"#).unwrap(),
Regex::new(r#"os\.environ\[\s*["']([A-Z0-9_]+)["']\s*\]"#).unwrap(),
Regex::new(r#"os\.environ\.get\(\s*["']([A-Z0-9_]+)["']"#).unwrap(),
Regex::new(r#"os\.getenv\(\s*["']([A-Z0-9_]+)["']"#).unwrap(),
Regex::new(r#"\b(?:ARG|ENV)\s+([A-Z0-9_]+)\b"#).unwrap(),
Regex::new(r#"\$\{([A-Z0-9_]+)\}"#).unwrap(),
]
})
}
/// Walks `project_root` and collects every referenced environment variable
/// together with an optional literal default value (when the source used a
/// `process.env.FOO || <literal>` fallback).
///
/// The first sighting of a variable wins: a default recorded earlier is
/// never overwritten by a later plain usage. Unreadable files are skipped;
/// traversal errors abort with a formatted error string.
pub fn get_env_vars_with_defaults(
    project_root: &Path,
) -> Result<HashMap<String, Option<String>>, String> {
    let mut vars: HashMap<String, Option<String>> = HashMap::new();
    let entries = WalkDir::new(project_root)
        .into_iter()
        .filter_entry(|e| !should_ignore(e));
    for entry in entries {
        let entry = entry.map_err(|e| {
            let shown = e
                .path()
                .map_or_else(|| "unknown".to_string(), |p| p.display().to_string());
            format!("Failed to traverse {}: {}", shown, e)
        })?;
        if !should_analyze(&entry) {
            continue;
        }
        let Ok(text) = fs::read_to_string(entry.path()) else {
            continue; // binary / non-UTF-8 files are silently skipped
        };
        for (name, default) in capture_with_defaults(&text) {
            // or_insert keeps whatever (possibly Some) default was seen first.
            vars.entry(name).or_insert(default);
        }
    }
    Ok(vars)
}
/// Scans `text` for environment-variable usages, pairing each name with an
/// optional literal default.
///
/// Runs two passes: first the default-aware patterns (so a found default is
/// recorded), then the plain patterns with `None` — `or_insert` guarantees a
/// default from pass one is never clobbered by pass two.
fn capture_with_defaults(text: &str) -> Vec<(String, Option<String>)> {
    let mut found: HashMap<String, Option<String>> = HashMap::new();
    // Pass 1: patterns that also capture a literal fallback value.
    for (re, default_group) in default_regexes() {
        for cap in re.captures_iter(text) {
            let Some(name) = cap.get(1) else { continue };
            let default = default_group
                .and_then(|idx| cap.get(idx))
                .map(|m| m.as_str().to_string());
            found.entry(name.as_str().to_string()).or_insert(default);
        }
    }
    // Pass 2: plain usages; only inserts names not already recorded.
    for re in regexes() {
        for cap in re.captures_iter(text) {
            if let Some(name) = cap.get(1) {
                found.entry(name.as_str().to_string()).or_insert(None);
            }
        }
    }
    found.into_iter().collect()
}
/// Patterns that capture a variable name (group 1) together with a literal
/// JS-style fallback value; the paired `Option<usize>` names the capture
/// group holding the default. Compiled once and cached via `OnceLock`.
fn default_regexes() -> &'static [(Regex, Option<usize>)] {
    static REGEXES: OnceLock<Vec<(Regex, Option<usize>)>> = OnceLock::new();
    REGEXES.get_or_init(|| {
        // These patterns are hard-coded and known-valid, so compilation
        // cannot fail at runtime.
        let compile = |pat: &str| Regex::new(pat).expect("hard-coded regex must compile");
        vec![
            // String fallback: process.env.FOO || "bar" / 'bar'
            (
                compile(r#"process\.env\.([A-Z_][A-Z0-9_]*)\s*\|\|\s*["']((?:(?!["']).)*)["']"#),
                Some(2),
            ),
            // Numeric fallback: process.env.FOO || 42 / 4.2
            (
                compile(r#"process\.env\.([A-Z_][A-Z0-9_]*)\s*\|\|\s*(\d+(?:\.\d+)?)"#),
                Some(2),
            ),
            // Boolean fallback (case-insensitive): process.env.FOO || true
            (
                compile(r#"(?i)process\.env\.([A-Z_][A-Z0-9_]*)\s*\|\|\s*(true|false)"#),
                Some(2),
            ),
        ]
    })
}
/// Groups variables into category buckets by name prefix.
///
/// Prefixes are tried longest-first so the most specific one wins (e.g.
/// `NEXT_PUBLIC_` before `NEXT_`); variables matching nothing land in
/// "Other". Each bucket's entries are sorted alphabetically by name.
fn categorize_env_vars(
    vars: &HashMap<String, Option<String>>,
) -> HashMap<String, Vec<(String, Option<String>)>> {
    // Longest prefix first, so more specific prefixes take precedence.
    let mut prefixes: Vec<(&str, &str)> = CATEGORY_PREFIXES.to_vec();
    prefixes.sort_by_key(|&(prefix, _)| std::cmp::Reverse(prefix.len()));
    let mut categories: HashMap<String, Vec<(String, Option<String>)>> = HashMap::new();
    for (var, default) in vars {
        let category = prefixes
            .iter()
            .find(|(prefix, _)| var.starts_with(prefix))
            .map(|&(_, category)| category)
            .unwrap_or("Other");
        categories
            .entry(category.to_string())
            .or_default()
            .push((var.clone(), default.clone()));
    }
    // Deterministic, alphabetical order inside every bucket.
    for entries in categories.values_mut() {
        entries.sort_by(|a, b| a.0.cmp(&b.0));
    }
    categories
}
/// Generates a categorized `.env.example` from the variables (and optional
/// defaults) discovered in the project.
///
/// `include_prefix` / `exclude_prefix` filter the variable set (include is
/// applied first). `output` overrides the destination (relative paths resolve
/// against `project_root`; default `<project_root>/.env.example`). When
/// `clean_local` is set, `.env.local` is afterwards purged of keys absent
/// from the generated set. Returns an error string on scan or write failure.
pub fn generate_env_example(
    project_root: &Path,
    output: Option<&str>,
    clean_local: bool,
    include_prefix: &[String],
    exclude_prefix: &[String],
) -> Result<(), String> {
    let mut vars = get_env_vars_with_defaults(project_root)?;
    if !include_prefix.is_empty() {
        vars.retain(|k, _| include_prefix.iter().any(|p| k.starts_with(p)));
    }
    if !exclude_prefix.is_empty() {
        vars.retain(|k, _| !exclude_prefix.iter().any(|p| k.starts_with(p)));
    }
    if vars.is_empty() {
        println!("No environment variables detected (or all filtered out).");
        return Ok(());
    }
    // Resolve output: relative paths are anchored at the project root.
    let file_path = match output {
        Some(raw) => {
            let candidate = PathBuf::from(raw);
            if candidate.is_relative() {
                project_root.join(candidate)
            } else {
                candidate
            }
        }
        None => project_root.join(".env.example"),
    };
    let categories = categorize_env_vars(&vars);
    let mut content = String::from("# Environment Variables\n");
    content.push_str("# Copy this file to .env and fill in the values\n");
    // Shared renderer for one "# Category" section.
    let append_section = |buf: &mut String, cat: &str, items: &[(String, Option<String>)]| {
        buf.push_str(&format!("\n# {}\n", cat));
        for (var, default) in items {
            match default {
                Some(d) => buf.push_str(&format!("{}= # default: {}\n", var, d)),
                None => buf.push_str(&format!("{}=\n", var)),
            }
        }
    };
    // First pass: known categories in canonical order.
    for &cat in CATEGORY_ORDER {
        if let Some(items) = categories.get(cat) {
            append_section(&mut content, cat, items);
        }
    }
    // Second pass: any category not covered by CATEGORY_ORDER.
    for (cat, items) in &categories {
        if !CATEGORY_ORDER.contains(&cat.as_str()) {
            append_section(&mut content, cat, items);
        }
    }
    fs::write(&file_path, content)
        .map_err(|e| format!("Failed to write {}: {}", file_path.display(), e))?;
    println!(
        "Generated {} variable(s) into {}",
        vars.len(),
        file_path.display()
    );
    if clean_local {
        let keys: HashSet<_> = vars.keys().map(|s| s.as_str()).collect();
        clean_env_local(project_root, &keys)?;
    }
    Ok(())
}
/// Removes keys from `<project_root>/.env.local` that are not present in
/// `example_keys`, listing what was removed on stdout.
///
/// Comment lines are kept; runs of consecutive blank lines are collapsed to
/// one. Succeeds silently when the file does not exist or no unused keys are
/// found; returns an error string if the file cannot be read or written.
///
/// FIX: dropped the original branch
/// `if is_empty && prev_title && is_title { continue; }` — a trimmed line
/// cannot be both empty and start with '#', so the condition was always
/// false (dead code); removal is behavior-preserving. Also merged the two
/// duplicated write paths into one.
pub fn clean_env_local(project_root: &Path, example_keys: &HashSet<&str>) -> Result<(), String> {
    let env_local = project_root.join(".env.local");
    if !env_local.exists() {
        println!(".env.local not found, skipping cleanup");
        return Ok(());
    }
    let content = fs::read_to_string(&env_local)
        .map_err(|e| format!("Failed to read {}: {}", env_local.display(), e))?;
    // Parse KEY=VALUE lines; comments and blanks are skipped.
    let mut local_vars: HashMap<String, String> = HashMap::new();
    for line in content.lines() {
        let trimmed = line.trim();
        if trimmed.is_empty() || trimmed.starts_with('#') {
            continue;
        }
        if let Some((key, value)) = trimmed.split_once('=') {
            let key = key.trim().to_string();
            let value = value.trim().to_string();
            if !key.is_empty() {
                local_vars.insert(key, value);
            }
        }
    }
    let unused: Vec<_> = local_vars
        .keys()
        .filter(|k| !example_keys.contains(k.as_str()))
        .collect();
    if unused.is_empty() {
        println!("No unused keys found in .env.local");
        return Ok(());
    }
    println!("\nFound {} unused key(s) in .env.local:", unused.len());
    for key in &unused {
        println!(" - {}", key);
    }
    let unused_set: HashSet<_> = unused.iter().map(|s| s.as_str()).collect();
    // Rewrite the file, dropping unused keys and collapsing duplicate blanks.
    let mut filtered_lines = Vec::new();
    let mut prev_empty = false;
    for line in content.lines() {
        let stripped = line.trim();
        let is_empty = stripped.is_empty();
        let is_title = stripped.starts_with('#');
        // Collapse consecutive blank lines into a single one.
        if is_empty && prev_empty {
            continue;
        }
        if is_empty || is_title {
            filtered_lines.push(line.to_string());
            prev_empty = is_empty;
            continue;
        }
        if let Some((key, _)) = stripped.split_once('=') {
            let key = key.trim();
            if !unused_set.contains(key) {
                filtered_lines.push(line.to_string());
                prev_empty = false;
            }
        }
        // Lines that are neither blank, comment, nor KEY=VALUE are dropped,
        // matching the previous behavior.
    }
    // Ensure non-empty output ends with a newline.
    let mut new_content = filtered_lines.join("\n");
    if !filtered_lines.is_empty() && !new_content.ends_with('\n') {
        new_content.push('\n');
    }
    fs::write(&env_local, new_content)
        .map_err(|e| format!("Failed to write {}: {}", env_local.display(), e))?;
    println!("\nRemoved {} unused key(s) from .env.local", unused.len());
    Ok(())
}