use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use colored::Colorize;
use tsafe_core::{audit::AuditEntry, env as tsenv};
use crate::helpers::*;
// Maximum directory depth explored when scanning subdirectories for a
// same-named dotenv file to suggest in the "not found" error message.
const DOTENV_SEARCH_MAX_DEPTH: usize = 8;
// Cap on how many candidate files the hint message will collect/list.
const DOTENV_SEARCH_MAX_MATCHES: usize = 20;
/// Password-manager / browser sources handled by the optional
/// `pm-import-extended` feature (matched case-insensitively by the caller).
const EXTENDED_IMPORT_SOURCES: &[&str] = &[
    "bitwarden",
    "1password",
    "lastpass",
    "chrome",
    "edge",
    "firefox",
];

/// Returns `true` when `from` names an extended (password-manager/browser)
/// import source rather than a dotenv file path.
fn is_extended_import_source(from: &str) -> bool {
    EXTENDED_IMPORT_SOURCES.iter().any(|source| *source == from)
}
/// Returns `true` for directories the dotenv search should never descend
/// into: build output, dependency caches, VCS metadata, and virtualenvs.
fn should_skip_subdir(dir_name: &OsStr) -> bool {
    const SKIP_LIST: &[&str] = &[
        "target",
        "node_modules",
        ".git",
        ".cargo",
        "vendor",
        "dist",
        "build",
        ".next",
        ".cache",
        "venv",
        ".venv",
        "__pycache__",
    ];
    // Lossy conversion: non-UTF-8 names can't equal any ASCII entry anyway.
    let name = dir_name.to_string_lossy();
    SKIP_LIST.iter().any(|skip| name.as_ref() == *skip)
}
/// Depth-bounded scan under `root` for files whose file name equals
/// `basename`, skipping well-known build/cache directories and stopping
/// once `DOTENV_SEARCH_MAX_MATCHES` hits are collected.
///
/// Unreadable directories and entries are silently ignored (best-effort
/// search used only to build a hint message). Results are ordered
/// shallowest-first, ties broken lexicographically.
fn find_same_named_files_under_dir(root: &Path, basename: &OsStr) -> Vec<PathBuf> {
    let mut found: Vec<PathBuf> = Vec::new();
    let mut to_visit: Vec<(PathBuf, usize)> = vec![(root.to_path_buf(), 0)];

    while let Some((dir, depth)) = to_visit.pop() {
        if found.len() >= DOTENV_SEARCH_MAX_MATCHES {
            break;
        }
        let entries = match std::fs::read_dir(&dir) {
            Ok(rd) => rd,
            // Permission errors, races, etc.: skip this directory.
            Err(_) => continue,
        };
        for entry in entries.flatten() {
            if found.len() >= DOTENV_SEARCH_MAX_MATCHES {
                break;
            }
            let file_type = match entry.file_type() {
                Ok(ft) => ft,
                Err(_) => continue,
            };
            let path = entry.path();
            if file_type.is_dir() {
                // Descend only while under the depth cap and outside
                // skip-listed build/cache directories.
                if depth < DOTENV_SEARCH_MAX_DEPTH
                    && !should_skip_subdir(entry.file_name().as_os_str())
                {
                    to_visit.push((path, depth + 1));
                }
            } else if file_type.is_file() && path.file_name() == Some(basename) {
                found.push(path);
            }
        }
    }

    // Shallowest paths first; lexicographic (lossy UTF-8) order for ties.
    found.sort_by_key(|p| (p.components().count(), p.to_string_lossy().into_owned()));
    found
}
/// Best-effort basename used when searching subdirectories for the
/// requested import file.
///
/// Returns [`Path::file_name`] when it exists; for paths without a file
/// name (e.g. `".."`), falls back to the whole path string so the search
/// still has something to match against. Returns `None` only for an
/// empty path.
fn basename_for_import_search(requested: &Path) -> Option<&OsStr> {
    // `or_else` keeps the fallback lazy instead of the original eager
    // `.or({ ... })` block (clippy `or_fun_call` shape); behavior is
    // unchanged since the fallback is side-effect free.
    requested.file_name().or_else(|| {
        if requested.as_os_str().is_empty() {
            None
        } else {
            Some(requested.as_os_str())
        }
    })
}
/// Renders `p` for display in a hint message.
///
/// Paths under `cwd` are shown relative to it — prefixed with `./` unless
/// they already start with a dot (e.g. `.env`) — and `cwd` itself becomes
/// `"."`. Paths outside `cwd` are shown unchanged.
fn display_path_for_hint(cwd: &Path, p: &Path) -> String {
    let Ok(rel) = p.strip_prefix(cwd) else {
        // Not under cwd: print the path as-is.
        return p.display().to_string();
    };
    let text = rel.to_string_lossy();
    if text.is_empty() {
        ".".to_string()
    } else if !text.starts_with('.') && rel.components().next().is_some() {
        format!("./{text}")
    } else {
        text.into_owned()
    }
}
/// Builds the multi-line error text shown when a dotenv import source does
/// not exist on disk.
///
/// Always includes the current directory and generic hints. For relative
/// `requested` paths it additionally scans subdirectories of `cwd` for
/// files with the same basename and suggests the first (shallowest)
/// candidate; absolute paths get a note explaining why no search was done.
fn dotenv_import_not_found_message(from: &str, requested: &Path, cwd: &Path) -> String {
let cwd_disp = cwd.display().to_string();
let mut msg = format!(
"import source not found: '{from}'\n \
Current directory: {cwd_disp}\n\n \
Hints:\n \
• Pass a path to an existing file, e.g. `tsafe import --from ./apps/web/.env`\n \
• Or `cd` into the directory that contains the file, then run import again.\n \
• Create the file if you meant to add variables there first.\n"
);
// Suggestions only make sense for relative paths: the search is rooted at cwd.
if requested.is_relative() {
if let Some(base) = basename_for_import_search(requested) {
// Bounded search (depth/match caps, build dirs skipped) for same-named files.
let hits = find_same_named_files_under_dir(cwd, base);
if !hits.is_empty() {
msg.push_str(&format!(
"\n Found {} file(s) named '{}' under this directory (search depth ≤ {}, skipped common build/cache folders):\n",
hits.len(),
base.to_string_lossy(),
DOTENV_SEARCH_MAX_DEPTH
));
for p in &hits {
let rel = display_path_for_hint(cwd, p);
msg.push_str(&format!(" {rel}\n"));
}
// Suggest the first hit — hits are sorted shallowest-first by the search.
if let Some(first) = hits.first() {
let arg = display_path_for_hint(cwd, first);
msg.push_str(&format!("\n Try: tsafe import --from '{arg}'\n"));
}
// The search stops at the cap, so "len == cap" means "possibly truncated".
if hits.len() >= DOTENV_SEARCH_MAX_MATCHES {
msg.push_str(&format!(
" (List capped at {DOTENV_SEARCH_MAX_MATCHES} matches; narrow the path or cd closer to the file.)\n"
));
}
} else {
msg.push_str(&format!(
"\n No files named '{base}' found under this directory within the search depth.\n \
If the file lives elsewhere, use an absolute path.\n",
base = base.to_string_lossy()
));
}
}
} else {
msg.push_str(
"\n This is an absolute path — tsafe did not search subdirectories (use a correct path or a relative path from cwd to get suggestions).\n",
);
}
msg
}
/// Entry point for `tsafe import`: imports key/value pairs into the vault
/// for `profile`.
///
/// `from` is either a dotenv file path or one of the extended
/// password-manager sources (see `EXTENDED_IMPORT_SOURCES`, feature-gated
/// behind `pm-import-extended`). `file` is only consumed by the extended
/// path (the exported CSV). `ns` optionally prefixes stored keys as
/// `<ns>/<KEY>`. With `dry_run`, decisions are printed but nothing is
/// written. Existing keys are skipped unless `overwrite` is set.
pub(crate) fn cmd_import(
profile: &str,
from: &str,
file: Option<&str>,
overwrite: bool,
skip_duplicates: bool,
ns: Option<&str>,
dry_run: bool,
) -> Result<()> {
// Source names are matched case-insensitively.
let from_lower = from.to_ascii_lowercase();
if is_extended_import_source(&from_lower) {
#[cfg(not(feature = "pm-import-extended"))]
{
// Silence unused-parameter warnings in the non-feature build,
// then fail with a pointer to a build that has the feature.
let _ = (file, skip_duplicates);
anyhow::bail!(
"extended import source '{from}' is not compiled into this build; use a tsafe build or release channel that includes `pm-import-extended`.\n\
Local development builds can rebuild with feature `pm-import-extended`."
);
}
#[cfg(feature = "pm-import-extended")]
{
// Extended sources require an explicit export-file path.
let export_file = file.ok_or_else(|| {
anyhow::anyhow!("--file <path> is required when --from is '{from}'")
})?;
return cmd_import_pw_manager(profile, from, export_file, overwrite, skip_duplicates);
}
}
// Dotenv path: fail early with the detailed hint message when missing.
let path = Path::new(from);
if !path.exists() {
let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
let text = dotenv_import_not_found_message(from, path, &cwd);
anyhow::bail!("{text}");
}
let imported =
tsenv::parse_dotenv(path).with_context(|| format!("failed to parse '{from}'"))?;
if dry_run {
// Dry-run opens the vault read-only-style just to detect key collisions;
// nothing is written and we return before the real import below.
let vault = open_vault(profile)?;
if let Some(n) = ns {
println!(
"{} [dry-run] Using namespace '{n}' (keys would be stored as '{n}/<KEY>')",
"i".blue()
);
}
let mut would_import = 0usize;
let mut would_skip = 0usize;
// Sort for deterministic, readable output.
let mut keys: Vec<String> = imported.keys().cloned().collect();
keys.sort();
for raw_key in &keys {
let key = match ns {
Some(n) => format!("{n}/{raw_key}"),
None => raw_key.clone(),
};
if !overwrite && vault.list().contains(&key.as_str()) {
println!(" {} [skip] {key} (already exists)", "!".yellow());
would_skip += 1;
} else {
println!(" {} [import] {key}", "✓".green());
would_import += 1;
}
}
println!(
"\n{} Dry-run: would import {would_import} key(s), skip {would_skip} existing",
"i".blue()
);
return Ok(());
}
// Real import: write each (optionally namespaced) key into the vault.
let mut vault = open_vault(profile)?;
let mut count = 0usize;
let mut skipped = 0usize;
if let Some(n) = ns {
println!(
"{} Using namespace '{n}' (keys stored as '{n}/<KEY>')",
"i".blue()
);
}
// NOTE(review): unlike the dry-run above, this iterates `imported` in its
// native (likely unsorted) order, so per-key messages may vary run to run.
for (raw_key, value) in &imported {
let key = match ns {
Some(n) => format!("{n}/{raw_key}"),
None => raw_key.clone(),
};
if !overwrite && vault.list().contains(&key.as_str()) {
eprintln!(
"{} Skipping existing key '{key}' (use --overwrite to replace)",
"!".yellow()
);
skipped += 1;
continue;
}
vault.set(&key, value, HashMap::new())?;
count += 1;
}
// Best-effort audit entry (errors deliberately ignored via .ok()).
audit(profile)
.append(&AuditEntry::success(profile, "import", None))
.ok();
if skipped > 0 {
println!(
"{} Imported {count} secret(s), skipped {skipped} existing (profile '{profile}')",
"✓".green()
);
} else {
println!(
"{} Imported {count} secret(s) into profile '{profile}'",
"✓".green()
);
}
Ok(())
}
/// Imports credentials from a password-manager/browser CSV export into the
/// vault for `profile`.
///
/// Each CSV row becomes up to three secrets — `<NAME>_USERNAME`,
/// `<NAME>_PASSWORD`, `<NAME>_URL` — where `<NAME>` is the row's name
/// column uppercased with non-alphanumerics collapsed to `_`.
/// `source` selects the header mapping (bitwarden / lastpass / 1password /
/// chrome / edge / firefox). Duplicate keys within the file are either
/// skipped (`skip_duplicates`), allowed to overwrite (`overwrite`), or
/// collected and reported as a single error at the end.
#[cfg(feature = "pm-import-extended")]
fn cmd_import_pw_manager(
profile: &str,
source: &str,
path: &str,
overwrite: bool,
skip_duplicates: bool,
) -> Result<()> {
let p = Path::new(path);
if !p.exists() {
let cwd = std::env::current_dir()
.map(|d| d.display().to_string())
.unwrap_or_else(|_| "(unknown)".to_string());
anyhow::bail!(
"import file not found: '{path}'\n \
Current directory: {cwd}\n\n \
Hints:\n \
• Check the path — use Tab completion or `ls` to confirm the export file exists.\n \
• Bitwarden / 1Password / LastPass: export a CSV from the app, then pass `--file /full/path/to/export.csv`.\n \
• Chrome / Edge: chrome://settings/passwords → Download file; Firefox: about:logins → export.\n"
);
}
let data = std::fs::read_to_string(path).with_context(|| format!("failed to read '{path}'"))?;
let mut lines = data.lines();
// First line must be the CSV header row.
let header_line = lines
.next()
.ok_or_else(|| anyhow::anyhow!("export file is empty"))?;
let headers: Vec<&str> = split_csv_line(header_line);
// Finds the first header matching any of `names`, case-insensitively.
let col = |names: &[&str]| -> Option<usize> {
names
.iter()
.find_map(|n| headers.iter().position(|h| h.eq_ignore_ascii_case(n)))
};
// Per-source column mapping: (name, username, password, url).
// Only the name column is mandatory; for Firefox the URL doubles as name.
let (name_col, user_col, pass_col, url_col) = match source.to_ascii_lowercase().as_str() {
"bitwarden" => (
col(&["name"]).ok_or_else(|| anyhow::anyhow!("missing 'name' column"))?,
col(&["login_username"]),
col(&["login_password"]),
col(&["login_uri"]),
),
"lastpass" => (
col(&["name"]).ok_or_else(|| anyhow::anyhow!("missing 'name' column"))?,
col(&["username"]),
col(&["password"]),
col(&["url"]),
),
"1password" => (
col(&["title", "name"]).ok_or_else(|| anyhow::anyhow!("missing 'title' column"))?,
col(&["username"]),
col(&["password"]),
col(&["url", "website url"]),
),
"chrome" | "edge" => (
col(&["name"]).ok_or_else(|| anyhow::anyhow!("missing 'name' column — export from chrome://settings/passwords or edge://settings/passwords"))?,
col(&["username"]),
col(&["password"]),
col(&["url"]),
),
"firefox" => (
col(&["url"]).ok_or_else(|| anyhow::anyhow!("missing 'url' column — export from Firefox about:logins"))?,
col(&["username"]),
col(&["password"]),
None, ),
other => anyhow::bail!("unknown password-manager source '{other}'"),
};
let mut vault = open_vault(profile)?;
let mut imported = 0usize;
let mut skipped = 0usize;
// Duplicate keys found in the file itself (reported as one error at the end).
let mut duplicate_errors: Vec<String> = Vec::new();
// Keys already written during this run, to detect in-file duplicates.
let mut seen_in_batch: std::collections::HashSet<String> = std::collections::HashSet::new();
for (row_num, line) in lines.enumerate() {
if line.trim().is_empty() {
continue;
}
// NOTE(review): split_csv_line does not collapse doubled quotes ("" -> "),
// so escaped quotes inside quoted fields are preserved verbatim.
let cols: Vec<String> = split_csv_line(line)
.into_iter()
.map(|s| s.to_string())
.collect();
// Safe accessor: rows shorter than the header yield "" for missing cells.
let get = |idx: usize| {
cols.get(idx)
.map(|s| s.as_str())
.unwrap_or("")
.trim()
.to_string()
};
let name = get(name_col);
if name.is_empty() {
// row_num + 2: +1 for 1-based display, +1 for the consumed header row.
eprintln!("{} row {}: empty name, skipping", "!".yellow(), row_num + 2);
continue;
}
// Derive the key prefix: uppercase alphanumerics, everything else -> '_',
// then trim leading/trailing underscores.
let prefix = name
.chars()
.map(|c| {
if c.is_alphanumeric() {
c.to_ascii_uppercase()
} else {
'_'
}
})
.collect::<String>();
let prefix = prefix.trim_matches('_').to_string();
// Stores one secret for this row; empty values are silently dropped.
let mut store = |suffix: &str, value: String| -> Result<()> {
if value.is_empty() {
return Ok(());
}
let key = format!("{prefix}_{suffix}");
// In-file duplicate: skip, defer to error list, or (with --overwrite)
// fall through and overwrite the earlier row's value.
if seen_in_batch.contains(&key) {
if skip_duplicates {
eprintln!(
"{} row {}: duplicate key '{}' in import file — skipping",
"!".yellow(),
row_num + 2,
key
);
skipped += 1;
return Ok(());
} else if !overwrite {
duplicate_errors.push(format!(
" row {}: key '{}' appears more than once in the import file",
row_num + 2,
key
));
return Ok(());
}
}
// Pre-existing vault key: always skipped unless --overwrite
// (only the warning message differs with --skip-duplicates).
if !overwrite && vault.list().contains(&key.as_str()) {
if skip_duplicates {
eprintln!("{} Skipping existing key '{key}'", "!".yellow());
skipped += 1;
return Ok(());
}
eprintln!(
"{} Skipping existing key '{key}' (use --overwrite to replace)",
"!".yellow()
);
skipped += 1;
return Ok(());
}
seen_in_batch.insert(key.clone());
vault.set(&key, &value, HashMap::new())?;
// Best-effort per-key audit entry (errors ignored via .ok()).
audit(profile)
.append(&AuditEntry::success(profile, "import", Some(&key)))
.ok();
imported += 1;
Ok(())
};
if let Some(idx) = user_col {
store("USERNAME", get(idx))?;
}
if let Some(idx) = pass_col {
store("PASSWORD", get(idx))?;
}
if let Some(idx) = url_col {
store("URL", get(idx))?;
}
}
// Duplicates without --skip-duplicates/--overwrite are a hard error, even
// though the non-duplicate rows above were already imported.
if !duplicate_errors.is_empty() {
anyhow::bail!(
"duplicate keys detected in import file (use --skip-duplicates to skip without error, \
or --overwrite to overwrite):\n{}",
duplicate_errors.join("\n")
);
}
println!(
"{} Imported {} secret(s) from {source} export (skipped {skipped} existing)",
"✓".green(),
imported
);
Ok(())
}
/// Minimal CSV line splitter: splits on commas that are outside double
/// quotes, honours `""` as an escaped quote for state tracking, and strips
/// one pair of surrounding quotes (after trimming) from each field.
///
/// Note: doubled quotes inside a field are NOT collapsed to a single quote
/// in the returned slices.
#[cfg(any(feature = "pm-import-extended", test))]
fn split_csv_line(line: &str) -> Vec<&str> {
    // Local copy of the surrounding-quote stripper: trim, then remove one
    // pair of enclosing double quotes if both are present.
    fn strip_quotes(raw: &str) -> &str {
        let t = raw.trim();
        if t.len() >= 2 && t.starts_with('"') && t.ends_with('"') {
            &t[1..t.len() - 1]
        } else {
            t
        }
    }

    let raw = line.as_bytes();
    let mut fields = Vec::new();
    let mut field_start = 0usize;
    let mut inside_quotes = false;
    let mut pos = 0usize;
    while pos < raw.len() {
        match raw[pos] {
            b'"' => {
                if inside_quotes && raw.get(pos + 1) == Some(&b'"') {
                    // Escaped quote: consume both bytes, stay inside quotes.
                    pos += 1;
                } else {
                    inside_quotes = !inside_quotes;
                }
            }
            b',' if !inside_quotes => {
                fields.push(strip_quotes(&line[field_start..pos]));
                field_start = pos + 1;
            }
            _ => {}
        }
        pos += 1;
    }
    fields.push(strip_quotes(&line[field_start..]));
    fields
}
/// Trims whitespace, then removes exactly one pair of enclosing double
/// quotes if — and only if — the trimmed string both starts and ends with
/// one. A lone `"` and unquoted strings are returned unchanged (trimmed).
#[cfg(any(feature = "pm-import-extended", test))]
fn unquote(s: &str) -> &str {
    let trimmed = s.trim();
    // strip_prefix + strip_suffix succeed together only for len >= 2 with a
    // quote at each end, which matches the original starts/ends/len check.
    trimmed
        .strip_prefix('"')
        .and_then(|inner| inner.strip_suffix('"'))
        .unwrap_or(trimmed)
}
// Unit tests for the dotenv-candidate search helpers and the minimal CSV
// parsing used by the password-manager import path.
#[cfg(test)]
mod tests {
use super::*;
// Files under skip-listed directories (e.g. `target/`) must not be reported.
#[test]
fn find_same_named_files_skips_target_and_node_modules() {
let dir = tempfile::tempdir().unwrap();
let good = dir.path().join("app");
std::fs::create_dir_all(&good).unwrap();
std::fs::write(good.join(".env"), "A=1\n").unwrap();
let target = dir.path().join("target");
std::fs::create_dir_all(target.join("nested")).unwrap();
std::fs::write(target.join("nested").join(".env"), "B=2\n").unwrap();
let hits = find_same_named_files_under_dir(dir.path(), OsStr::new(".env"));
assert_eq!(hits.len(), 1);
assert_eq!(hits[0].file_name(), Some(OsStr::new(".env")));
assert!(hits[0].to_string_lossy().contains("app"));
}
// The not-found message should surface nested candidates and a `Try:` line.
#[test]
fn dotenv_not_found_message_lists_nested_candidates() {
let dir = tempfile::tempdir().unwrap();
let sub = dir.path().join("packages").join("api");
std::fs::create_dir_all(&sub).unwrap();
std::fs::write(sub.join(".env"), "X=1\n").unwrap();
let msg = dotenv_import_not_found_message(".env", Path::new(".env"), dir.path());
assert!(msg.contains("packages"));
assert!(msg.contains("Try: tsafe import --from"));
assert!(msg.contains("Hints:"));
}
// Multi-dot file names must round-trip through the basename helper intact.
#[test]
fn basename_for_import_search_handles_dotted_filename() {
let p = Path::new(".env.production");
assert_eq!(
basename_for_import_search(p),
Some(OsStr::new(".env.production"))
);
}
#[test]
fn split_csv_simple_fields() {
let fields = split_csv_line("name,url,username,password");
assert_eq!(fields, vec!["name", "url", "username", "password"]);
}
#[test]
fn split_csv_quoted_fields_preserve_content() {
let fields = split_csv_line(r#""Alice","http://example.com","admin","s3cr3t""#);
assert_eq!(
fields,
vec!["Alice", "http://example.com", "admin", "s3cr3t"]
);
}
// Commas inside quoted fields must not split the field.
#[test]
fn split_csv_quoted_field_containing_comma() {
let fields = split_csv_line(r#""My Service, LLC",https://myservice.com,user,pass"#);
assert_eq!(
fields,
vec!["My Service, LLC", "https://myservice.com", "user", "pass"]
);
}
// Only the field count and the unquoted neighbour are asserted here:
// doubled quotes are intentionally not collapsed by unquote().
#[test]
fn split_csv_double_quoted_escape_within_field() {
let fields = split_csv_line(r#""Say ""hello""",next"#);
assert_eq!(fields.len(), 2);
assert_eq!(fields[1], "next");
}
#[test]
fn split_csv_single_field_no_comma() {
let fields = split_csv_line("onlyone");
assert_eq!(fields, vec!["onlyone"]);
}
#[test]
fn split_csv_empty_fields() {
let fields = split_csv_line("a,,c");
assert_eq!(fields, vec!["a", "", "c"]);
}
#[test]
fn unquote_removes_surrounding_double_quotes() {
assert_eq!(unquote(r#""hello""#), "hello");
}
#[test]
fn unquote_leaves_unquoted_string_unchanged() {
assert_eq!(unquote("hello"), "hello");
}
#[test]
fn unquote_trims_whitespace_before_checking_quotes() {
assert_eq!(unquote(r#" "hello" "#), "hello");
}
// A single quote character is both the start and end byte; the len >= 2
// guard prevents it from being stripped to an empty string.
#[test]
fn unquote_single_char_not_stripped() {
assert_eq!(unquote("\""), "\"");
}
#[test]
fn unquote_empty_quoted_string() {
assert_eq!(unquote(r#""""#), "");
}
}