use anyhow::Result;
use log::debug;
use std::{
    collections::{HashMap, HashSet, hash_map::Entry},
    env,
    ffi::OsStr,
    fs,
    hash::{Hash, Hasher},
    path::{Path, PathBuf},
    time::{SystemTime, UNIX_EPOCH},
};
/// Registry of the tool's user-visible names: package name, index-DB file
/// extension, local config file names, and export directory names. Most
/// values are derived from `CARGO_PKG_NAME` at compile time (see [`UblxNames::new`]).
pub struct UblxNames {
    /// Crate/package name, taken from `CARGO_PKG_NAME`.
    pub pkg_name: &'static str,
    /// Plural form used as the subdirectory name for per-root DBs.
    pub pkg_name_plural: &'static str,
    /// File extension for index DB files (`.<pkg_name>`).
    pub index_db_file_ext: &'static str,
    /// Visible (non-dotfile) local config TOML name (`<pkg_name>.toml`).
    pub local_config_visible_toml: &'static str,
    /// Hidden (dotfile) local config TOML name (`.<pkg_name>.toml`).
    pub local_config_hidden_toml: &'static str,
    /// Name of the nefax DB kept inside an indexed root.
    pub nefax_db: &'static str,
    /// Directory name used for "zahir" exports inside a root.
    pub zahir_export_dir_name: &'static str,
    /// Directory name used for lens exports inside a root.
    pub lens_export_dir_name: &'static str,
}
impl Default for UblxNames {
fn default() -> Self {
Self::new()
}
}
impl UblxNames {
    /// Builds the complete name table at compile time; every derived name is
    /// assembled from `CARGO_PKG_NAME` via `concat!`.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            pkg_name: env!("CARGO_PKG_NAME"),
            pkg_name_plural: "ubli",
            nefax_db: ".nefaxer",
            index_db_file_ext: concat!(".", env!("CARGO_PKG_NAME")),
            local_config_hidden_toml: concat!(".", env!("CARGO_PKG_NAME"), ".toml"),
            local_config_visible_toml: concat!(env!("CARGO_PKG_NAME"), ".toml"),
            zahir_export_dir_name: concat!(env!("CARGO_PKG_NAME"), "-export"),
            lens_export_dir_name: concat!(env!("CARGO_PKG_NAME"), "-lenses"),
        }
    }
}
/// Global compile-time instance of [`UblxNames`] used throughout this module.
pub const UBLX_NAMES: UblxNames = UblxNames::new();
/// Number of hex characters in a path hash (a 64-bit value → 16 nibbles).
const PATH_HASH_HEX_LEN: usize = 16;
/// Returns `true` when `s` consists of exactly 16 ASCII hex digits.
#[must_use]
pub fn is_hex_hash16(s: &str) -> bool {
    if s.len() != PATH_HASH_HEX_LEN {
        return false;
    }
    s.bytes().all(|b| b.is_ascii_hexdigit())
}
/// Extracts the trailing 16-hex-digit hash from a DB file stem such as
/// `name_0123456789abcdef`; `None` when no such suffix is present.
#[must_use]
pub fn hash_suffix_from_db_stem(stem: &str) -> Option<&str> {
    match stem.rsplit_once('_') {
        Some((_, tail)) if is_hex_hash16(tail) => Some(tail),
        _ => None,
    }
}
/// Hashes the lossy UTF-8 form of `path` into a fixed 16-character lowercase
/// hex string, used as a stable per-root key for cache file names.
#[must_use]
pub fn path_to_hex(path: &Path) -> String {
    use std::collections::hash_map::DefaultHasher;
    let mut h = DefaultHasher::new();
    path.to_string_lossy().hash(&mut h);
    format!("{:016x}", h.finish())
}
/// Maps a directory name to a filesystem-safe identifier: every character
/// outside `[A-Za-z0-9_-]` becomes `_`, leading/trailing underscores are
/// stripped, and an empty result falls back to `"root"`.
fn sanitize_name_for_fs(name: &str) -> String {
    let replaced: String = name
        .chars()
        .map(|c| {
            if c.is_ascii_alphanumeric() || c == '-' || c == '_' {
                c
            } else {
                '_'
            }
        })
        .collect();
    match replaced.trim_matches('_') {
        "" => "root".to_string(),
        kept => kept.to_string(),
    }
}
/// Per-user configuration directory for this tool, or `None` when the
/// platform's base environment variable is unset.
fn config_dir() -> Option<PathBuf> {
    #[cfg(windows)]
    {
        let base = env::var("APPDATA").ok()?;
        Some(PathBuf::from(base).join(UBLX_NAMES.pkg_name))
    }
    #[cfg(not(windows))]
    {
        let home = env::var("HOME").ok()?;
        Some(PathBuf::from(home).join(".config").join(UBLX_NAMES.pkg_name))
    }
}
/// Per-user cache/data directory for this tool, or `None` when the
/// platform's base environment variable is unset.
fn cache_dir() -> Option<PathBuf> {
    #[cfg(windows)]
    {
        let base = env::var("LOCALAPPDATA").ok()?;
        Some(PathBuf::from(base).join(UBLX_NAMES.pkg_name))
    }
    #[cfg(not(windows))]
    {
        let home = env::var("HOME").ok()?;
        Some(
            PathBuf::from(home)
                .join(".local")
                .join("share")
                .join(UBLX_NAMES.pkg_name),
        )
    }
}
/// Directory holding the per-root index databases, under the user cache dir.
#[must_use]
fn db_dir() -> Option<PathBuf> {
    Some(cache_dir()?.join(UBLX_NAMES.pkg_name_plural))
}
/// Subdirectory of the cache dir where "recently opened root" records live.
const RECENTS_SUBDIR: &str = "recents";
/// Scoring weight: each recorded open counts as one hour (in nanoseconds)
/// of extra recency when ranking entries.
const RECENTS_OPEN_WEIGHT_NS: u128 = 3_600_000_000_000;
/// Directory containing the recents record files, if the cache dir resolves.
#[must_use]
fn recents_dir() -> Option<PathBuf> {
    Some(cache_dir()?.join(RECENTS_SUBDIR))
}
/// Current wall-clock time as nanoseconds since the Unix epoch; saturates to
/// `u64::MAX` on overflow and yields 0 when the clock reads before the epoch.
#[must_use]
fn now_ns() -> u64 {
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => u64::try_from(elapsed.as_nanos()).unwrap_or(u64::MAX),
        Err(_) => 0,
    }
}
/// One on-disk "recents" record: which root was opened, how often, and when.
#[derive(Debug, Clone)]
struct RecentsFileData {
    // Root directory the record describes (canonicalized where possible).
    path: PathBuf,
    // Total number of recorded session opens.
    times_opened: u64,
    // Unix-epoch nanoseconds of the most recent open/selection.
    last_open_ns: u64,
}
/// Serializes a record into the `key=value` line format used by recents files.
fn fmt_recents_txt(data: &RecentsFileData) -> String {
    let mut out = String::new();
    out.push_str(&format!("path={}\n", data.path.to_string_lossy()));
    out.push_str(&format!("times_opened={}\n", data.times_opened));
    out.push_str(&format!("last_open_ns={}\n", data.last_open_ns));
    out
}
/// Parses a recents file. Accepts the current `key=value` format as well as a
/// legacy format holding only a bare path. Returns `None` for empty content,
/// for `key=value` content that never sets `path`, or when any non-empty line
/// lacks an `=` (strict: one malformed line invalidates the whole file).
fn parse_recents_txt(content: &str) -> Option<RecentsFileData> {
    let body = content.trim();
    if body.is_empty() {
        return None;
    }
    // Legacy layout: the whole file is a single bare path.
    if !body.contains('=') {
        return Some(RecentsFileData {
            path: PathBuf::from(body),
            times_opened: 0,
            last_open_ns: 0,
        });
    }
    let mut parsed_path: Option<PathBuf> = None;
    let mut opened: u64 = 0;
    let mut last_ns: u64 = 0;
    for raw in body.lines() {
        let line = raw.trim();
        if line.is_empty() {
            continue;
        }
        let (key, value) = line.split_once('=')?;
        match key.trim() {
            "path" => parsed_path = Some(PathBuf::from(value.trim())),
            "times_opened" => opened = value.trim().parse().unwrap_or(0),
            "last_open_ns" => last_ns = value.trim().parse().unwrap_or(0),
            _ => {}
        }
    }
    Some(RecentsFileData {
        path: parsed_path?,
        times_opened: opened,
        last_open_ns: last_ns,
    })
}
/// Reads and parses a recents file; `None` on any I/O or parse failure.
fn read_recents_file(path: &Path) -> Option<RecentsFileData> {
    fs::read_to_string(path)
        .ok()
        .and_then(|content| parse_recents_txt(&content))
}
/// Ranking score for a recents entry: last-open timestamp plus one hour of
/// credit per recorded open, using saturating arithmetic throughout.
#[must_use]
fn recents_composite_score(data: &RecentsFileData) -> u128 {
    let open_bonus = u128::from(data.times_opened).saturating_mul(RECENTS_OPEN_WEIGHT_NS);
    u128::from(data.last_open_ns).saturating_add(open_bonus)
}
/// Whether a recents record file already exists for `dir` (keyed by the
/// 16-hex-digit path hash).
#[must_use]
pub fn has_recents_entry_for_dir(dir: &Path) -> bool {
    recents_dir()
        .is_some_and(|recents| recents.join(format!("{}.txt", path_to_hex(dir))).exists())
}
/// Decides whether the first-run prompt should be shown: only for full
/// (non-snapshot) sessions where no index DB existed before the ensure step.
/// The third flag is logged for diagnostics only and does not affect the result.
#[must_use]
pub fn should_show_initial_prompt(
    snapshot_only: bool,
    had_index_db_before_ensure: bool,
    had_any_cached_db_before_this_root: bool,
) -> bool {
    // De Morgan form of `!snapshot_only && !had_index_db_before_ensure`.
    let initial_prompt = !(snapshot_only || had_index_db_before_ensure);
    debug!(
        "initial_prompt={initial_prompt} (had_index_db_before_ensure={had_index_db_before_ensure})"
    );
    debug!("cached ublx roots seen before startup: {had_any_cached_db_before_this_root}");
    initial_prompt
}
/// `true` when at least one per-root index DB file exists in the cache dir.
#[must_use]
pub fn has_any_cached_ublx_db() -> bool {
    let Some(dir) = db_dir() else {
        return false;
    };
    fs::read_dir(dir).is_ok_and(|rd| {
        rd.flatten().any(|entry| {
            entry
                .file_name()
                .to_str()
                .is_some_and(|name| name.ends_with(UBLX_NAMES.index_db_file_ext))
        })
    })
}
pub fn remember_indexed_root_path(dir: &Path) -> Result<()> {
let Some(recents) = recents_dir() else {
return Ok(());
};
fs::create_dir_all(&recents)?;
let key = path_to_hex(dir);
let canon = dir.canonicalize().unwrap_or_else(|_| dir.to_path_buf());
let path_file = recents.join(format!("{key}.txt"));
let mut data = read_recents_file(&path_file).unwrap_or(RecentsFileData {
path: canon.clone(),
times_opened: 0,
last_open_ns: 0,
});
data.path = canon;
data.last_open_ns = now_ns();
fs::write(path_file, fmt_recents_txt(&data))?;
Ok(())
}
pub fn record_prior_root_selected(dir: &Path) -> Result<()> {
let Some(recents) = recents_dir() else {
return Ok(());
};
let key = path_to_hex(dir);
let canon = dir.canonicalize().unwrap_or_else(|_| dir.to_path_buf());
let path_file = recents.join(format!("{key}.txt"));
if !path_file.exists() {
return Ok(());
}
let Some(mut data) = read_recents_file(&path_file) else {
return Ok(());
};
data.path = canon;
data.last_open_ns = now_ns();
fs::write(path_file, fmt_recents_txt(&data))?;
Ok(())
}
pub fn record_ublx_session_open(dir: &Path) -> Result<()> {
let Some(recents) = recents_dir() else {
return Ok(());
};
let key = path_to_hex(dir);
let canon = dir.canonicalize().unwrap_or_else(|_| dir.to_path_buf());
let path_file = recents.join(format!("{key}.txt"));
if !path_file.exists() {
return Ok(());
}
let Some(mut data) = read_recents_file(&path_file) else {
return Ok(());
};
data.path = canon;
data.times_opened = data.times_opened.saturating_add(1);
data.last_open_ns = now_ns();
fs::write(path_file, fmt_recents_txt(&data))?;
Ok(())
}
fn collect_recents_entries() -> Vec<RecentsFileData> {
let Some(dir) = recents_dir() else {
return Vec::new();
};
let Ok(rd) = fs::read_dir(&dir) else {
return Vec::new();
};
let mut best: std::collections::HashMap<PathBuf, RecentsFileData> =
std::collections::HashMap::new();
for entry in rd.flatten() {
let p = entry.path();
if p.extension().and_then(|e| e.to_str()) != Some("txt") {
continue;
}
let Some(mut data) = read_recents_file(&p) else {
continue;
};
let canon = data
.path
.canonicalize()
.unwrap_or_else(|_| data.path.clone());
data.path.clone_from(&canon);
match best.entry(canon) {
Entry::Occupied(mut o) => {
let ex = o.get_mut();
if data.last_open_ns > ex.last_open_ns
|| (data.last_open_ns == ex.last_open_ns && data.times_opened > ex.times_opened)
{
*ex = data;
}
}
Entry::Vacant(v) => {
v.insert(data);
}
}
}
best.into_values().collect()
}
/// Every root with a valid recents record and a live index DB, deduplicated
/// and sorted by the display-string form of the path.
///
/// A record is accepted only when its file name is a bare 16-hex-digit hash,
/// the recorded directory still exists, the root's DB file exists, and the
/// hash embedded in the DB file name matches the record's file name — this
/// guards against stale records pointing at renamed or moved roots.
#[must_use]
pub fn all_indexed_roots_alphabetical() -> Vec<PathBuf> {
    let Some(recents) = recents_dir() else {
        return Vec::new();
    };
    let Ok(rd) = fs::read_dir(&recents) else {
        return Vec::new();
    };
    let mut out: HashSet<PathBuf> = HashSet::new();
    for entry in rd.flatten() {
        let p = entry.path();
        if p.extension().and_then(|e| e.to_str()) != Some("txt") {
            continue;
        }
        let Some(fname) = p.file_stem().and_then(|s| s.to_str()) else {
            continue;
        };
        if !is_hex_hash16(fname) {
            continue;
        }
        let Some(data) = read_recents_file(&p) else {
            continue;
        };
        let path = data
            .path
            .canonicalize()
            .unwrap_or_else(|_| data.path.clone());
        if !path.is_dir() {
            continue;
        }
        let db_path = UblxPaths::new(&path).db();
        if !db_path.exists() {
            continue;
        }
        let Some(db_stem) = db_path.file_stem().and_then(|s| s.to_str()) else {
            continue;
        };
        let Some(hash_from_db) = hash_suffix_from_db_stem(db_stem) else {
            continue;
        };
        // Stale-record guard: the record's key must match the DB's embedded hash.
        if hash_from_db != fname {
            continue;
        }
        out.insert(path);
    }
    let mut paths: Vec<PathBuf> = out.into_iter().collect();
    // `sort_by_key` would rebuild the display String on every comparison;
    // `sort_by_cached_key` computes each key exactly once.
    paths.sort_by_cached_key(|p| p.display().to_string());
    paths
}
#[must_use]
pub fn prior_indexed_roots(current: &Path) -> Vec<PathBuf> {
prior_indexed_roots_scored(current, usize::MAX)
.into_iter()
.map(|(p, _)| p)
.collect()
}
/// Collects recents entries for roots other than `current` that still exist
/// and still have an index DB, sorted best-score-first (ties broken by path
/// order) and truncated to at most `max` results.
fn prior_indexed_roots_scored(current: &Path, max: usize) -> Vec<(PathBuf, RecentsFileData)> {
    let current_canon = current
        .canonicalize()
        .unwrap_or_else(|_| current.to_path_buf());
    let mut scored: Vec<(PathBuf, RecentsFileData)> = collect_recents_entries()
        .into_iter()
        .filter_map(|mut data| {
            let dir = data
                .path
                .canonicalize()
                .unwrap_or_else(|_| data.path.clone());
            if dir == current_canon || !dir.is_dir() || !UblxPaths::new(&dir).db().exists() {
                return None;
            }
            data.path.clone_from(&dir);
            Some((dir, data))
        })
        .collect();
    scored.sort_by(|a, b| {
        recents_composite_score(&b.1)
            .cmp(&recents_composite_score(&a.1))
            .then_with(|| a.0.cmp(&b.0))
    });
    scored.truncate(max);
    scored
}
/// Up to `max` previously indexed roots other than `current`, best first.
#[must_use]
pub fn prior_indexed_roots_recent(current: &Path, max: usize) -> Vec<PathBuf> {
    prior_indexed_roots_scored(current, max)
        .into_iter()
        .map(|(root, _)| root)
        .collect()
}
/// Absolute path of the user-global config TOML, if a config dir resolves.
#[must_use]
pub fn global_config_toml() -> Option<PathBuf> {
    let dir = config_dir()?;
    Some(dir.join(UBLX_NAMES.local_config_visible_toml))
}
/// Cache location of the last config applied for `dir`, keyed by path hash.
#[must_use]
pub fn last_applied_config_path(dir: &Path) -> Option<PathBuf> {
    let cache = cache_dir()?;
    Some(cache.join("configs").join(format!("{}.toml", path_to_hex(dir))))
}
/// `true` when `path_str` names exactly the local config TOML (visible or
/// hidden variant) at the top of a root: not an absolute path, not nested,
/// tolerating backslash separators and a leading `./`.
#[must_use]
pub fn rel_path_is_exact_local_config_toml(path_str: &str) -> bool {
    let candidate = path_str.trim();
    if Path::new(candidate).is_absolute() {
        return false;
    }
    let unified = candidate.replace('\\', "/");
    let stripped = unified.trim_start_matches("./");
    [
        UBLX_NAMES.local_config_visible_toml,
        UBLX_NAMES.local_config_hidden_toml,
    ]
    .contains(&stripped)
}
/// Filesystem layout helper for a single indexed root directory: computes the
/// locations of the index DB (and its sidecars), config TOMLs, and log file.
#[derive(Clone, Debug)]
pub struct UblxPaths {
    /// Directory being indexed. The field name suggests an absolute path is
    /// expected, but `UblxPaths::new` does not canonicalize or enforce this —
    /// NOTE(review): confirm callers always pass absolute paths.
    pub dir_to_ublx_abs: PathBuf,
}
impl UblxPaths {
    /// Creates a paths helper for `dir_to_ublx` (stored as given; callers are
    /// expected to pass an absolute path — `new` does not canonicalize).
    #[must_use]
    pub fn new(dir_to_ublx: &Path) -> Self {
        Self {
            dir_to_ublx_abs: dir_to_ublx.to_path_buf(),
        }
    }
    /// File stem for this root's DB: `<sanitized-dir-name>_<16-hex path hash>`.
    fn db_stem(&self) -> String {
        let dir_name = self
            .dir_to_ublx_abs
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("root");
        let safe_name = sanitize_name_for_fs(dir_name);
        let hash = path_to_hex(&self.dir_to_ublx_abs);
        format!("{safe_name}_{hash}")
    }
    /// DB stem plus the index-DB file extension.
    #[must_use]
    fn db_filename(&self) -> String {
        format!("{}{}", self.db_stem(), UBLX_NAMES.index_db_file_ext)
    }
    /// Joins `db_stem() + suffix` onto the DB directory, falling back to the
    /// root directory itself when no cache dir resolves. Shared by all the
    /// DB/WAL/SHM/tmp accessors below (previously duplicated six times).
    fn db_file_with(&self, suffix: &str) -> PathBuf {
        self.db_dir()
            .unwrap_or_else(|| self.dir_to_ublx_abs.clone())
            .join(format!("{}{suffix}", self.db_stem()))
    }
    /// Directory that holds all index DBs, if the user cache dir resolves.
    #[must_use]
    pub fn db_dir(&self) -> Option<PathBuf> {
        db_dir()
    }
    /// Resolves the DB directory and creates it (and parents) if needed.
    ///
    /// # Errors
    /// Fails when the user cache directory cannot be resolved or created.
    pub fn ensure_db_dir(&self) -> Result<PathBuf> {
        let dir = self
            .db_dir()
            .ok_or_else(|| anyhow::anyhow!("could not resolve user cache directory"))?;
        fs::create_dir_all(&dir)?;
        Ok(dir)
    }
    /// Log file placed inside the root directory (`<pkg_name>.log`).
    #[must_use]
    pub fn log_path(&self) -> PathBuf {
        self.dir_to_ublx_abs
            .join(format!("{}.log", UBLX_NAMES.pkg_name))
    }
    /// Hidden local config TOML inside the root.
    #[must_use]
    pub fn hidden_toml(&self) -> PathBuf {
        self.dir_to_ublx_abs
            .join(UBLX_NAMES.local_config_hidden_toml)
    }
    /// Visible local config TOML inside the root.
    #[must_use]
    pub fn visible_toml(&self) -> PathBuf {
        self.dir_to_ublx_abs
            .join(UBLX_NAMES.local_config_visible_toml)
    }
    /// Whether `path`'s file name matches either local config TOML name.
    #[must_use]
    pub fn is_config_file(&self, path: &Path) -> bool {
        let Some(name) = path.file_name() else {
            return false;
        };
        name == OsStr::new(UBLX_NAMES.local_config_visible_toml)
            || name == OsStr::new(UBLX_NAMES.local_config_hidden_toml)
    }
    /// Existing local config, preferring the hidden variant; `None` when
    /// neither file exists. (Visible path is only computed when needed.)
    #[must_use]
    pub fn toml_path(&self) -> Option<PathBuf> {
        let hidden = self.hidden_toml();
        if hidden.exists() {
            return Some(hidden);
        }
        let visible = self.visible_toml();
        visible.exists().then_some(visible)
    }
    /// Where to write local config: the existing file if any, else the hidden variant.
    #[must_use]
    pub fn local_config_path_for_write(&self) -> PathBuf {
        self.toml_path().unwrap_or_else(|| self.hidden_toml())
    }
    /// Main index DB file for this root.
    #[must_use]
    pub fn db(&self) -> PathBuf {
        self.db_dir()
            .unwrap_or_else(|| self.dir_to_ublx_abs.clone())
            .join(self.db_filename())
    }
    /// Nefax DB kept inside the root directory itself.
    #[must_use]
    pub fn nefax_db(&self) -> PathBuf {
        self.dir_to_ublx_abs.join(UBLX_NAMES.nefax_db)
    }
    /// Temporary DB used while (re)building the index.
    #[must_use]
    pub fn tmp(&self) -> PathBuf {
        self.db_file_with(&format!("_tmp{}", UBLX_NAMES.index_db_file_ext))
    }
    /// WAL sidecar of the temporary DB.
    #[must_use]
    pub fn tmp_wal(&self) -> PathBuf {
        self.db_file_with(&format!("_tmp{}-wal", UBLX_NAMES.index_db_file_ext))
    }
    /// SHM sidecar of the temporary DB.
    #[must_use]
    pub fn tmp_shm(&self) -> PathBuf {
        self.db_file_with(&format!("_tmp{}-shm", UBLX_NAMES.index_db_file_ext))
    }
    /// WAL sidecar of the main DB.
    #[must_use]
    pub fn wal(&self) -> PathBuf {
        self.db_file_with(&format!("{}-wal", UBLX_NAMES.index_db_file_ext))
    }
    /// SHM sidecar of the main DB.
    #[must_use]
    pub fn shm(&self) -> PathBuf {
        self.db_file_with(&format!("{}-shm", UBLX_NAMES.index_db_file_ext))
    }
    /// Relative names the indexer should skip inside the root.
    /// NOTE(review): the lens export dir is not excluded here — confirm intended.
    #[must_use]
    pub fn exclude(&self) -> Vec<String> {
        vec![
            UBLX_NAMES.nefax_db.to_string(),
            UBLX_NAMES.local_config_visible_toml.to_string(),
            UBLX_NAMES.local_config_hidden_toml.to_string(),
            UBLX_NAMES.zahir_export_dir_name.to_string(),
        ]
    }
    /// Deletes temporary/WAL/SHM files left over from indexing.
    ///
    /// # Errors
    /// Propagates the first `remove_file` failure.
    pub fn remove_aux_files(&self) -> Result<(), anyhow::Error> {
        for p in [
            self.tmp(),
            self.tmp_wal(),
            self.tmp_shm(),
            self.wal(),
            self.shm(),
        ] {
            if p.exists() {
                fs::remove_file(&p)?;
            }
        }
        Ok(())
    }
    /// User-global config TOML (not root-specific).
    #[must_use]
    pub fn global_config(&self) -> Option<PathBuf> {
        global_config_toml()
    }
    /// User cache directory for this tool.
    #[allow(dead_code)]
    #[must_use]
    pub fn cache_dir(&self) -> Option<PathBuf> {
        cache_dir()
    }
    /// Cache path of the last config applied for this root.
    #[must_use]
    pub fn last_applied_config_path(&self) -> Option<PathBuf> {
        last_applied_config_path(&self.dir_to_ublx_abs)
    }
}
/// Convenience wrapper: log file path for the given root directory.
#[must_use]
pub fn get_log_path(dir_to_ublx: &Path) -> PathBuf {
    let paths = UblxPaths::new(dir_to_ublx);
    paths.log_path()
}
/// Normalizes a relative path for policy comparison: backslashes become
/// forward slashes, then every leading `./` and every trailing `/` is stripped.
#[must_use]
pub fn normalize_rel_path_for_policy(s: &str) -> String {
    let unified = s.replace('\\', "/");
    unified
        .trim_start_matches("./")
        .trim_end_matches('/')
        .to_string()
}
/// Whether `rel` equals `prefix` or lies beneath it, i.e. `prefix` is
/// followed immediately by a `/` component boundary in `rel`.
#[must_use]
pub fn path_is_under_or_equal(rel: &str, prefix: &str) -> bool {
    if rel == prefix {
        return true;
    }
    rel.strip_prefix(prefix)
        .is_some_and(|rest| rest.starts_with('/'))
}