use std::path::Path;
use crate::current_unix_timestamp;
use tokensave::tokensave::TokenSave;
/// Sync this project's saved-token count into the global DB and, when the
/// count grew, credit the delta to the user's pending-upload counter.
///
/// Best effort: if the global DB cannot be opened, nothing happens.
pub(crate) async fn update_global_db(cg: &TokenSave) {
    let tokens = cg.get_tokens_saved().await.unwrap_or(0);
    let Some(gdb) = tokensave::global_db::GlobalDb::open().await else {
        return;
    };
    let previous = gdb.get_project_tokens(cg.project_root()).await;
    gdb.upsert(cg.project_root(), tokens).await;
    // Only growth is credited; an unchanged or smaller count is ignored.
    if tokens <= previous {
        return;
    }
    let mut config = tokensave::user_config::UserConfig::load();
    config.pending_upload += tokens - previous;
    config.save();
}
/// Attempt to upload `config.pending_upload` tokens to the cloud.
///
/// Does nothing when nothing is pending or uploads are disabled. A previous
/// failed attempt (attempt timestamp newer than the last success) imposes a
/// 60s backoff that applies even when `force` is set; successful uploads are
/// additionally throttled to one per 30s unless `force` is true. On success
/// the pending counter is cleared and the worldwide total cached.
pub(crate) fn try_flush(config: &mut tokensave::user_config::UserConfig, force: bool) {
    let nothing_to_send = config.pending_upload == 0 || !config.upload_enabled;
    if nothing_to_send {
        return;
    }
    let now = current_unix_timestamp();
    // An attempt more recent than the last success means the previous try
    // failed; wait a minute before retrying.
    let last_attempt_failed = config.last_flush_attempt_at > config.last_upload_at;
    if last_attempt_failed && now - config.last_flush_attempt_at < 60 {
        return;
    }
    // Regular throttle: at most one upload every 30s, unless forced.
    if !force && now - config.last_upload_at < 30 {
        return;
    }
    config.last_flush_attempt_at = now;
    // `flush_pending` returns the new worldwide total on success.
    if let Some(worldwide_total) = tokensave::cloud::flush_pending(config.pending_upload) {
        config.pending_upload = 0;
        config.last_upload_at = now;
        config.last_worldwide_total = worldwide_total;
        config.last_worldwide_fetch_at = now;
    }
}
/// Warn on stderr when a newer tokensave release is available.
///
/// The latest-version lookup is cached for 300s (`skip_cache` bypasses the
/// cache). The warning itself is rate-limited to one per 900s unless
/// `skip_suppression` is set, which also widens the comparison from
/// "newer minor version" to "any newer version".
pub(crate) fn check_for_update(
    config: &mut tokensave::user_config::UserConfig,
    skip_cache: bool,
    skip_suppression: bool,
) {
    let current_version = env!("CARGO_PKG_VERSION");
    let now = current_unix_timestamp();
    let cache_is_fresh = !skip_cache && now - config.last_version_check_at < 300;
    let latest = if cache_is_fresh {
        // Nothing usable cached yet — stay quiet.
        if config.cached_latest_version.is_empty() {
            return;
        }
        config.cached_latest_version.clone()
    } else {
        match tokensave::cloud::fetch_latest_version() {
            Some(v) => {
                config.cached_latest_version = v.clone();
                config.last_version_check_at = now;
                config.save();
                v
            }
            // Fetch failed (e.g. offline): don't nag.
            None => return,
        }
    };
    // Suppressed (background) mode only flags minor-version jumps; explicit
    // checks flag any newer version.
    let dominated = if skip_suppression {
        tokensave::cloud::is_newer_version(current_version, &latest)
    } else {
        tokensave::cloud::is_newer_minor_version(current_version, &latest)
    };
    if dominated && (skip_suppression || now - config.last_version_warning_at >= 900) {
        eprintln!(
            "\n\x1b[33mUpdate available: v{} → v{}\x1b[0m\n Run: \x1b[1mtokensave upgrade\x1b[0m",
            current_version, latest
        );
        if !skip_suppression {
            // Remember when we last warned so background checks stay quiet
            // for the next 900s.
            config.last_version_warning_at = now;
            config.save();
        }
    }
}
/// Total size in bytes of all regular files under `dir`, recursively.
///
/// Unreadable directories and entries are silently skipped. Entries whose
/// metadata reports neither a regular file nor a directory (e.g. symlinks,
/// which `DirEntry::metadata` does not traverse) are neither counted nor
/// descended into. Sizes accumulate with saturating addition.
pub(crate) fn tokensave_dir_size(dir: &Path) -> u64 {
    let mut total: u64 = 0;
    // Iterative traversal with an explicit worklist instead of recursion.
    let mut pending = vec![dir.to_path_buf()];
    while let Some(current) = pending.pop() {
        let Ok(entries) = std::fs::read_dir(&current) else {
            continue;
        };
        for entry in entries.flatten() {
            let Ok(meta) = entry.metadata() else {
                continue;
            };
            if meta.is_dir() {
                pending.push(entry.path());
            } else if meta.is_file() {
                total = total.saturating_add(meta.len());
            }
        }
    }
    total
}
/// Resolve the set of project roots an operation should target.
///
/// With `all`, returns every project path recorded in the global DB (empty
/// if the DB cannot be opened); otherwise falls back to the projects
/// discoverable from the current working directory.
pub(crate) async fn gather_target_projects(
    all: bool,
    home_tokensave: &Option<std::path::PathBuf>,
) -> Vec<std::path::PathBuf> {
    if !all {
        return gather_local_projects(home_tokensave);
    }
    match tokensave::global_db::GlobalDb::open().await {
        Some(gdb) => gdb
            .list_project_paths()
            .await
            .into_iter()
            .map(std::path::PathBuf::from)
            .collect(),
        None => Vec::new(),
    }
}
/// Discover local projects starting from the process's current directory.
///
/// Returns an empty list when the current directory cannot be determined.
pub(crate) fn gather_local_projects(
    home_tokensave: &Option<std::path::PathBuf>,
) -> Vec<std::path::PathBuf> {
    match std::env::current_dir() {
        Ok(cwd) => gather_local_projects_from(&cwd, home_tokensave),
        Err(_) => Vec::new(),
    }
}
/// Collect project roots — directories containing `.tokensave/tokensave.db`
/// — reachable from `cwd`: the cwd itself, every ancestor, and every
/// descendant (the descendant scan skips common dependency/build dirs).
///
/// `home_tokensave` names the user-level `.tokensave` directory; it is
/// excluded by canonical-path comparison so a symlinked alias is skipped
/// too. Results are deduplicated, ancestors first.
pub(crate) fn gather_local_projects_from(
    cwd: &Path,
    home_tokensave: &Option<std::path::PathBuf>,
) -> Vec<std::path::PathBuf> {
    use std::collections::HashSet;
    use std::path::PathBuf;
    let canon_home_ts: Option<PathBuf> =
        home_tokensave.as_ref().and_then(|p| p.canonicalize().ok());
    let mut out: Vec<PathBuf> = Vec::new();
    let mut seen: HashSet<PathBuf> = HashSet::new();
    // A `.tokensave` directory marks a project when it holds a DB file and
    // is not the user-level one (compared via canonical paths).
    let is_project_dir = |ts: &Path| -> bool {
        let is_home = canon_home_ts
            .as_ref()
            .map_or(false, |canon| ts.canonicalize().ok().as_ref() == Some(canon));
        !is_home && ts.is_dir() && ts.join("tokensave.db").exists()
    };
    // Upward pass: `ancestors()` yields cwd first, then each parent.
    for dir in cwd.ancestors() {
        if is_project_dir(&dir.join(".tokensave")) && seen.insert(dir.to_path_buf()) {
            out.push(dir.to_path_buf());
        }
    }
    // Downward pass: scan the subtree below cwd for nested projects.
    find_descendant_tokensave(cwd, &canon_home_ts, &mut seen, &mut out);
    out
}
/// Depth-first scan below `start` for `.tokensave/tokensave.db` markers,
/// appending each project root (the parent of the `.tokensave` dir) to
/// `out` when it is not already in `seen`.
///
/// `canon_home_ts` is the canonicalized user-level `.tokensave` directory,
/// which is never reported. Well-known dependency/build directories are not
/// descended into, and a canonical-path visited set prevents revisiting the
/// same directory (e.g. via symlinks).
pub(crate) fn find_descendant_tokensave(
    start: &Path,
    canon_home_ts: &Option<std::path::PathBuf>,
    seen: &mut std::collections::HashSet<std::path::PathBuf>,
    out: &mut Vec<std::path::PathBuf>,
) {
    use std::collections::HashSet;
    // Directory names that never contain user projects worth reporting.
    const SKIP_DIRS: [&str; 9] = [
        "node_modules",
        "target",
        ".git",
        "vendor",
        "dist",
        "build",
        ".next",
        ".venv",
        "__pycache__",
    ];
    let mut visited: HashSet<std::path::PathBuf> = HashSet::new();
    let mut stack: Vec<std::path::PathBuf> = vec![start.to_path_buf()];
    while let Some(dir) = stack.pop() {
        // Dedup on the canonical path (falling back to the raw path) so
        // traversal terminates even with symlink loops.
        let key = dir.canonicalize().unwrap_or_else(|_| dir.clone());
        if !visited.insert(key) {
            continue;
        }
        let Ok(entries) = std::fs::read_dir(&dir) else {
            continue;
        };
        for entry in entries.flatten() {
            // Only real directories matter; `DirEntry::file_type` does not
            // follow symlinks, so symlinked dirs are not descended into.
            let is_dir = entry.file_type().map_or(false, |ft| ft.is_dir());
            if !is_dir {
                continue;
            }
            let path = entry.path();
            let name = entry.file_name();
            let name_str = name.to_string_lossy();
            if name_str == ".tokensave" {
                // Never report the user-level `.tokensave`; compare the
                // canonicalized candidate against the canonical home path.
                let is_home = canon_home_ts
                    .as_ref()
                    .map_or(false, |canon| path.canonicalize().ok().as_ref() == Some(canon));
                if !is_home && path.join("tokensave.db").exists() {
                    if let Some(parent) = path.parent() {
                        let root = parent.to_path_buf();
                        if seen.insert(root.clone()) {
                            out.push(root);
                        }
                    }
                }
                // `.tokensave` dirs are reported, never descended into.
                continue;
            }
            if SKIP_DIRS.contains(&name_str.as_ref()) {
                continue;
            }
            stack.push(path);
        }
    }
}
/// Print a loud red banner on stderr warning that a wipe is about to run,
/// listing every target `.tokensave` directory (plus the global DB path
/// when `all` is set). Purely informational — prints only, deletes nothing.
pub(crate) fn print_flash_warning(all: bool, targets: &[std::path::PathBuf]) {
    const INNER_WIDTH: usize = 64;
    // Display width of the title in terminal columns; the `⚠` glyphs and
    // em-dash make byte/char counts unusable for centering.
    const TITLE_COLS: usize = 43;
    let title = "⚠ DESTRUCTIVE ACTION — TOKENSAVE WIPE ⚠";
    // Center the title inside INNER_WIDTH columns, left-biased on odd pad.
    let pad_total = INNER_WIDTH.saturating_sub(TITLE_COLS);
    let left = pad_total / 2;
    let centered = format!(
        "{}{}{}",
        " ".repeat(left),
        title,
        " ".repeat(pad_total - left)
    );
    let banner = "═".repeat(INNER_WIDTH);
    let blank_red = " ".repeat(INNER_WIDTH);
    eprintln!();
    eprintln!("\x1b[1;31m{banner}\x1b[0m");
    eprintln!("\x1b[1;5;37;41m{blank_red}\x1b[0m");
    eprintln!("\x1b[1;5;37;41m{centered}\x1b[0m");
    eprintln!("\x1b[1;5;37;41m{blank_red}\x1b[0m");
    eprintln!("\x1b[1;31m{banner}\x1b[0m");
    eprintln!();
    // Scope line: global wipe vs. local (current-folder) wipe.
    let scope = if all {
        "\x1b[1;31mThis will wipe \x1b[5mALL\x1b[25;1;31m tracked tokensave projects AND empty the global DB.\x1b[0m"
    } else {
        "\x1b[1;31mThis will wipe local tokensave DBs in the current folder (parents and children).\x1b[0m"
    };
    eprintln!("{scope}");
    eprintln!();
    if targets.is_empty() {
        eprintln!(" \x1b[33m(no project .tokensave directories found)\x1b[0m");
    } else {
        eprintln!("Targets:");
        for t in targets {
            eprintln!(" \x1b[31m✗\x1b[0m {}/.tokensave", t.display());
        }
    }
    if all {
        if let Some(p) = tokensave::global_db::global_db_path() {
            eprintln!(" \x1b[31m✗\x1b[0m {} (global DB)", p.display());
        }
    }
    eprintln!();
    eprintln!("\x1b[1;5;33mThis cannot be undone.\x1b[0m");
    eprintln!();
}
#[cfg(test)]
#[allow(clippy::unwrap_used, clippy::expect_used)]
mod gather_tests {
// Tests for the project-discovery helpers. Each test builds a real
// directory tree inside a tempdir; the tempdir path is canonicalized up
// front so the paths the helpers return (built from the cwd we pass in)
// compare equal even when the temp root sits behind a symlink.
use super::*;
use std::fs;
use std::path::PathBuf;
// Lay down the minimal project marker: `<root>/.tokensave/tokensave.db`.
fn make_project(root: &Path) {
let ts = root.join(".tokensave");
fs::create_dir_all(&ts).unwrap();
fs::write(ts.join("tokensave.db"), b"").unwrap();
}
// A project directly at the cwd is found (via the upward/ancestor walk).
#[test]
fn finds_project_at_cwd() {
let dir = tempfile::tempdir().unwrap();
let cwd = dir.path().canonicalize().unwrap();
make_project(&cwd);
let out = gather_local_projects_from(&cwd, &None);
assert_eq!(out, vec![cwd]);
}
// A project above the cwd (and none at or below it) is still detected.
#[test]
fn finds_project_at_ancestor_only() {
let dir = tempfile::tempdir().unwrap();
let root = dir.path().canonicalize().unwrap();
let nested = root.join("a").join("b").join("c");
fs::create_dir_all(&nested).unwrap();
make_project(&root);
let out = gather_local_projects_from(&nested, &None);
assert!(
out.contains(&root),
"ancestor project must be detected, got {out:?}"
);
}
// A project below the cwd is found by the descendant scan.
#[test]
fn finds_project_at_descendant_only() {
let dir = tempfile::tempdir().unwrap();
let cwd = dir.path().canonicalize().unwrap();
let child = cwd.join("sub").join("proj");
fs::create_dir_all(&child).unwrap();
make_project(&child);
let out = gather_local_projects_from(&cwd, &None);
assert!(
out.contains(&child),
"descendant project must be detected, got {out:?}"
);
}
// Ancestor and descendant projects are both reported, with no duplicates
// (the shared `seen` set deduplicates across the two passes).
#[test]
fn finds_both_ancestor_and_descendant_dedup() {
let dir = tempfile::tempdir().unwrap();
let root = dir.path().canonicalize().unwrap();
let cwd = root.join("mid");
fs::create_dir_all(&cwd).unwrap();
let child = cwd.join("child");
fs::create_dir_all(&child).unwrap();
make_project(&root);
make_project(&child);
let out = gather_local_projects_from(&cwd, &None);
assert!(out.contains(&root));
assert!(out.contains(&child));
let unique: std::collections::HashSet<_> = out.iter().collect();
assert_eq!(unique.len(), out.len(), "duplicates: {out:?}");
}
// Projects buried under skip-listed directories (node_modules etc.) are
// never reported by the descendant scan.
#[test]
fn skips_projects_inside_node_modules() {
let dir = tempfile::tempdir().unwrap();
let cwd = dir.path().canonicalize().unwrap();
let buried = cwd.join("node_modules").join("pkg");
fs::create_dir_all(&buried).unwrap();
make_project(&buried);
let out = gather_local_projects_from(&cwd, &None);
assert!(
!out.contains(&buried),
"projects inside node_modules must be skipped, got {out:?}"
);
}
// The user-level `.tokensave` is excluded by canonical-path comparison, so
// passing a symlinked alias of it must still exclude the real directory.
// If symlink creation fails (e.g. Windows without privileges), the test
// degrades to passing the real path directly.
#[test]
fn skips_home_tokensave_via_canonical_path() {
let dir = tempfile::tempdir().unwrap();
let root = dir.path().canonicalize().unwrap();
let home_real = root.join("home_real");
fs::create_dir_all(&home_real).unwrap();
make_project(&home_real);
let home_alias = root.join("home_alias");
let symlink_ok = symlink_dir(&home_real, &home_alias).is_ok();
let cwd = root.clone();
let alias_ts: PathBuf = if symlink_ok {
home_alias.join(".tokensave")
} else {
home_real.join(".tokensave")
};
let out = gather_local_projects_from(&cwd, &Some(alias_ts));
assert!(
!out.contains(&home_real),
"home `.tokensave` (canonical) must be skipped, got {out:?}"
);
if symlink_ok {
assert!(
!out.contains(&home_alias),
"home `.tokensave` (alias) must be skipped, got {out:?}"
);
}
}
// Platform-specific directory-symlink helpers for the test above.
#[cfg(unix)]
fn symlink_dir(src: &Path, dst: &Path) -> std::io::Result<()> {
std::os::unix::fs::symlink(src, dst)
}
#[cfg(windows)]
fn symlink_dir(src: &Path, dst: &Path) -> std::io::Result<()> {
std::os::windows::fs::symlink_dir(src, dst)
}
// No `.tokensave` anywhere: the result is empty, not an error.
#[test]
fn empty_dir_yields_empty_result() {
let dir = tempfile::tempdir().unwrap();
let cwd = dir.path().canonicalize().unwrap();
let out = gather_local_projects_from(&cwd, &None);
assert!(out.is_empty(), "got {out:?}");
}
}