use clap::{Parser, Subcommand};
use std::io::{self, BufRead, Write};
use std::path::{Path, PathBuf};
use std::process;
use tokensave::tokensave::TokenSave;
use tokensave::context::{format_context_as_json, format_context_as_markdown};
use tokensave::types::*;
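/// Seconds since the Unix epoch, or 0 if the system clock predates it.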
fn current_unix_timestamp() -> i64 {
std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs() as i64
}
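/// Braille-frame progress spinner rendered on stderr by a background thread.
/// The message is shared behind a mutex so callers can update it while the
/// spinner runs; `done` stops the thread and prints a final checkmark line.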
struct Spinner {
message: std::sync::Arc<std::sync::Mutex<String>>,
stop: std::sync::Arc<std::sync::atomic::AtomicBool>,
handle: Option<std::thread::JoinHandle<()>>,
}
impl Spinner {
fn new() -> Self {
let message = std::sync::Arc::new(std::sync::Mutex::new(String::new()));
let stop = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));
let msg = message.clone();
let stp = stop.clone();
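// Hide the cursor while animating; `done` restores it.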
let _ = write!(std::io::stderr(), "\x1b[?25l");
let _ = std::io::stderr().flush();
let handle = std::thread::spawn(move || {
let frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
let mut idx = 0usize;
while !stp.load(std::sync::atomic::Ordering::Relaxed) {
let text = msg.lock().unwrap().clone();
if !text.is_empty() {
let frame = frames[idx % frames.len()];
idx += 1;
let display: std::borrow::Cow<str> = if text.len() > 50 {
// Advance to a char boundary so slicing can't panic on multi-byte UTF-8.
let mut start = text.len() - 49;
while !text.is_char_boundary(start) {
start += 1;
}
format!("…{}", &text[start..]).into()
} else {
text.as_str().into()
};
let mut stderr = std::io::stderr();
let _ = write!(stderr, "\r\x1b[2K{} {}", frame, display);
let _ = stderr.flush();
}
std::thread::sleep(std::time::Duration::from_millis(80));
}
});
Self {
message,
stop,
handle: Some(handle),
}
}
fn set_message(&self, msg: &str) {
*self.message.lock().unwrap() = msg.to_string();
}
fn done(self, message: &str) {
self.stop
.store(true, std::sync::atomic::Ordering::Relaxed);
if let Some(h) = self.handle {
let _ = h.join();
}
let mut stderr = std::io::stderr();
let _ = write!(stderr, "\x1b[?25h");
let _ = writeln!(stderr, "\r\x1b[2K\x1b[32m✔\x1b[0m {}", message);
let _ = stderr.flush();
}
}
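// Top-level CLI: a single optional subcommand. Running bare `tokensave`
// falls through to `handle_no_command`.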
#[derive(Parser)]
#[command(name = "tokensave", about = "Code intelligence for 15 languages — semantic graph queries instead of file reads")]
struct Cli {
#[command(subcommand)]
command: Option<Commands>,
}
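// One variant per `tokensave` subcommand; `hook-pre-tool-use` is hidden from
// help output and is intended to be invoked by agent hooks rather than users.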
#[derive(Subcommand)]
enum Commands {
Sync {
path: Option<String>,
#[arg(short, long)]
force: bool,
#[arg(long = "skip-folder", num_args = 1..)]
skip_folders: Vec<String>,
},
Status {
path: Option<String>,
#[arg(short, long)]
json: bool,
},
Query {
search: String,
#[arg(short, long)]
path: Option<String>,
#[arg(short, long, default_value = "10")]
limit: usize,
},
Context {
task: String,
#[arg(short, long)]
path: Option<String>,
#[arg(short = 'n', long, default_value = "20")]
max_nodes: usize,
#[arg(short, long, default_value = "markdown")]
format: String,
},
Files {
#[arg(short, long)]
path: Option<String>,
#[arg(long)]
filter: Option<String>,
#[arg(long)]
pattern: Option<String>,
#[arg(short, long)]
json: bool,
},
Affected {
files: Vec<String>,
#[arg(short, long)]
path: Option<String>,
#[arg(long)]
stdin: bool,
#[arg(short, long, default_value = "5")]
depth: usize,
#[arg(short, long)]
filter: Option<String>,
#[arg(short, long)]
json: bool,
#[arg(short, long)]
quiet: bool,
},
#[command(name = "install", visible_alias = "claude-install")]
Install {
#[arg(long)]
agent: Option<String>,
},
#[command(name = "uninstall", visible_alias = "claude-uninstall")]
Uninstall {
#[arg(long)]
agent: Option<String>,
},
#[command(name = "hook-pre-tool-use", hide = true)]
HookPreToolUse,
Serve {
#[arg(short, long)]
path: Option<String>,
},
#[command(name = "disable-upload-counter")]
DisableUploadCounter,
#[command(name = "enable-upload-counter")]
EnableUploadCounter,
#[command(name = "gitignore")]
Gitignore {
#[arg(short, long)]
path: Option<String>,
action: Option<String>,
},
Doctor {
#[arg(long)]
agent: Option<String>,
},
Daemon {
#[arg(long)]
foreground: bool,
#[arg(long)]
stop: bool,
#[arg(long)]
status: bool,
#[arg(long)]
enable_autostart: bool,
#[arg(long)]
disable_autostart: bool,
},
}
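/// Parse arguments and run the selected command, printing any error to stderr
/// and exiting non-zero.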
#[tokio::main]
async fn main() {
let cli = Cli::parse();
if let Err(e) = run(cli).await {
eprintln!("Error: {}", e);
process::exit(1);
}
}
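/// Dispatch a parsed CLI invocation. Cross-cutting work happens first: the
/// pending tokens-saved counter is flushed (Sync and Status bypass the upload
/// rate limit), first-run users get the telemetry opt-out notice, and every
/// command except `install` checks for a stale agent install.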
async fn run(cli: Cli) -> tokensave::errors::Result<()> {
let command = match cli.command {
Some(cmd) => cmd,
None => return handle_no_command().await,
};
let is_first_run = tokensave::user_config::UserConfig::is_fresh();
let is_force_flush = matches!(command, Commands::Sync { .. } | Commands::Status { .. });
let mut user_config = tokensave::user_config::UserConfig::load();
try_flush(&mut user_config, is_force_flush);
user_config.save();
if is_first_run {
eprintln!(
"note: tokensave uploads anonymous token-saved counts to a worldwide counter.\n\
\x20 Run `tokensave disable-upload-counter` to opt out."
);
}
if !matches!(command, Commands::Install { .. }) {
tokensave::agents::claude::check_install_stale();
}
match command {
Commands::Sync { path, force, skip_folders } => {
let project_path = resolve_path(path);
if project_path.join(".codegraph").is_dir() {
eprintln!(
"warning: found legacy .codegraph/ directory at '{}'. \
tokensave now uses .tokensave/ — the old directory can be safely deleted.",
project_path.display()
);
}
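// Kick off the release-version check in the background while we sync.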
let version_handle = std::thread::spawn(tokensave::cloud::fetch_latest_version);
if force || !TokenSave::is_initialized(&project_path) {
if !force {
eprintln!("No existing index found — performing full index");
}
init_and_index(&project_path, &skip_folders).await?;
} else {
let mut cg = TokenSave::open(&project_path).await?;
cg.add_skip_folders(&skip_folders);
let spinner = Spinner::new();
let result = cg
.sync_with_progress(|phase, detail| {
let msg = if detail.is_empty() {
phase.to_string()
} else {
format!("{phase} {detail}")
};
spinner.set_message(&msg);
})
.await?;
spinner.done(&format!(
"sync done — {} added, {} modified, {} removed in {}ms",
result.files_added,
result.files_modified,
result.files_removed,
result.duration_ms
));
update_global_db(&cg).await;
}
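// Surface the background version check: cache the result, then warn at most
// once per 15 minutes if a newer release exists.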
if let Ok(Some(latest)) = version_handle.join() {
let current_version = env!("CARGO_PKG_VERSION");
let now = current_unix_timestamp();
let mut config = tokensave::user_config::UserConfig::load();
config.cached_latest_version = latest.clone();
config.last_version_check_at = now;
config.save();
if tokensave::cloud::is_newer_version(current_version, &latest)
&& now - config.last_version_warning_at >= 900
{
let method = tokensave::cloud::detect_install_method();
let cmd = tokensave::cloud::upgrade_command(&method);
eprintln!(
"\n\x1b[33mUpdate available: v{} → v{}\x1b[0m\n Run: \x1b[1m{}\x1b[0m",
current_version, latest, cmd
);
config.last_version_warning_at = now;
config.save();
}
}
}
Commands::Status { path, json } => {
let project_path = resolve_path(path);
let cg = if TokenSave::is_initialized(&project_path) {
TokenSave::open(&project_path).await?
} else {
eprint!(
"No TokenSave index found at '{}'. Create one now? [Y/n] ",
project_path.display()
);
io::stderr().flush().ok();
let mut answer = String::new();
io::stdin()
.lock()
.read_line(&mut answer)
.map_err(|e| tokensave::errors::TokenSaveError::Config {
message: format!("failed to read stdin: {e}"),
})?;
let answer = answer.trim();
if answer.is_empty() || answer.eq_ignore_ascii_case("y") {
init_and_index(&project_path, &[]).await?
} else {
return Ok(());
}
};
let stats = cg.get_stats().await?;
if json {
println!(
"{}",
serde_json::to_string_pretty(&stats).unwrap_or_default()
);
} else {
let tokens_saved = cg.get_tokens_saved().await.unwrap_or(0);
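// "Global" here means tokens saved by this user's other projects, so the
// current project's own count is subtracted from the machine-wide total.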
let global_tokens_saved = match tokensave::global_db::GlobalDb::open().await {
Some(gdb) => {
gdb.upsert(&project_path, tokens_saved).await;
gdb.global_tokens_saved().await
.map(|total| total.saturating_sub(tokens_saved))
.filter(|&other| other > 0)
}
None => None,
};
let mut config = tokensave::user_config::UserConfig::load();
let now = current_unix_timestamp();
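// Serve the worldwide total from a 60s cache, refetching on expiry and
// falling back to the last known value when the fetch fails.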
let worldwide = if now - config.last_worldwide_fetch_at < 60 {
if config.last_worldwide_total > 0 {
Some(config.last_worldwide_total)
} else {
None
}
} else if let Some(total) = tokensave::cloud::fetch_worldwide_total() {
config.last_worldwide_total = total;
config.last_worldwide_fetch_at = now;
config.save();
Some(total)
} else if config.last_worldwide_total > 0 {
Some(config.last_worldwide_total)
} else {
None
};
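// Country flags refresh at most every 30 minutes; keep the cached set when a
// refresh comes back empty.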
let country_flags = if now - config.last_flags_fetch_at < 1800 {
config.cached_country_flags.clone()
} else {
let fresh = tokensave::cloud::fetch_country_flags();
if !fresh.is_empty() {
config.cached_country_flags = fresh.clone();
config.last_flags_fetch_at = now;
config.save();
}
if fresh.is_empty() && !config.cached_country_flags.is_empty() {
config.cached_country_flags.clone()
} else {
fresh
}
};
print!("{}", include_str!("resources/logo.ansi"));
tokensave::display::print_status_table(&stats, tokens_saved, global_tokens_saved, worldwide, &country_flags);
check_for_update(&mut config, false, true);
}
}
Commands::Query {
search,
path,
limit,
} => {
let project_path = resolve_path(path);
let cg = ensure_initialized(&project_path).await?;
let results = cg.search(&search, limit).await?;
if results.is_empty() {
println!("No results found for '{}'", search);
} else {
for r in &results {
println!(
"{} ({}) - {}:{}",
r.node.name,
r.node.kind.as_str(),
r.node.file_path,
r.node.start_line
);
if let Some(sig) = &r.node.signature {
println!(" {}", sig);
}
}
}
}
Commands::Context {
task,
path,
max_nodes,
format,
} => {
let project_path = resolve_path(path);
let cg = ensure_initialized(&project_path).await?;
let output_format = if format == "json" {
OutputFormat::Json
} else {
OutputFormat::Markdown
};
let options = BuildContextOptions {
max_nodes,
format: output_format.clone(),
..Default::default()
};
let context = cg.build_context(&task, &options).await?;
match output_format {
OutputFormat::Json => {
println!("{}", format_context_as_json(&context));
}
OutputFormat::Markdown => {
println!("{}", format_context_as_markdown(&context));
}
}
}
Commands::Files {
path,
filter,
pattern,
json,
} => {
let project_path = resolve_path(path);
let cg = ensure_initialized(&project_path).await?;
let mut files = cg.get_all_files().await?;
files.sort_by(|a, b| a.path.cmp(&b.path));
if let Some(ref dir) = filter {
let prefix = if dir.ends_with('/') {
dir.clone()
} else {
format!("{}/", dir)
};
files.retain(|f| f.path.starts_with(&prefix) || f.path == dir.as_str());
}
if let Some(ref pat) = pattern {
if let Ok(glob) = glob::Pattern::new(pat) {
files.retain(|f| glob.matches(&f.path));
} else {
eprintln!("warning: invalid glob pattern '{}', ignoring", pat);
}
}
if json {
let items: Vec<serde_json::Value> = files
.iter()
.map(|f| {
serde_json::json!({
"path": f.path,
"size": f.size,
"node_count": f.node_count,
})
})
.collect();
println!(
"{}",
serde_json::to_string_pretty(&items).unwrap_or_default()
);
} else {
println!("{} indexed files", files.len());
for f in &files {
println!(
" {} ({} bytes, {} symbols)",
f.path, f.size, f.node_count
);
}
}
}
Commands::Affected {
files,
path,
stdin,
depth,
filter,
json,
quiet,
} => {
let project_path = resolve_path(path);
let cg = ensure_initialized(&project_path).await?;
let mut changed: Vec<String> = files;
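// Append newline-separated paths piped in via --stdin.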
if stdin {
let stdin_handle = io::stdin();
for line in stdin_handle.lock().lines() {
if let Ok(line) = line {
let trimmed = line.trim().to_string();
if !trimmed.is_empty() {
changed.push(trimmed);
}
}
}
}
if changed.is_empty() {
eprintln!("No files specified. Pass file paths as arguments or use --stdin.");
return Ok(());
}
let affected = find_affected_tests(&cg, &changed, depth, filter.as_deref()).await?;
if json {
let output = serde_json::json!({
"changed_files": changed,
"affected_tests": affected,
"count": affected.len(),
});
println!(
"{}",
serde_json::to_string_pretty(&output).unwrap_or_default()
);
} else if quiet {
for f in &affected {
println!("{}", f);
}
} else if affected.is_empty() {
println!("No affected test files found.");
} else {
println!("{} affected test file(s):", affected.len());
for f in &affected {
println!(" {}", f);
}
}
}
Commands::Install { agent } => {
let home = tokensave::agents::home_dir().ok_or_else(|| tokensave::errors::TokenSaveError::Config {
message: "could not determine home directory".to_string(),
})?;
let tokensave_bin = tokensave::agents::which_tokensave().ok_or_else(|| tokensave::errors::TokenSaveError::Config {
message: "tokensave not found on PATH. Install it first:\n \
cargo install tokensave\n \
brew install aovestdipaperino/tap/tokensave".to_string(),
})?;
let mut user_cfg = tokensave::user_config::UserConfig::load();
tokensave::agents::migrate_installed_agents(&home, &mut user_cfg);
if let Some(id) = agent {
let ag = tokensave::agents::get_integration(&id)?;
let ctx = tokensave::agents::InstallContext {
home: home.clone(),
tokensave_bin: tokensave_bin.clone(),
tool_permissions: tokensave::agents::EXPECTED_TOOL_PERMS,
};
ag.install(&ctx)?;
if !user_cfg.installed_agents.contains(&id) {
user_cfg.installed_agents.push(id);
}
user_cfg.save();
} else {
let (to_install, to_uninstall) =
tokensave::agents::pick_integrations_interactive(&home, &user_cfg.installed_agents)?;
for id in &to_uninstall {
let ag = tokensave::agents::get_integration(id)?;
let ctx = tokensave::agents::InstallContext {
home: home.clone(),
tokensave_bin: tokensave_bin.clone(),
tool_permissions: tokensave::agents::EXPECTED_TOOL_PERMS,
};
ag.uninstall(&ctx)?;
user_cfg.installed_agents.retain(|a| a != id);
}
for id in &to_install {
let ag = tokensave::agents::get_integration(id)?;
let ctx = tokensave::agents::InstallContext {
home: home.clone(),
tokensave_bin: tokensave_bin.clone(),
tool_permissions: tokensave::agents::EXPECTED_TOOL_PERMS,
};
ag.install(&ctx)?;
if !user_cfg.installed_agents.contains(id) {
user_cfg.installed_agents.push(id.clone());
}
}
user_cfg.save();
}
tokensave::agents::offer_git_post_commit_hook(&tokensave_bin);
tokensave::daemon::offer_daemon_autostart();
}
Commands::Uninstall { agent } => {
let home = tokensave::agents::home_dir().ok_or_else(|| tokensave::errors::TokenSaveError::Config {
message: "could not determine home directory".to_string(),
})?;
let mut user_cfg = tokensave::user_config::UserConfig::load();
tokensave::agents::migrate_installed_agents(&home, &mut user_cfg);
if let Some(id) = agent {
let ag = tokensave::agents::get_integration(&id)?;
let ctx = tokensave::agents::InstallContext {
home,
tokensave_bin: String::new(),
tool_permissions: tokensave::agents::EXPECTED_TOOL_PERMS,
};
ag.uninstall(&ctx)?;
user_cfg.installed_agents.retain(|a| a != &id);
user_cfg.save();
} else {
for id in user_cfg.installed_agents.clone() {
if let Ok(ag) = tokensave::agents::get_integration(&id) {
let ctx = tokensave::agents::InstallContext {
home: home.clone(),
tokensave_bin: String::new(),
tool_permissions: tokensave::agents::EXPECTED_TOOL_PERMS,
};
ag.uninstall(&ctx).ok();
}
}
user_cfg.installed_agents.clear();
user_cfg.save();
eprintln!("All agent integrations removed.");
}
}
Commands::HookPreToolUse => {
hook_pre_tool_use();
}
Commands::Serve { path } => {
let project_path = resolve_path(path);
let cg = ensure_initialized(&project_path).await?;
let server = tokensave::mcp::McpServer::new(cg).await;
server.run().await?;
}
Commands::DisableUploadCounter => {
let mut config = tokensave::user_config::UserConfig::load();
config.upload_enabled = false;
config.save();
eprintln!("Worldwide counter upload disabled. You can re-enable with `tokensave enable-upload-counter`.");
}
Commands::EnableUploadCounter => {
let mut config = tokensave::user_config::UserConfig::load();
config.upload_enabled = true;
config.save();
eprintln!("Worldwide counter upload enabled.");
}
Commands::Gitignore { path, action } => {
let project_path = resolve_path(path);
let mut config = tokensave::config::load_config(&project_path)?;
match action.as_deref() {
Some("on") => {
config.git_ignore = true;
tokensave::config::save_config(&project_path, &config)?;
eprintln!("gitignore enabled — .gitignore rules will be respected during indexing.");
eprintln!("Run `tokensave sync` to re-index with the new setting.");
}
Some("off") => {
config.git_ignore = false;
tokensave::config::save_config(&project_path, &config)?;
eprintln!("gitignore disabled — .gitignore rules will be ignored during indexing.");
eprintln!("Run `tokensave sync` to re-index with the new setting.");
}
Some(other) => {
return Err(tokensave::errors::TokenSaveError::Config {
message: format!("unknown action '{other}': expected 'on' or 'off'"),
});
}
None => {
let status = if config.git_ignore { "on" } else { "off" };
eprintln!("gitignore: {status}");
}
}
}
Commands::Doctor { agent } => {
tokensave::doctor::run_doctor(agent.as_deref()).await;
}
Commands::Daemon { foreground, stop, status, enable_autostart, disable_autostart } => {
if stop {
tokensave::daemon::stop()?;
} else if status {
let code = tokensave::daemon::status();
std::process::exit(code);
} else if enable_autostart {
tokensave::daemon::enable_autostart()?;
} else if disable_autostart {
tokensave::daemon::disable_autostart()?;
} else {
tokensave::daemon::run(foreground).await?;
}
}
}
Ok(())
}
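/// Called when no subcommand is given: print help if an index already exists
/// in the current directory, otherwise offer to create one interactively.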
async fn handle_no_command() -> tokensave::errors::Result<()> {
let project_path = resolve_path(None);
if TokenSave::is_initialized(&project_path) {
let _ = <Cli as clap::CommandFactory>::command().print_help();
eprintln!();
return Ok(());
}
eprint!(
"No TokenSave index found at '{}'. Create one now? [Y/n] ",
project_path.display()
);
io::stderr().flush().ok();
let mut answer = String::new();
io::stdin()
.lock()
.read_line(&mut answer)
.map_err(|e| tokensave::errors::TokenSaveError::Config {
message: format!("failed to read stdin: {}", e),
})?;
let answer = answer.trim();
if answer.is_empty() || answer.eq_ignore_ascii_case("y") {
init_and_index(&project_path, &[]).await?;
}
Ok(())
}
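/// Initialize a TokenSave index if one does not exist, then run a full
/// indexing pass with a spinner showing per-file progress and an ETA.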
async fn init_and_index(project_path: &Path, skip_folders: &[String]) -> tokensave::errors::Result<TokenSave> {
// `resolve_path` may return a relative path (e.g. `tokensave sync .`), so
// only the directory's existence is asserted here, not absoluteness.
debug_assert!(project_path.is_dir(), "init_and_index: project_path is not a directory");
let mut cg = if TokenSave::is_initialized(project_path) {
TokenSave::open(project_path).await?
} else {
let cg = TokenSave::init(project_path).await?;
eprintln!("Initialized TokenSave at {}", project_path.display());
cg
};
cg.add_skip_folders(skip_folders);
let spinner = Spinner::new();
let index_start = std::time::Instant::now();
let result = cg.index_all_with_progress(|current, total, file| {
let elapsed = index_start.elapsed().as_secs_f64();
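// Estimate remaining time from the average cost of the files completed so far.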
let eta = if current > 1 {
let per_file = elapsed / (current - 1) as f64;
let remaining = per_file * (total - current) as f64;
if remaining >= 1.0 {
format!(" (ETA: {remaining:.0}s)")
} else {
String::new()
}
} else {
String::new()
};
spinner.set_message(&format!("[{current}/{total}] indexing {file}{eta}"));
}).await?;
spinner.done(&format!(
"indexing done — {} files, {} nodes, {} edges in {}ms",
result.file_count, result.node_count, result.edge_count, result.duration_ms
));
update_global_db(&cg).await;
Ok(cg)
}
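/// Open an existing index, or fail with a hint to run `tokensave sync` first.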
async fn ensure_initialized(project_path: &Path) -> tokensave::errors::Result<TokenSave> {
if TokenSave::is_initialized(project_path) {
return TokenSave::open(project_path).await;
}
Err(tokensave::errors::TokenSaveError::Config {
message: format!(
"no TokenSave index found at '{}' — run 'tokensave sync' first",
project_path.display()
),
})
}
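/// Mirror this project's tokens-saved counter into the per-user global
/// database, accumulating any increase as a pending counter upload.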
async fn update_global_db(cg: &TokenSave) {
let tokens = cg.get_tokens_saved().await.unwrap_or(0);
if let Some(gdb) = tokensave::global_db::GlobalDb::open().await {
let previous = gdb.get_project_tokens(cg.project_root()).await;
gdb.upsert(cg.project_root(), tokens).await;
if tokens > previous {
let mut config = tokensave::user_config::UserConfig::load();
config.pending_upload += tokens - previous;
config.save();
}
}
}
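/// Upload the pending tokens-saved delta to the worldwide counter, rate
/// limited: failed attempts back off for 60s, and successful uploads are at
/// least 30s apart unless `force` is set.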
fn try_flush(config: &mut tokensave::user_config::UserConfig, force: bool) {
if config.pending_upload == 0 || !config.upload_enabled {
return;
}
let now = current_unix_timestamp();
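// Back off for 60s after an attempt that did not produce a successful upload.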
if config.last_flush_attempt_at > config.last_upload_at
&& now - config.last_flush_attempt_at < 60
{
return;
}
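// Outside of Sync/Status (`force`), allow at most one upload every 30s.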
if !force && now - config.last_upload_at < 30 {
return;
}
config.last_flush_attempt_at = now;
if let Some(worldwide_total) = tokensave::cloud::flush_pending(config.pending_upload) {
config.pending_upload = 0;
config.last_upload_at = now;
config.last_worldwide_total = worldwide_total;
config.last_worldwide_fetch_at = now;
}
}
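/// Print an upgrade hint if a newer release exists. The latest version is
/// cached for 5 minutes unless `skip_cache` is set, and the warning is
/// limited to once per 15 minutes unless `skip_suppression` is set.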
fn check_for_update(config: &mut tokensave::user_config::UserConfig, skip_cache: bool, skip_suppression: bool) {
let current_version = env!("CARGO_PKG_VERSION");
let now = current_unix_timestamp();
let latest = if !skip_cache && now - config.last_version_check_at < 300 {
if config.cached_latest_version.is_empty() {
return;
}
config.cached_latest_version.clone()
} else if let Some(v) = tokensave::cloud::fetch_latest_version() {
config.cached_latest_version = v.clone();
config.last_version_check_at = now;
config.save();
v
} else {
return;
};
if tokensave::cloud::is_newer_version(current_version, &latest)
&& (skip_suppression || now - config.last_version_warning_at >= 900)
{
let method = tokensave::cloud::detect_install_method();
let cmd = tokensave::cloud::upgrade_command(&method);
eprintln!(
"\n\x1b[33mUpdate available: v{} → v{}\x1b[0m\n Run: \x1b[1m{}\x1b[0m",
current_version, latest, cmd
);
if !skip_suppression {
config.last_version_warning_at = now;
config.save();
}
}
}
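/// PreToolUse hook body: reads the tool call from the `TOOL_INPUT` environment
/// variable and emits a block/allow decision as JSON on stdout, steering
/// code-exploration agent dispatches toward tokensave's MCP tools.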
fn hook_pre_tool_use() {
let tool_input = std::env::var("TOOL_INPUT").unwrap_or_default();
let block_msg = serde_json::json!({
"decision": "block",
"reason": "STOP: Use tokensave MCP tools (tokensave_context, tokensave_search, \
tokensave_callees, tokensave_callers, tokensave_impact, tokensave_files, \
tokensave_affected) instead of agents for code research. Tokensave is \
faster and more precise for symbol relationships, call paths, and code \
structure. Only use agents for code exploration if you have already tried \
tokensave and it cannot answer the question."
});
let parsed: serde_json::Value =
serde_json::from_str(&tool_input).unwrap_or_else(|_| serde_json::json!({}));
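// Dispatches to the Explore subagent are always blocked.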
if parsed.get("subagent_type").and_then(|v| v.as_str()) == Some("Explore") {
println!("{}", block_msg);
return;
}
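// Heuristic: block agent prompts that read like whole-codebase exploration.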
if let Some(prompt) = parsed.get("prompt").and_then(|v| v.as_str()) {
let lower = prompt.to_ascii_lowercase();
let exploration_patterns = [
"explore", "codebase structure", "codebase architecture", "codebase overview",
"source files contents", "read every", "full contents", "entire codebase",
"architecture and structure", "call graph", "call path", "call chain",
"symbol relat", "symbol lookup", "who calls", "callers of", "callees of",
];
if exploration_patterns.iter().any(|pat| lower.contains(pat)) {
println!("{}", block_msg);
return;
}
}
println!(r#"{{"decision": "allow"}}"#);
}
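/// Resolve an optional path argument, defaulting to the current working
/// directory (or "." if it cannot be determined).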
fn resolve_path(path: Option<String>) -> PathBuf {
match path {
Some(p) => PathBuf::from(p),
None => std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")),
}
}
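/// Breadth-first walk of the reverse-dependency graph: starting from the
/// changed files, follow dependents edges up to `max_depth` hops and collect
/// every file matching the test filter (a custom glob if provided, otherwise
/// the built-in test-file heuristic). Backs `tokensave affected`, e.g.
/// `git diff --name-only | tokensave affected --stdin`.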
async fn find_affected_tests(
cg: &TokenSave,
changed_files: &[String],
max_depth: usize,
custom_filter: Option<&str>,
) -> tokensave::errors::Result<Vec<String>> {
debug_assert!(!changed_files.is_empty(), "find_affected_tests called with no changed files");
// `max_depth == 0` is valid and means "no traversal": only changed files
// that are themselves tests are reported.
use std::collections::{HashSet, VecDeque};
let custom_glob = custom_filter.and_then(|p| glob::Pattern::new(p).ok());
let matches_test = |path: &str| -> bool {
if let Some(ref g) = custom_glob {
g.matches(path)
} else {
tokensave::tokensave::is_test_file(path)
}
};
let mut affected: HashSet<String> = HashSet::new();
let mut visited: HashSet<String> = HashSet::new();
let mut queue: VecDeque<(String, usize)> = VecDeque::new();
for file in changed_files {
if matches_test(file) {
affected.insert(file.clone());
}
if visited.insert(file.clone()) {
queue.push_back((file.clone(), 0));
}
}
while let Some((file, depth)) = queue.pop_front() {
if depth >= max_depth {
continue;
}
let dependents = cg.get_file_dependents(&file).await?;
for dep in dependents {
if !visited.insert(dep.clone()) {
continue;
}
if matches_test(&dep) {
affected.insert(dep.clone());
} else {
queue.push_back((dep, depth + 1));
}
}
}
let mut result: Vec<String> = affected.into_iter().collect();
result.sort();
Ok(result)
}