pub mod claude;
pub mod cline;
pub mod codex;
pub mod copilot;
pub mod cursor;
pub mod gemini;
pub mod opencode;
pub mod roo_code;
pub mod zed;
use std::path::{Path, PathBuf};
use crate::errors::Result;
use crate::errors::TokenSaveError;
pub use claude::ClaudeIntegration;
pub use cline::ClineIntegration;
pub use codex::CodexIntegration;
pub use copilot::CopilotIntegration;
pub use cursor::CursorIntegration;
pub use gemini::GeminiIntegration;
pub use opencode::OpenCodeIntegration;
pub use roo_code::RooCodeIntegration;
pub use zed::ZedIntegration;
/// Contract each supported AI coding agent implements so tokensave can wire
/// its MCP server into (and out of) that agent's configuration.
pub trait AgentIntegration {
    /// Human-readable display name (used in interactive menus).
    fn name(&self) -> &'static str;
    /// Stable identifier used on the CLI and in saved config (e.g. "roo-code").
    fn id(&self) -> &'static str;
    /// Adds the tokensave MCP server to this agent's configuration.
    fn install(&self, ctx: &InstallContext) -> Result<()>;
    /// Removes the tokensave MCP server from this agent's configuration.
    fn uninstall(&self, ctx: &InstallContext) -> Result<()>;
    /// Checks the installation, tallying failures/warnings into `dc`.
    fn healthcheck(&self, dc: &mut DoctorCounters, ctx: &HealthcheckContext);
    /// Whether this agent appears to be present on the machine.
    /// Default is `false`; implementations override with real detection.
    fn is_detected(&self, _home: &Path) -> bool {
        false
    }
    /// Whether this agent's config already references tokensave.
    /// Default is `false`; implementations override with a real check.
    fn has_tokensave(&self, _home: &Path) -> bool {
        false
    }
}
/// Inputs shared by every `AgentIntegration::install`/`uninstall` call.
pub struct InstallContext {
    // User home directory that agent config paths are resolved against.
    pub home: PathBuf,
    // Path (or command name) of the tokensave binary to register.
    pub tokensave_bin: String,
    // Pre-approved tool permission strings (see EXPECTED_TOOL_PERMS).
    pub tool_permissions: &'static [&'static str],
}
/// Inputs shared by every `AgentIntegration::healthcheck` call.
pub struct HealthcheckContext {
    // User home directory that agent config paths are resolved against.
    pub home: PathBuf,
    // Project being diagnosed (for project-local agent config checks).
    pub project_path: PathBuf,
}
/// Looks up a single integration by its CLI id.
///
/// Returns a `Config` error listing the valid ids when `id` is unknown.
pub fn get_integration(id: &str) -> Result<Box<dyn AgentIntegration>> {
    let agent: Box<dyn AgentIntegration> = match id {
        "claude" => Box::new(ClaudeIntegration),
        "opencode" => Box::new(OpenCodeIntegration),
        "codex" => Box::new(CodexIntegration),
        "gemini" => Box::new(GeminiIntegration),
        "copilot" => Box::new(CopilotIntegration),
        "cursor" => Box::new(CursorIntegration),
        "zed" => Box::new(ZedIntegration),
        "cline" => Box::new(ClineIntegration),
        "roo-code" => Box::new(RooCodeIntegration),
        _ => {
            return Err(TokenSaveError::Config {
                message: format!(
                    "unknown agent: \"{id}\". Available agents: {}",
                    available_integrations().join(", ")
                ),
            });
        }
    };
    Ok(agent)
}
/// Every supported integration, in canonical display order.
pub fn all_integrations() -> Vec<Box<dyn AgentIntegration>> {
    let mut agents: Vec<Box<dyn AgentIntegration>> = Vec::with_capacity(9);
    agents.push(Box::new(ClaudeIntegration));
    agents.push(Box::new(OpenCodeIntegration));
    agents.push(Box::new(CodexIntegration));
    agents.push(Box::new(GeminiIntegration));
    agents.push(Box::new(CopilotIntegration));
    agents.push(Box::new(CursorIntegration));
    agents.push(Box::new(ZedIntegration));
    agents.push(Box::new(ClineIntegration));
    agents.push(Box::new(RooCodeIntegration));
    agents
}
/// Agent ids accepted by `get_integration`, in the same order as
/// `all_integrations`.
pub fn available_integrations() -> Vec<&'static str> {
    [
        "claude", "opencode", "codex", "gemini", "copilot", "cursor", "zed", "cline",
        "roo-code",
    ]
    .to_vec()
}
/// Tally of problems found while printing `doctor`-style status lines.
pub struct DoctorCounters {
    // Hard failures reported via `fail`.
    pub issues: u32,
    // Soft warnings reported via `warn`.
    pub warnings: u32,
}

// Clippy `new_without_default`: a zero-arg `new` should be mirrored by
// `Default` so the type composes with `..Default::default()` and derives.
impl Default for DoctorCounters {
    fn default() -> Self {
        Self::new()
    }
}

impl DoctorCounters {
    /// Creates a counter set with both tallies at zero.
    pub fn new() -> Self {
        Self {
            issues: 0,
            warnings: 0,
        }
    }
    /// Prints a green check line; does not change any tally.
    pub fn pass(&self, msg: &str) {
        eprintln!(" \x1b[32m✔\x1b[0m {msg}");
    }
    /// Prints a red cross line and counts one issue.
    pub fn fail(&mut self, msg: &str) {
        eprintln!(" \x1b[31m✘\x1b[0m {msg}");
        self.issues += 1;
    }
    /// Prints a yellow bang line and counts one warning.
    pub fn warn(&mut self, msg: &str) {
        eprintln!(" \x1b[33m!\x1b[0m {msg}");
        self.warnings += 1;
    }
    /// Prints an indented informational line; does not change any tally.
    pub fn info(&self, msg: &str) {
        eprintln!(" {msg}");
    }
}
/// Leniently loads a JSON file; missing, unreadable, or malformed files all
/// yield an empty object. Use `load_json_file_strict` when corruption must
/// not be silently discarded.
pub fn load_json_file(path: &Path) -> serde_json::Value {
    if !path.exists() {
        return serde_json::json!({});
    }
    std::fs::read_to_string(path)
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
        .unwrap_or_else(|| serde_json::json!({}))
}
/// Strictly loads a JSON file: a missing or blank file yields an empty
/// object, but an unreadable or malformed file produces an error so corrupt
/// user config is never silently replaced.
pub fn load_json_file_strict(path: &Path) -> Result<serde_json::Value> {
    if !path.exists() {
        return Ok(serde_json::json!({}));
    }
    let contents = match std::fs::read_to_string(path) {
        Ok(text) => text,
        Err(e) => {
            return Err(TokenSaveError::Config {
                message: format!("cannot read {}: {e}", path.display()),
            });
        }
    };
    if contents.trim().is_empty() {
        return Ok(serde_json::json!({}));
    }
    match serde_json::from_str(&contents) {
        Ok(value) => Ok(value),
        Err(e) => Err(TokenSaveError::Config {
            message: format!(
                "cannot parse {} as JSON: {e}\n \
                 Hint: fix the JSON syntax manually and re-run the command,\n \
                 or delete the file to start fresh",
                path.display()
            ),
        }),
    }
}
/// Copies `path` to `<path>.bak` before a config rewrite.
///
/// The copy is first written to `<path>.bak.new` and then renamed into place,
/// so a partially written backup never sits at the final `.bak` location.
/// Returns `Ok(None)` when `path` does not exist (nothing to back up),
/// otherwise `Ok(Some(backup_path))`.
pub fn backup_config_file(path: &Path) -> Result<Option<PathBuf>> {
    if !path.exists() {
        return Ok(None);
    }
    let backup_path = PathBuf::from(format!("{}.bak", path.display()));
    let staging_path = PathBuf::from(format!("{}.bak.new", path.display()));
    let content = std::fs::read(path).map_err(|e| TokenSaveError::Config {
        message: format!(
            "failed to read {} for backup: {e}\n \
             Hint: check file permissions",
            path.display()
        ),
    })?;
    std::fs::write(&staging_path, &content).map_err(|e| {
        // Best-effort cleanup of a possibly partially written staging file.
        std::fs::remove_file(&staging_path).ok();
        TokenSaveError::Config {
            message: format!(
                "failed to write backup staging file {}: {e}\n \
                 Hint: check available disk space and permissions",
                staging_path.display()
            ),
        }
    })?;
    std::fs::rename(&staging_path, &backup_path).map_err(|e| {
        // Rename failed, so the staging file is still present — remove it.
        std::fs::remove_file(&staging_path).ok();
        TokenSaveError::Config {
            message: format!(
                "failed to create backup {}: {e}\n \
                 Hint: check file permissions",
                backup_path.display()
            ),
        }
    })?;
    Ok(Some(backup_path))
}
/// Copies `backup` back over `original` after a failed write, reporting the
/// outcome on stderr. Best-effort: on failure it prints manual recovery
/// instructions instead of returning an error.
pub fn restore_config_backup(original: &Path, backup: &Path) {
    if let Err(e) = std::fs::copy(backup, original) {
        eprintln!(
            "\x1b[31m✗\x1b[0m Failed to auto-restore {} from backup: {e}",
            original.display()
        );
        eprintln!(
            " Manual recovery: cp '{}' '{}'",
            backup.display(),
            original.display()
        );
    } else {
        eprintln!(
            "\x1b[33m⚠\x1b[0m Restored {} from backup",
            original.display()
        );
    }
}
/// Atomically writes `value` as pretty-printed JSON to `path`.
///
/// Protocol: serialize → re-parse sanity check → ensure parent dirs →
/// write to `<path>.new` → rename over `path`. The original file is only
/// replaced by the final rename, so a crash mid-write never leaves a
/// truncated config. `backup` is only used to enrich the error message if
/// the rename fails.
pub fn safe_write_json_file(
    path: &Path,
    value: &serde_json::Value,
    backup: Option<&Path>,
) -> Result<()> {
    let pretty = serde_json::to_string_pretty(value).map_err(|e| TokenSaveError::Config {
        message: format!("failed to serialize JSON for {}: {e}", path.display()),
    })?;
    // Paranoia check: never write bytes we cannot parse back ourselves.
    if serde_json::from_str::<serde_json::Value>(&pretty).is_err() {
        return Err(TokenSaveError::Config {
            message: format!(
                "internal error: serialized JSON for {} failed re-parse validation.\n \
                 This is a bug in tokensave — please report it.",
                path.display()
            ),
        });
    }
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).map_err(|e| TokenSaveError::Config {
            message: format!("cannot create directory {}: {e}", parent.display()),
        })?;
    }
    // Trailing newline keeps the file friendly to diffs/editors.
    let content = format!("{pretty}\n");
    let new_path = PathBuf::from(format!("{}.new", path.display()));
    if let Err(e) = std::fs::write(&new_path, &content) {
        // Clean up a possibly partial staging file before bailing.
        std::fs::remove_file(&new_path).ok();
        return Err(TokenSaveError::Config {
            message: format!(
                "failed to write new config file {}: {e}",
                new_path.display()
            ),
        });
    }
    if let Err(e) = std::fs::rename(&new_path, path) {
        // Rename failed: remove staging; the original file is untouched.
        std::fs::remove_file(&new_path).ok();
        let hint = if let Some(b) = backup {
            format!(
                "\n Backup is at: {}\n \
                 The original file was NOT modified.",
                b.display()
            )
        } else {
            "\n The original file was NOT modified.".to_string()
        };
        return Err(TokenSaveError::Config {
            message: format!(
                "failed to rename {} → {}: {e}{hint}",
                new_path.display(),
                path.display()
            ),
        });
    }
    Ok(())
}
/// Backs up `path` (when it exists) and then atomically rewrites it with
/// `value`, announcing the write on stderr.
pub fn write_json_file(path: &Path, value: &serde_json::Value) -> Result<()> {
    backup_config_file(path)
        .and_then(|backup| safe_write_json_file(path, value, backup.as_deref()))?;
    eprintln!("\x1b[32m✔\x1b[0m Wrote {}", path.display());
    Ok(())
}
/// Locates the tokensave binary: prefers the currently running executable if
/// its file name starts with "tokensave", otherwise scans PATH. Returned
/// paths always use forward slashes.
pub fn which_tokensave() -> Option<String> {
    let from_current_exe = std::env::current_exe().ok().and_then(|exe| {
        let file_name = exe.file_name()?.to_str()?;
        if file_name.starts_with("tokensave") {
            Some(normalize_path_separators(&exe.to_string_lossy()))
        } else {
            None
        }
    });
    if from_current_exe.is_some() {
        return from_current_exe;
    }
    // Fall back to a manual PATH scan with the platform's conventions.
    let (separator, bin_name) = if cfg!(windows) {
        (';', "tokensave.exe")
    } else {
        (':', "tokensave")
    };
    let path_var = std::env::var("PATH").ok()?;
    path_var.split(separator).find_map(|dir| {
        let candidate = PathBuf::from(dir).join(bin_name);
        if candidate.exists() {
            Some(normalize_path_separators(&candidate.to_string_lossy()))
        } else {
            None
        }
    })
}
/// Converts Windows backslashes to forward slashes so paths embedded in
/// agent configs are consistent across platforms.
fn normalize_path_separators(path: &str) -> String {
    path.chars()
        .map(|c| if c == '\\' { '/' } else { c })
        .collect()
}
/// Resolves the user's home directory from `HOME`, falling back to
/// `USERPROFILE` (Windows).
pub fn home_dir() -> Option<PathBuf> {
    for var in ["HOME", "USERPROFILE"] {
        if let Ok(value) = std::env::var(var) {
            return Some(PathBuf::from(value));
        }
    }
    None
}
/// Leniently parses JSONC (JSON with comments and trailing commas),
/// returning an empty object when the input is unusable.
pub fn parse_jsonc(input: &str) -> serde_json::Value {
    match serde_json::from_str(&strip_jsonc_comments(input)) {
        Ok(value) => value,
        Err(_) => serde_json::json!({}),
    }
}
/// Strips `//` line comments and `/* */` block comments from JSONC input,
/// then removes trailing commas, producing text plain `serde_json` can parse.
///
/// Single-pass character scanner. Content inside double-quoted strings is
/// copied verbatim (so `"https://…"` is never mangled); escape pairs inside
/// strings are copied as-is so an escaped `\"` cannot end the string early.
/// A line comment is dropped up to (but not including) the newline; an
/// unterminated block comment consumes the rest of the input.
fn strip_jsonc_comments(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    let chars: Vec<char> = input.chars().collect();
    let len = chars.len();
    let mut i = 0;
    let mut in_string = false;
    while i < len {
        if in_string {
            // Copy escape sequences whole so `\"` doesn't terminate the string.
            if chars[i] == '\\' && i + 1 < len {
                out.push(chars[i]);
                out.push(chars[i + 1]);
                i += 2;
                continue;
            }
            if chars[i] == '"' {
                in_string = false;
            }
            out.push(chars[i]);
            i += 1;
            continue;
        }
        if chars[i] == '"' {
            in_string = true;
            out.push(chars[i]);
            i += 1;
            continue;
        }
        // Line comment: skip to end of line (newline itself is kept).
        if chars[i] == '/' && i + 1 < len && chars[i + 1] == '/' {
            while i < len && chars[i] != '\n' {
                i += 1;
            }
            continue;
        }
        // Block comment: skip past the closing `*/`.
        if chars[i] == '/' && i + 1 < len && chars[i + 1] == '*' {
            i += 2;
            while i + 1 < len && !(chars[i] == '*' && chars[i + 1] == '/') {
                i += 1;
            }
            i += 2;
            continue;
        }
        out.push(chars[i]);
        i += 1;
    }
    remove_trailing_commas(&out)
}
/// Drops commas that are followed (ignoring whitespace) by a closing `}` or
/// `]`, turning JSONC-style trailing commas into strict JSON.
///
/// Operates on bytes: `,`, `}` and `]` are ASCII and never occur inside a
/// UTF-8 multi-byte sequence, so byte-level removal keeps the text valid.
fn remove_trailing_commas(input: &str) -> String {
    let bytes = input.as_bytes();
    let mut out = Vec::with_capacity(bytes.len());
    let mut i = 0;
    while i < bytes.len() {
        let b = bytes[i];
        if b == b',' {
            // Peek past any whitespace after the comma.
            let mut j = i + 1;
            while j < bytes.len() && matches!(bytes[j], b' ' | b'\t' | b'\n' | b'\r') {
                j += 1;
            }
            if j < bytes.len() && matches!(bytes[j], b'}' | b']') {
                // Trailing comma: drop it (whitespace is kept).
                i += 1;
                continue;
            }
        }
        out.push(b);
        i += 1;
    }
    // Infallible in practice (only ASCII commas were removed).
    String::from_utf8(out).unwrap_or_else(|_| input.to_string())
}
/// Leniently loads a JSONC file; an unreadable or unparsable file yields an
/// empty object.
pub fn load_jsonc_file(path: &Path) -> serde_json::Value {
    match std::fs::read_to_string(path) {
        Ok(contents) => parse_jsonc(&contents),
        Err(_) => serde_json::json!({}),
    }
}
/// Strictly loads a JSONC file: missing or blank files yield an empty
/// object, but unreadable or malformed files produce an error so corrupt
/// user config is never silently replaced.
pub fn load_jsonc_file_strict(path: &Path) -> Result<serde_json::Value> {
    if !path.exists() {
        return Ok(serde_json::json!({}));
    }
    let contents = match std::fs::read_to_string(path) {
        Ok(text) => text,
        Err(e) => {
            return Err(TokenSaveError::Config {
                message: format!("cannot read {}: {e}", path.display()),
            });
        }
    };
    if contents.trim().is_empty() {
        return Ok(serde_json::json!({}));
    }
    // Comments/trailing commas are stripped first; only real syntax errors
    // remain after that.
    match serde_json::from_str(&strip_jsonc_comments(&contents)) {
        Ok(value) => Ok(value),
        Err(e) => Err(TokenSaveError::Config {
            message: format!(
                "cannot parse {} as JSONC: {e}\n \
                 Hint: fix the JSON syntax manually and re-run the command,\n \
                 or delete the file to start fresh",
                path.display()
            ),
        }),
    }
}
/// Platform-specific VS Code user data directory.
///
/// macOS: `~/Library/Application Support/Code`; Linux (and unknown
/// platforms): `~/.config/Code`; Windows: `%APPDATA%\Code`, falling back to
/// `~/AppData/Roaming/Code` when APPDATA is unset.
pub fn vscode_data_dir(home: &Path) -> PathBuf {
    if cfg!(target_os = "macos") {
        home.join("Library/Application Support/Code")
    } else if cfg!(target_os = "linux") {
        home.join(".config/Code")
    } else if cfg!(target_os = "windows") {
        std::env::var("APPDATA")
            .map(|a| PathBuf::from(a).join("Code"))
            .unwrap_or_else(|_| home.join("AppData/Roaming/Code"))
    } else {
        home.join(".config/Code")
    }
}
/// Directory holding GitHub Copilot CLI configuration (`~/.copilot`).
pub fn copilot_cli_dir(home: &Path) -> PathBuf {
    let mut dir = home.to_path_buf();
    dir.push(".copilot");
    dir
}
/// One-time migration: when the saved config has no `installed_agents` list,
/// reconstruct it by probing each agent's config for an existing tokensave
/// entry, then persist the result. No-op when the list is already populated
/// or nothing is found.
pub fn migrate_installed_agents(home: &Path, config: &mut crate::user_config::UserConfig) {
    if !config.installed_agents.is_empty() {
        return;
    }
    let found: Vec<String> = all_integrations()
        .into_iter()
        .filter(|ag| ag.has_tokensave(home))
        .map(|ag| ag.id().to_string())
        .collect();
    if !found.is_empty() {
        config.installed_agents = found;
        config.save();
    }
}
/// Interactively picks which detected agents to configure with tokensave.
///
/// Returns `(to_install, to_uninstall)` as agent-id lists. When exactly one
/// agent is detected and it is not yet installed, it is chosen without
/// prompting. Errors when no supported agent is detected or the interactive
/// prompt fails.
pub fn pick_integrations_interactive(
    home: &Path,
    installed: &[String],
) -> Result<(Vec<String>, Vec<String>)> {
    let detected: Vec<Box<dyn AgentIntegration>> = all_integrations()
        .into_iter()
        .filter(|ag| ag.is_detected(home))
        .collect();
    if detected.is_empty() {
        return Err(TokenSaveError::Config {
            message: "No supported agents detected on this system".to_string(),
        });
    }
    // Fast path: a single newly detected agent needs no menu.
    if detected.len() == 1 && !installed.contains(&detected[0].id().to_string()) {
        let id = detected[0].id().to_string();
        return Ok((vec![id], vec![]));
    }
    let items: Vec<String> = detected.iter().map(|ag| ag.name().to_string()).collect();
    // Pre-check agents that are already installed.
    let defaults: Vec<bool> = detected
        .iter()
        .map(|ag| installed.contains(&ag.id().to_string()))
        .collect();
    let selections =
        dialoguer::MultiSelect::with_theme(&dialoguer::theme::ColorfulTheme::default())
            .with_prompt("Select agents to configure with tokensave MCP")
            .items(&items)
            .defaults(&defaults)
            .interact()
            .map_err(|e| TokenSaveError::Config {
                message: format!("interactive selection failed: {e}"),
            })?;
    let selected_ids: Vec<String> = selections
        .iter()
        .map(|&idx| detected[idx].id().to_string())
        .collect();
    // Selected but not yet installed → install.
    let to_install: Vec<String> = selected_ids
        .iter()
        .filter(|id| !installed.contains(id))
        .cloned()
        .collect();
    // Installed but deselected → uninstall.
    let to_uninstall: Vec<String> = detected
        .iter()
        .filter(|ag| {
            installed.contains(&ag.id().to_string()) && !selected_ids.contains(&ag.id().to_string())
        })
        .map(|ag| ag.id().to_string())
        .collect();
    Ok((to_install, to_uninstall))
}
/// Leniently loads a TOML file; missing, unreadable, or malformed files all
/// yield an empty table.
pub fn load_toml_file(path: &Path) -> toml::Value {
    let empty_table = || toml::Value::Table(toml::map::Map::new());
    if !path.exists() {
        return empty_table();
    }
    std::fs::read_to_string(path)
        .ok()
        .and_then(|contents| contents.parse::<toml::Value>().ok())
        .unwrap_or_else(empty_table)
}
/// Serializes `value` as pretty TOML and writes it to `path`, announcing the
/// write on stderr.
///
/// Fix: the previous version used `unwrap_or_else(|_| String::new())` on the
/// serializer result, so a serialization failure silently truncated the
/// user's config file to empty. A failed serialization is now a `Config`
/// error and nothing is written, matching `safe_write_json_file`'s handling.
pub fn write_toml_file(path: &Path, value: &toml::Value) -> Result<()> {
    let contents = toml::to_string_pretty(value).map_err(|e| TokenSaveError::Config {
        message: format!("failed to serialize TOML for {}: {e}", path.display()),
    })?;
    std::fs::write(path, contents).map_err(|e| TokenSaveError::Config {
        message: format!("failed to write {}: {e}", path.display()),
    })?;
    eprintln!("\x1b[32m✔\x1b[0m Wrote {}", path.display());
    Ok(())
}
/// Marker line used to detect (and avoid duplicating) our hook snippet.
const HOOK_MARKER: &str = "# tokensave: auto-sync";

/// Shell snippet appended to a post-commit hook: runs `tokensave sync` in
/// the background with all output discarded. Backslashes in the binary path
/// are normalized so the snippet works under Windows shells too.
fn post_commit_snippet(tokensave_bin: &str) -> String {
    let bin = tokensave_bin.replace('\\', "/");
    let mut snippet = String::with_capacity(HOOK_MARKER.len() + bin.len() + 32);
    snippet.push_str(HOOK_MARKER);
    snippet.push('\n');
    snippet.push_str(&bin);
    snippet.push_str(" sync >/dev/null 2>&1 &\n");
    snippet
}
/// Interactively offers to install a global git post-commit hook that runs
/// `tokensave sync` in the background after every commit.
///
/// Best-effort throughout: every failure is reported on stderr and the
/// function simply returns. Silently does nothing when the home directory
/// cannot be resolved, the hook already contains our marker, or stdin is not
/// a terminal (piped/CI runs must never block on a prompt).
pub fn offer_git_post_commit_hook(tokensave_bin: &str) {
    let Some(home) = home_dir() else { return };
    // Respect an existing core.hooksPath; otherwise plan to use
    // ~/.config/git/hooks and set core.hooksPath ourselves later.
    let hooks_dir = read_global_hooks_path(&home);
    let (hooks_dir, need_set_hookspath) = match hooks_dir {
        Some(dir) => (dir, false),
        None => (home.join(".config").join("git").join("hooks"), true),
    };
    let hook_path = hooks_dir.join("post-commit");
    // Idempotence: skip when the marker is already present in the hook.
    if hook_path.exists() {
        if let Ok(contents) = std::fs::read_to_string(&hook_path) {
            if contents.contains(HOOK_MARKER) {
                eprintln!(" Global git post-commit hook already contains tokensave, skipping");
                return;
            }
        }
    }
    // Never prompt when not attached to a terminal.
    if !atty_stdin() {
        return;
    }
    eprintln!();
    eprint!(
        "Install a global git post-commit hook to auto-run \x1b[1mtokensave sync\x1b[0m after each commit? [y/N] "
    );
    let mut answer = String::new();
    if std::io::stdin().read_line(&mut answer).is_err() {
        return;
    }
    // Default answer is "no".
    if !matches!(answer.trim(), "y" | "Y" | "yes" | "Yes") {
        eprintln!(" Skipped git post-commit hook");
        return;
    }
    if let Err(e) = std::fs::create_dir_all(&hooks_dir) {
        eprintln!(
            " \x1b[31m✘\x1b[0m Failed to create {}: {e}",
            hooks_dir.display()
        );
        return;
    }
    // Point git at our hooks dir only after the user has consented and the
    // directory exists.
    if need_set_hookspath {
        let gitconfig_path = home.join(".gitconfig");
        if let Err(msg) = set_global_hooks_path(&gitconfig_path, &hooks_dir) {
            eprintln!(" \x1b[31m✘\x1b[0m {msg} — hook not installed");
            return;
        }
        eprintln!(
            "\x1b[32m✔\x1b[0m Set git core.hooksPath to {}",
            hooks_dir.display()
        );
    }
    let snippet = post_commit_snippet(tokensave_bin);
    if hook_path.exists() {
        // Append to an existing hook rather than clobbering it.
        use std::io::Write;
        let Ok(mut f) = std::fs::OpenOptions::new().append(true).open(&hook_path) else {
            eprintln!(
                " \x1b[31m✘\x1b[0m Failed to open {} for writing",
                hook_path.display()
            );
            return;
        };
        if write!(f, "\n{snippet}").is_err() {
            eprintln!(
                " \x1b[31m✘\x1b[0m Failed to write to {}",
                hook_path.display()
            );
            return;
        }
    } else {
        // Fresh hook needs a shebang line.
        let contents = format!("#!/bin/sh\n{snippet}");
        if std::fs::write(&hook_path, contents).is_err() {
            eprintln!(
                " \x1b[31m✘\x1b[0m Failed to create {}",
                hook_path.display()
            );
            return;
        }
    }
    // git only executes hooks that are executable.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let _ = std::fs::set_permissions(&hook_path, std::fs::Permissions::from_mode(0o755));
    }
    eprintln!(
        "\x1b[32m✔\x1b[0m Installed global git post-commit hook at {}",
        hook_path.display()
    );
}
/// Reads `core.hooksPath` from the user's global git config, checking
/// `~/.gitconfig` first and `~/.config/git/config` second. `~` is expanded
/// and relative values are resolved against `home`.
fn read_global_hooks_path(home: &Path) -> Option<PathBuf> {
    let candidates = [
        home.join(".gitconfig"),
        home.join(".config").join("git").join("config"),
    ];
    candidates.iter().find_map(|config_path| {
        let value = parse_gitconfig_value(config_path, "core", "hookspath")?;
        let expanded = PathBuf::from(expand_tilde(&value, home));
        if expanded.is_absolute() {
            Some(expanded)
        } else {
            Some(home.join(expanded))
        }
    })
}
/// Minimal gitconfig reader: returns the value of `key` inside `[section]`
/// from the file at `path`, or `None` when the file is unreadable or the key
/// is absent. Section and key matching is ASCII-case-insensitive; a
/// surrounding pair of double quotes on the value is stripped.
fn parse_gitconfig_value(path: &Path, section: &str, key: &str) -> Option<String> {
    let contents = std::fs::read_to_string(path).ok()?;
    let want_section = section.to_ascii_lowercase();
    let want_key = key.to_ascii_lowercase();
    let mut inside = false;
    for raw in contents.lines() {
        let line = raw.trim();
        if line.starts_with('[') {
            // `[core]` or `[section "subsection"]` — compare the first word.
            let header = line
                .trim_start_matches('[')
                .split(']')
                .next()
                .unwrap_or("")
                .trim();
            let name = header.split_whitespace().next().unwrap_or("");
            inside = name.eq_ignore_ascii_case(&want_section);
            continue;
        }
        if !inside || line.is_empty() || line.starts_with('#') || line.starts_with(';') {
            continue;
        }
        let Some((k, v)) = line.split_once('=') else {
            continue;
        };
        if k.trim().to_ascii_lowercase() != want_key {
            continue;
        }
        let v = v.trim();
        let unquoted = v
            .strip_prefix('"')
            .and_then(|s| s.strip_suffix('"'))
            .unwrap_or(v);
        return Some(unquoted.to_string());
    }
    None
}
/// Writes `core.hooksPath = <hooks_dir>` into the gitconfig at
/// `gitconfig_path`, creating the file (and its parent directory) when
/// necessary. Returns a human-readable error string on failure.
fn set_global_hooks_path(
    gitconfig_path: &Path,
    hooks_dir: &Path,
) -> std::result::Result<(), String> {
    // git expects forward slashes, even on Windows.
    let hooks_str = hooks_dir.to_string_lossy().replace('\\', "/");
    let contents = if gitconfig_path.exists() {
        std::fs::read_to_string(gitconfig_path)
            .map_err(|e| format!("Failed to read {}: {e}", gitconfig_path.display()))?
    } else {
        String::new()
    };
    if let Some(parent) = gitconfig_path.parent() {
        std::fs::create_dir_all(parent)
            .map_err(|e| format!("Failed to create {}: {e}", parent.display()))?;
    }
    let updated = insert_gitconfig_value(&contents, "core", "hooksPath", &hooks_str);
    std::fs::write(gitconfig_path, updated)
        .map_err(|e| format!("Failed to write {}: {e}", gitconfig_path.display()))?;
    Ok(())
}
/// Returns `contents` with `key = value` inserted at the end of `[section]`.
///
/// When the section exists, the entry is placed just before the next section
/// header (or at end of file when the section is last). When it does not, a
/// new `[section]` with the entry is appended. Existing lines are preserved
/// verbatim; the result always ends with a newline. Does not de-duplicate an
/// existing key — callers check for the key first.
fn insert_gitconfig_value(contents: &str, section: &str, key: &str, value: &str) -> String {
    let section_lower = section.to_ascii_lowercase();
    let lines: Vec<&str> = contents.lines().collect();
    let mut result = Vec::with_capacity(lines.len() + 3);
    let entry = format!("\t{key} = {value}");
    // Index of the line *before which* the entry will be inserted.
    let mut section_end: Option<usize> = None;
    let mut in_section = false;
    // Scan: find the target section, then the header that follows it.
    for (i, line) in lines.iter().enumerate() {
        let trimmed = line.trim();
        if trimmed.starts_with('[') {
            if in_section {
                section_end = Some(i);
                break;
            }
            let header = trimmed
                .trim_start_matches('[')
                .split(']')
                .next()
                .unwrap_or("")
                .trim();
            let name = header.split_whitespace().next().unwrap_or("");
            if name.eq_ignore_ascii_case(&section_lower) {
                in_section = true;
            }
        }
    }
    // Section found but is the last one in the file: insert at EOF.
    if in_section && section_end.is_none() {
        section_end = Some(lines.len());
    }
    if let Some(insert_at) = section_end {
        for (i, line) in lines.iter().enumerate() {
            if i == insert_at {
                result.push(entry.as_str());
            }
            result.push(line);
        }
        // insert_at == lines.len() never triggers inside the loop above.
        if insert_at == lines.len() {
            result.push(&entry);
        }
    } else {
        // Section absent: append a fresh `[section]` block.
        for line in &lines {
            result.push(line);
        }
        if !contents.is_empty() && !contents.ends_with('\n') {
            result.push("");
        }
        let section_header = format!("[{section}]");
        let mut out = result.join("\n");
        if !out.is_empty() && !out.ends_with('\n') {
            out.push('\n');
        }
        out.push_str(&section_header);
        out.push('\n');
        out.push_str(&entry);
        out.push('\n');
        return out;
    }
    let mut out = result.join("\n");
    if !out.ends_with('\n') {
        out.push('\n');
    }
    out
}
/// Expands a leading `~/` (or a bare `~`) to the given home directory;
/// anything else is returned unchanged.
fn expand_tilde(s: &str, home: &Path) -> String {
    match s.strip_prefix("~/") {
        Some(rest) => home.join(rest).to_string_lossy().to_string(),
        None if s == "~" => home.to_string_lossy().to_string(),
        None => s.to_string(),
    }
}
/// True when stdin is attached to a terminal — used to suppress interactive
/// prompts in piped/CI contexts.
fn atty_stdin() -> bool {
    use std::io::IsTerminal;
    std::io::stdin().is_terminal()
}
#[cfg(test)]
mod git_hook_tests {
    // Unit tests for the gitconfig parsing/insertion helpers used by the
    // global post-commit hook installer.
    use super::*;
    use std::path::Path;
    #[test]
    fn parse_hookspath_basic() {
        let config = "[core]\n\thooksPath = /home/user/.git-hooks\n";
        assert_eq!(
            parse_gitconfig_value_from_str(config, "core", "hookspath"),
            Some("/home/user/.git-hooks".to_string())
        );
    }
    #[test]
    fn parse_hookspath_quoted() {
        // Quoted values have the surrounding quotes stripped.
        let config = "[core]\n\thooksPath = \"/home/user/my hooks\"\n";
        assert_eq!(
            parse_gitconfig_value_from_str(config, "core", "hookspath"),
            Some("/home/user/my hooks".to_string())
        );
    }
    #[test]
    fn parse_hookspath_case_insensitive() {
        let config = "[Core]\n\tHooksPath = /tmp/hooks\n";
        assert_eq!(
            parse_gitconfig_value_from_str(config, "core", "hookspath"),
            Some("/tmp/hooks".to_string())
        );
    }
    #[test]
    fn parse_hookspath_missing() {
        let config = "[core]\n\tautocrlf = true\n";
        assert_eq!(
            parse_gitconfig_value_from_str(config, "core", "hookspath"),
            None
        );
    }
    #[test]
    fn parse_hookspath_wrong_section() {
        // The key must be inside the requested section to match.
        let config = "[user]\n\thooksPath = /nope\n[core]\n\tautocrlf = true\n";
        assert_eq!(
            parse_gitconfig_value_from_str(config, "core", "hookspath"),
            None
        );
    }
    #[test]
    fn insert_into_existing_section() {
        let config = "[user]\n\tname = Test\n[core]\n\tautocrlf = true\n";
        let result = insert_gitconfig_value(config, "core", "hooksPath", "/tmp/hooks");
        assert!(result.contains("\thooksPath = /tmp/hooks"));
        assert!(result.contains("[core]"));
        assert!(result.contains("autocrlf = true"));
    }
    #[test]
    fn insert_new_section() {
        let config = "[user]\n\tname = Test\n";
        let result = insert_gitconfig_value(config, "core", "hooksPath", "/tmp/hooks");
        assert!(result.contains("[core]\n\thooksPath = /tmp/hooks"));
    }
    #[test]
    fn insert_into_empty_file() {
        let result = insert_gitconfig_value("", "core", "hooksPath", "/tmp/hooks");
        assert!(result.contains("[core]\n\thooksPath = /tmp/hooks"));
    }
    #[test]
    fn insert_before_next_section() {
        // New entry lands after existing keys but before the next header.
        let config = "[core]\n\tautocrlf = true\n[user]\n\tname = Test\n";
        let result = insert_gitconfig_value(config, "core", "hooksPath", "/tmp/hooks");
        let hooks_pos = result.find("hooksPath").unwrap();
        let user_pos = result.find("[user]").unwrap();
        let autocrlf_pos = result.find("autocrlf").unwrap();
        assert!(hooks_pos > autocrlf_pos);
        assert!(hooks_pos < user_pos);
    }
    #[test]
    fn expand_tilde_with_slash() {
        let home = Path::new("/home/test");
        assert_eq!(expand_tilde("~/hooks", home), "/home/test/hooks");
    }
    #[test]
    fn expand_tilde_bare() {
        let home = Path::new("/home/test");
        assert_eq!(expand_tilde("~", home), "/home/test");
    }
    #[test]
    fn expand_tilde_no_tilde() {
        let home = Path::new("/home/test");
        assert_eq!(expand_tilde("/abs/path", home), "/abs/path");
    }
    // NOTE(review): this is a duplicated copy of `parse_gitconfig_value`'s
    // string-parsing logic (the real fn takes a file path). The tests above
    // therefore exercise this copy, not the production function, and the two
    // could silently diverge — consider refactoring the production fn to
    // delegate to a `&str`-based helper and testing that instead.
    fn parse_gitconfig_value_from_str(contents: &str, section: &str, key: &str) -> Option<String> {
        let section_lower = section.to_ascii_lowercase();
        let key_lower = key.to_ascii_lowercase();
        let mut in_section = false;
        for line in contents.lines() {
            let trimmed = line.trim();
            if trimmed.starts_with('[') {
                let header = trimmed
                    .trim_start_matches('[')
                    .split(']')
                    .next()
                    .unwrap_or("")
                    .trim();
                let section_name = header.split_whitespace().next().unwrap_or("");
                in_section = section_name.eq_ignore_ascii_case(&section_lower);
                continue;
            }
            if !in_section {
                continue;
            }
            if trimmed.is_empty() || trimmed.starts_with('#') || trimmed.starts_with(';') {
                continue;
            }
            if let Some((k, v)) = trimmed.split_once('=') {
                if k.trim().to_ascii_lowercase() == key_lower {
                    let v = v.trim();
                    let v = v
                        .strip_prefix('"')
                        .and_then(|s| s.strip_suffix('"'))
                        .unwrap_or(v);
                    return Some(v.to_string());
                }
            }
        }
        None
    }
}
/// Tool names exposed by the tokensave MCP server, alphabetically sorted.
/// Keep in sync with `EXPECTED_TOOL_PERMS` (same entries, prefixed).
pub const TOOL_NAMES: &[&str] = &[
    "tokensave_affected",
    "tokensave_callees",
    "tokensave_callers",
    "tokensave_changelog",
    "tokensave_circular",
    "tokensave_complexity",
    "tokensave_context",
    "tokensave_coupling",
    "tokensave_dead_code",
    "tokensave_diff_context",
    "tokensave_distribution",
    "tokensave_doc_coverage",
    "tokensave_files",
    "tokensave_god_class",
    "tokensave_hotspots",
    "tokensave_impact",
    "tokensave_inheritance_depth",
    "tokensave_largest",
    "tokensave_module_api",
    "tokensave_node",
    "tokensave_rank",
    "tokensave_recursion",
    "tokensave_rename_preview",
    "tokensave_search",
    "tokensave_similar",
    "tokensave_status",
    "tokensave_unused_imports",
];
/// Per-tool permission strings: each entry is `mcp__tokensave__` + the
/// matching `TOOL_NAMES` entry. Keep both lists in sync.
pub const EXPECTED_TOOL_PERMS: &[&str] = &[
    "mcp__tokensave__tokensave_affected",
    "mcp__tokensave__tokensave_callees",
    "mcp__tokensave__tokensave_callers",
    "mcp__tokensave__tokensave_changelog",
    "mcp__tokensave__tokensave_circular",
    "mcp__tokensave__tokensave_complexity",
    "mcp__tokensave__tokensave_context",
    "mcp__tokensave__tokensave_coupling",
    "mcp__tokensave__tokensave_dead_code",
    "mcp__tokensave__tokensave_diff_context",
    "mcp__tokensave__tokensave_distribution",
    "mcp__tokensave__tokensave_doc_coverage",
    "mcp__tokensave__tokensave_files",
    "mcp__tokensave__tokensave_god_class",
    "mcp__tokensave__tokensave_hotspots",
    "mcp__tokensave__tokensave_impact",
    "mcp__tokensave__tokensave_inheritance_depth",
    "mcp__tokensave__tokensave_largest",
    "mcp__tokensave__tokensave_module_api",
    "mcp__tokensave__tokensave_node",
    "mcp__tokensave__tokensave_rank",
    "mcp__tokensave__tokensave_recursion",
    "mcp__tokensave__tokensave_rename_preview",
    "mcp__tokensave__tokensave_search",
    "mcp__tokensave__tokensave_similar",
    "mcp__tokensave__tokensave_status",
    "mcp__tokensave__tokensave_unused_imports",
];
#[cfg(test)]
mod jsonc_tests {
    // Tests for the lenient JSONC parser (comments + trailing commas).
    use super::*;
    #[test]
    fn parse_jsonc_plain_json() {
        let input = r#"{"key": "value", "num": 42}"#;
        let v = parse_jsonc(input);
        assert_eq!(v["key"], "value");
        assert_eq!(v["num"], 42);
    }
    #[test]
    fn parse_jsonc_line_comment() {
        let input = "{\n // this is a comment\n \"key\": \"val\"\n}";
        let v = parse_jsonc(input);
        assert_eq!(v["key"], "val");
    }
    #[test]
    fn parse_jsonc_block_comment() {
        let input = "{ /* block comment */ \"key\": \"val\" }";
        let v = parse_jsonc(input);
        assert_eq!(v["key"], "val");
    }
    #[test]
    fn parse_jsonc_trailing_comma_object() {
        let input = r#"{"a": 1, "b": 2,}"#;
        let v = parse_jsonc(input);
        assert_eq!(v["a"], 1);
        assert_eq!(v["b"], 2);
    }
    #[test]
    fn parse_jsonc_trailing_comma_array() {
        let input = r#"{"items": [1, 2, 3,]}"#;
        let v = parse_jsonc(input);
        assert_eq!(v["items"][2], 3);
    }
    #[test]
    fn parse_jsonc_combined() {
        let input = "{\n // comment\n \"x\": /* inline */ 99,\n}";
        let v = parse_jsonc(input);
        assert_eq!(v["x"], 99);
    }
    #[test]
    fn parse_jsonc_url_in_string_not_stripped() {
        // `//` inside a string must not be treated as a comment start.
        let input = r#"{"url": "https://example.com/path"}"#;
        let v = parse_jsonc(input);
        assert_eq!(v["url"], "https://example.com/path");
    }
    #[test]
    fn parse_jsonc_invalid_falls_back_to_empty() {
        let input = "not valid json at all !!!";
        let v = parse_jsonc(input);
        assert_eq!(v, serde_json::json!({}));
    }
    #[test]
    fn parse_jsonc_empty_string() {
        let v = parse_jsonc("");
        assert_eq!(v, serde_json::json!({}));
    }
    #[test]
    fn parse_jsonc_trailing_comma_with_whitespace() {
        // Whitespace between the comma and the closing brace is allowed.
        let input = "{\n \"a\": 1 ,\n}";
        let v = parse_jsonc(input);
        assert_eq!(v["a"], 1);
    }
}
#[cfg(test)]
mod safe_config_tests {
    // Tests for the backup / strict-load / atomic-write config pipeline.
    use super::*;
    use std::fs;
    fn tmpdir() -> tempfile::TempDir {
        tempfile::tempdir().expect("failed to create temp dir")
    }
    #[test]
    fn backup_returns_none_when_file_missing() {
        let dir = tmpdir();
        let path = dir.path().join("nonexistent.json");
        let result = backup_config_file(&path).unwrap();
        assert!(result.is_none());
    }
    #[test]
    fn backup_creates_bak_with_identical_content() {
        let dir = tmpdir();
        let path = dir.path().join("config.json");
        let original = r#"{"existing": "data", "nested": {"key": 1}}"#;
        fs::write(&path, original).unwrap();
        let backup = backup_config_file(&path)
            .unwrap()
            .expect("should create backup");
        assert!(backup.exists());
        assert_eq!(fs::read_to_string(&backup).unwrap(), original);
        // The source file must be untouched by the backup.
        assert_eq!(fs::read_to_string(&path).unwrap(), original);
    }
    #[test]
    fn backup_staging_file_is_cleaned_up() {
        let dir = tmpdir();
        let path = dir.path().join("config.json");
        fs::write(&path, "{}").unwrap();
        backup_config_file(&path).unwrap();
        let staging = dir.path().join("config.json.bak.new");
        assert!(!staging.exists(), ".bak.new staging file should be removed");
    }
    #[test]
    fn strict_load_returns_empty_for_missing_file() {
        let dir = tmpdir();
        let path = dir.path().join("nope.json");
        let val = load_json_file_strict(&path).unwrap();
        assert_eq!(val, serde_json::json!({}));
    }
    #[test]
    fn strict_load_returns_empty_for_blank_file() {
        let dir = tmpdir();
        let path = dir.path().join("empty.json");
        fs::write(&path, " \n ").unwrap();
        let val = load_json_file_strict(&path).unwrap();
        assert_eq!(val, serde_json::json!({}));
    }
    #[test]
    fn strict_load_parses_valid_json() {
        let dir = tmpdir();
        let path = dir.path().join("valid.json");
        fs::write(&path, r#"{"hello": "world", "n": 42}"#).unwrap();
        let val = load_json_file_strict(&path).unwrap();
        assert_eq!(val["hello"], "world");
        assert_eq!(val["n"], 42);
    }
    #[test]
    fn strict_load_errors_on_invalid_json() {
        let dir = tmpdir();
        let path = dir.path().join("bad.json");
        fs::write(&path, "not json {{{").unwrap();
        let err = load_json_file_strict(&path).unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("cannot parse"), "error: {msg}");
        assert!(
            msg.contains("bad.json"),
            "error should mention filename: {msg}"
        );
    }
    #[test]
    fn strict_load_errors_on_truncated_json() {
        let dir = tmpdir();
        let path = dir.path().join("trunc.json");
        fs::write(&path, r#"{"key": "value", "incomplete"#).unwrap();
        assert!(load_json_file_strict(&path).is_err());
    }
    #[test]
    fn strict_jsonc_load_returns_empty_for_missing() {
        let dir = tmpdir();
        let path = dir.path().join("nope.jsonc");
        let val = load_jsonc_file_strict(&path).unwrap();
        assert_eq!(val, serde_json::json!({}));
    }
    #[test]
    fn strict_jsonc_load_parses_valid_jsonc() {
        let dir = tmpdir();
        let path = dir.path().join("settings.json");
        fs::write(
            &path,
            "{\n // comment\n \"key\": \"val\",\n /* block */ \"n\": 1,\n}",
        )
        .unwrap();
        let val = load_jsonc_file_strict(&path).unwrap();
        assert_eq!(val["key"], "val");
        assert_eq!(val["n"], 1);
    }
    #[test]
    fn strict_jsonc_load_errors_on_garbage() {
        let dir = tmpdir();
        let path = dir.path().join("garbage.json");
        fs::write(&path, "totally not json or jsonc !!!").unwrap();
        let err = load_jsonc_file_strict(&path).unwrap_err();
        assert!(err.to_string().contains("cannot parse"));
    }
    #[test]
    fn safe_write_creates_file_from_scratch() {
        let dir = tmpdir();
        let path = dir.path().join("new.json");
        let value = serde_json::json!({"created": true});
        safe_write_json_file(&path, &value, None).unwrap();
        let written = fs::read_to_string(&path).unwrap();
        let parsed: serde_json::Value = serde_json::from_str(&written).unwrap();
        assert_eq!(parsed["created"], true);
    }
    #[test]
    fn safe_write_replaces_existing_file_atomically() {
        let dir = tmpdir();
        let path = dir.path().join("existing.json");
        fs::write(&path, r#"{"old": true}"#).unwrap();
        let value = serde_json::json!({"new": true});
        safe_write_json_file(&path, &value, None).unwrap();
        let parsed: serde_json::Value =
            serde_json::from_str(&fs::read_to_string(&path).unwrap()).unwrap();
        assert_eq!(parsed["new"], true);
        // The write replaces the document wholesale; old keys must be gone.
        assert!(parsed.get("old").is_none());
    }
    #[test]
    fn safe_write_cleans_up_new_file_on_success() {
        let dir = tmpdir();
        let path = dir.path().join("config.json");
        safe_write_json_file(&path, &serde_json::json!({}), None).unwrap();
        let new_path = dir.path().join("config.json.new");
        assert!(!new_path.exists(), ".new staging file should be removed");
    }
    #[test]
    fn safe_write_creates_parent_dirs() {
        let dir = tmpdir();
        let path = dir.path().join("deep").join("nested").join("config.json");
        safe_write_json_file(&path, &serde_json::json!({"deep": true}), None).unwrap();
        assert!(path.exists());
    }
    #[test]
    fn write_json_file_creates_backup_automatically() {
        let dir = tmpdir();
        let path = dir.path().join("auto.json");
        fs::write(&path, r#"{"original": true}"#).unwrap();
        write_json_file(&path, &serde_json::json!({"updated": true})).unwrap();
        let bak = dir.path().join("auto.json.bak");
        assert!(bak.exists());
        // Backup must hold the pre-write content.
        let backup_content: serde_json::Value =
            serde_json::from_str(&fs::read_to_string(&bak).unwrap()).unwrap();
        assert_eq!(backup_content["original"], true);
    }
    #[test]
    fn invalid_json_is_never_silently_replaced() {
        let dir = tmpdir();
        let path = dir.path().join("opencode.json");
        // Trailing commas make this invalid strict JSON.
        let corrupted =
            r#"{"mcp": {"other_server": {"url": "http://example.com"},}, "theme": "dark",}"#;
        fs::write(&path, corrupted).unwrap();
        let err = load_json_file_strict(&path);
        assert!(err.is_err(), "strict loader must reject invalid JSON");
        // The file on disk must remain untouched by a failed strict load.
        assert_eq!(fs::read_to_string(&path).unwrap(), corrupted);
        let old_style = load_json_file(&path);
        assert_eq!(
            old_style,
            serde_json::json!({}),
            "non-strict loader returns empty"
        );
    }
    #[test]
    fn full_install_cycle_preserves_existing_config() {
        // End-to-end: backup → strict load → mutate → atomic write.
        let dir = tmpdir();
        let path = dir.path().join("config.json");
        let original = serde_json::json!({
            "theme": "dark",
            "mcp": {
                "existing_server": {"url": "http://localhost:8080"}
            },
            "other_setting": [1, 2, 3]
        });
        fs::write(&path, serde_json::to_string_pretty(&original).unwrap()).unwrap();
        let backup = backup_config_file(&path).unwrap();
        let mut config = load_json_file_strict(&path).unwrap();
        config["mcp"]["tokensave"] = serde_json::json!({
            "type": "local",
            "command": ["tokensave", "serve"]
        });
        safe_write_json_file(&path, &config, backup.as_deref()).unwrap();
        let result: serde_json::Value =
            serde_json::from_str(&fs::read_to_string(&path).unwrap()).unwrap();
        assert!(result["mcp"]["tokensave"].is_object());
        assert_eq!(result["theme"], "dark");
        assert_eq!(
            result["mcp"]["existing_server"]["url"],
            "http://localhost:8080"
        );
        assert_eq!(result["other_setting"], serde_json::json!([1, 2, 3]));
        // The backup reflects the pre-install state.
        let bak_content: serde_json::Value =
            serde_json::from_str(&fs::read_to_string(backup.unwrap()).unwrap()).unwrap();
        assert!(bak_content.get("tokensave").is_none());
        assert_eq!(bak_content["theme"], "dark");
    }
    #[test]
    fn full_install_cycle_aborts_on_corrupt_file() {
        let dir = tmpdir();
        let path = dir.path().join("config.json");
        let corrupt_content = "{ this is not valid json at all }}}";
        fs::write(&path, corrupt_content).unwrap();
        let backup = backup_config_file(&path).unwrap();
        assert!(backup.is_some());
        let err = load_json_file_strict(&path);
        assert!(err.is_err());
        // Neither the original nor the backup may be altered on abort.
        assert_eq!(fs::read_to_string(&path).unwrap(), corrupt_content);
        assert_eq!(
            fs::read_to_string(backup.unwrap()).unwrap(),
            corrupt_content
        );
    }
    #[test]
    fn safe_write_output_is_valid_json() {
        let dir = tmpdir();
        let path = dir.path().join("roundtrip.json");
        let value = serde_json::json!({
            "unicode": "héllo wörld 🦀",
            "nested": {"deep": {"array": [1, null, true, "str"]}},
            "empty_obj": {},
            "empty_arr": []
        });
        safe_write_json_file(&path, &value, None).unwrap();
        let raw = fs::read_to_string(&path).unwrap();
        let reparsed: serde_json::Value =
            serde_json::from_str(&raw).expect("written file must be valid JSON");
        assert_eq!(reparsed, value);
    }
}
#[cfg(test)]
mod path_normalize_tests {
    // Tests for backslash → forward-slash path normalization.
    use super::*;
    #[test]
    fn normalizes_windows_backslashes() {
        assert_eq!(
            normalize_path_separators(r"C:\Users\dev\scoop\shims\tokensave.exe"),
            "C:/Users/dev/scoop/shims/tokensave.exe"
        );
    }
    #[test]
    fn leaves_unix_paths_unchanged() {
        assert_eq!(
            normalize_path_separators("/usr/local/bin/tokensave"),
            "/usr/local/bin/tokensave"
        );
    }
}