use sha2::{Digest, Sha256};
use super::resolve_read_path;
use super::safe_resolve_path;
use crate::providers::ToolDefinition;
use anyhow::Result;
use koda_sandbox::fs::FileSystem;
use serde_json::{Value, json};
use std::path::Path;
use std::time::SystemTime;
/// Tool definitions (name, description, JSON-schema parameters) for the
/// file-system tool family: Read, Write, Edit, Delete and List.
///
/// The description strings are prompts addressed to the model, so they spell
/// out usage rules (read before editing, prefer Edit over Write, etc.) rather
/// than documenting the Rust implementation.
pub fn definitions() -> Vec<ToolDefinition> {
    vec![
        // Read: file contents with line numbers; optional partial range.
        ToolDefinition {
            name: "Read".to_string(),
            description: "Read the contents of a file. The output includes line numbers. \
                For large files (>500 lines), use start_line and num_lines to read specific \
                portions instead of the whole file. ALWAYS read a file before editing it — \
                never guess at file contents. Re-read after editing to verify changes."
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Relative or absolute path to the file"
                    },
                    "start_line": {
                        "type": "integer",
                        "description": "Optional 1-based start line for partial reads"
                    },
                    "num_lines": {
                        "type": "integer",
                        "description": "Number of lines to read from start_line"
                    }
                },
                "required": ["file_path"]
            }),
        },
        // Write: whole-file create/overwrite, gated by an explicit flag.
        ToolDefinition {
            name: "Write".to_string(),
            description: "Create a new file or overwrite an existing one. \
                Set overwrite=true to replace an existing file. \
                For targeted edits to existing files, prefer Edit instead."
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Relative or absolute path to the file"
                    },
                    "content": {
                        "type": "string",
                        "description": "The full content to write"
                    },
                    "overwrite": {
                        "type": "boolean",
                        "description": "Must be true to overwrite an existing file (default: false)"
                    }
                },
                "required": ["file_path", "content"]
            }),
        },
        // Edit: batch of exact-match find-and-replace operations.
        ToolDefinition {
            name: "Edit".to_string(),
            description: "Targeted find-and-replace in an existing file. \
                Each replacement matches exact 'old_str' and replaces with 'new_str'. \
                ALWAYS Read the file first to get exact text. \
                Keep each diff small — target only the minimal snippet you want changed. \
                Apply multiple sequential Edit calls for large refactors. \
                Never paste an entire file inside old_str."
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Path to the file to edit"
                    },
                    "replacements": {
                        "type": "array",
                        "description": "List of find-and-replace operations",
                        "items": {
                            "type": "object",
                            "properties": {
                                "old_str": {
                                    "type": "string",
                                    "description": "Exact text to find in the file"
                                },
                                "new_str": {
                                    "type": "string",
                                    "description": "Text to replace it with"
                                },
                                "replace_all": {
                                    "type": "boolean",
                                    "description": "Replace all occurrences instead of just the first (default: false)"
                                }
                            },
                            "required": ["old_str", "new_str"]
                        }
                    }
                },
                "required": ["file_path", "replacements"]
            }),
        },
        // Delete: file or directory removal with a recursive-deletion guard.
        ToolDefinition {
            name: "Delete".to_string(),
            description: "Delete a file or directory. For directories, set recursive to true. \
                Returns what was removed and the count of deleted items."
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Path to the file or directory to delete"
                    },
                    "recursive": {
                        "type": "boolean",
                        "description": "Required for deleting non-empty directories (default: false)"
                    }
                },
                "required": ["file_path"]
            }),
        },
        // List: directory listing; no required arguments (defaults to root).
        ToolDefinition {
            name: "List".to_string(),
            description: "List files and directories in a given path. Respects .gitignore \
                and skips common noise (node_modules, __pycache__, .git). \
                Use with recursive=false (default) to explore project structure one level \
                at a time. Use with recursive=true for a full tree view. \
                For finding files by pattern (e.g. all *.rs files), prefer Glob instead."
                .to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Directory to list (default: project root)"
                    },
                    "recursive": {
                        "type": "boolean",
                        "description": "Whether to recurse into subdirectories (default: false)"
                    }
                }
            }),
        },
    ]
}
/// Tool backend for `Read`.
///
/// Returns the file's contents, with three twists aimed at saving model
/// context:
/// * a size+mtime cache: if the file is unchanged since the last identical
///   read, a short "unchanged" notice is returned instead of the content;
/// * partial reads via `start_line`/`num_lines` (both must be present);
/// * full reads larger than 20 000 bytes are truncated with a notice.
///
/// Full reads also record a SHA-256 of the content in the cache, which
/// `edit_file` later uses for its staleness check.
pub async fn read_file(
    project_root: &Path,
    args: &Value,
    cache: &super::FileReadCache,
    fs: &dyn FileSystem,
) -> Result<String> {
    // Accept both "file_path" and the legacy "path" argument name.
    let path_str = args["file_path"]
        .as_str()
        .or_else(|| args["path"].as_str())
        .ok_or_else(|| anyhow::anyhow!("Missing 'file_path' argument"))?;
    let resolved = resolve_read_path(project_root, path_str)?;
    let start_line = args["start_line"].as_u64();
    let num_lines = args["num_lines"].as_u64();
    // NOTE(review): metadata comes from tokio::fs while the content below goes
    // through the `fs` abstraction — confirm the two views agree when `fs` is
    // not the local filesystem.
    let metadata = tokio::fs::metadata(&resolved)
        .await
        .map_err(|e| anyhow::anyhow!("Failed to read {}: {}", resolved.display(), e))?;
    let mtime = metadata.modified().unwrap_or(SystemTime::UNIX_EPOCH);
    let size = metadata.len();
    // The key includes the requested range, so partial reads are cached
    // independently of full reads of the same file.
    let cache_key = format!("{}:{:?}:{:?}", resolved.display(), start_line, num_lines);
    {
        // A poisoned lock just means a holder panicked; the map is still usable.
        let cache_guard = cache.lock().unwrap_or_else(|e| e.into_inner());
        // Same size and mtime as last time => skip resending the content.
        if let Some((cached_size, cached_mtime, _)) = cache_guard.get(&cache_key)
            && *cached_size == size
            && *cached_mtime == mtime
        {
            return Ok(format!(
                "[File '{}' is unchanged since last read. Full content is already in \
                your conversation history. To read a specific section, use the \
                start_line and num_lines parameters instead of re-reading the whole file.]",
                path_str
            ));
        }
    }
    // Only full reads get hashed; partial reads store an empty hash, which
    // edit_file treats as "no staleness information".
    let mut content_sha256 = String::new();
    let raw_bytes = fs
        .read(&resolved, None)
        .await
        .map_err(|e| anyhow::anyhow!("Failed to read {}: {}", resolved.display(), e))?;
    // Lossy decode: invalid UTF-8 becomes U+FFFD instead of failing the read.
    let raw_content = String::from_utf8_lossy(&raw_bytes).into_owned();
    let output = match (start_line, num_lines) {
        // Partial read: `start` is 1-based; saturating_sub tolerates start=0.
        (Some(start), Some(count)) => {
            let start_idx = (start as usize).saturating_sub(1);
            raw_content
                .lines()
                .skip(start_idx)
                .take(count as usize)
                .collect::<Vec<_>>()
                .join("\n")
        }
        // Full read (also taken when only one of start/count was supplied).
        _ => {
            content_sha256 = format!("{:x}", Sha256::digest(raw_content.as_bytes()));
            if raw_content.len() > 20_000 {
                // Walk the cut point back to a char boundary so the slice
                // below cannot panic on multi-byte UTF-8.
                let mut end = 20_000;
                while !raw_content.is_char_boundary(end) {
                    end -= 1;
                }
                format!(
                    "{}\n\n... [TRUNCATED: file is {} bytes. Use start_line/num_lines for large files]",
                    &raw_content[..end],
                    raw_content.len()
                )
            } else {
                raw_content
            }
        }
    };
    {
        let mut cache_guard = cache.lock().unwrap_or_else(|e| e.into_inner());
        cache_guard.insert(cache_key, (size, mtime, content_sha256));
    }
    Ok(output)
}
/// Tool backend for `Write`.
///
/// Creates a file with the given content, or replaces an existing one when
/// `overwrite` is explicitly set. Refusing silent overwrites nudges the model
/// toward `Edit` for targeted changes.
pub async fn write_file(project_root: &Path, args: &Value, fs: &dyn FileSystem) -> Result<String> {
    // "file_path" is the documented name; "path" is accepted as an alias.
    let Some(path_str) = args["file_path"].as_str().or_else(|| args["path"].as_str()) else {
        anyhow::bail!("Missing 'file_path' argument");
    };
    let Some(content) = args["content"].as_str() else {
        anyhow::bail!("Missing 'content' argument");
    };
    let overwrite = args["overwrite"].as_bool().unwrap_or(false);
    let resolved = safe_resolve_path(project_root, path_str)?;

    // A successful stat means the target already exists.
    let file_exists = fs.stat(&resolved).await.is_ok();
    if file_exists && !overwrite {
        anyhow::bail!(
            "File '{}' already exists. Set overwrite=true to replace it, \
            or use Edit for targeted changes.",
            path_str
        );
    }

    match fs.write(&resolved, content.as_bytes()).await {
        Ok(()) => Ok(format!(
            "Written {} bytes to {}",
            content.len(),
            resolved.display()
        )),
        Err(e) => Err(anyhow::anyhow!(
            "Failed to write {}: {}",
            resolved.display(),
            e
        )),
    }
}
/// Convert a byte offset into a 1-based line number by counting newline
/// bytes before the offset. Offsets past the end of `content` are clamped.
/// Counting is done over bytes ('\n' is a single byte in UTF-8), so an
/// offset that is not a char boundary is handled without panicking.
fn byte_offset_to_line(content: &str, offset: usize) -> usize {
    let end = offset.min(content.len());
    content.as_bytes()[..end]
        .iter()
        .filter(|&&b| b == b'\n')
        .count()
        + 1
}

/// Return the 1-based line numbers of every occurrence of `needle` in
/// `haystack`, including overlapping occurrences. An empty needle yields no
/// matches (the original `abs + 1` scan would loop forever on it).
fn match_line_numbers(haystack: &str, needle: &str) -> Vec<usize> {
    let mut line_nos = Vec::new();
    if needle.is_empty() {
        return line_nos;
    }
    // Advance by the byte length of the needle's first char: stepping by a
    // fixed 1 byte could land inside a multi-byte UTF-8 char and panic when
    // slicing `haystack[start..]` on the next iteration. Stepping by one
    // char still finds overlapping matches.
    let step = needle.chars().next().map_or(1, char::len_utf8);
    let mut start = 0;
    while let Some(rel) = haystack[start..].find(needle) {
        let abs = start + rel;
        line_nos.push(byte_offset_to_line(haystack, abs));
        start = abs + step;
    }
    line_nos
}
pub async fn edit_file(
project_root: &Path,
args: &Value,
cache: &super::FileReadCache,
fs: &dyn FileSystem,
) -> Result<String> {
let path_str = args["file_path"]
.as_str()
.or_else(|| args["path"].as_str())
.ok_or_else(|| anyhow::anyhow!("Missing 'file_path' argument"))?;
let replacements = args["replacements"]
.as_array()
.ok_or_else(|| anyhow::anyhow!("Missing 'replacements' argument"))?;
let resolved = safe_resolve_path(project_root, path_str)?;
let raw_bytes = fs
.read(&resolved, None)
.await
.map_err(|e| anyhow::anyhow!("Failed to read {}: {}", resolved.display(), e))?;
let mut content = String::from_utf8(raw_bytes)
.map_err(|e| anyhow::anyhow!("File '{}' is not valid UTF-8: {}", path_str, e))?;
let full_key = format!("{}:None:None", resolved.display());
{
let guard = cache.lock().unwrap_or_else(|e| e.into_inner());
if let Some((_, _, cached_hash)) = guard.get(&full_key)
&& !cached_hash.is_empty()
{
let current_hash = format!("{:x}", Sha256::digest(content.as_bytes()));
if *cached_hash != current_hash {
anyhow::bail!(
"File '{}' has changed on disk since you last read it \
(SHA-256 mismatch). Re-read the file to get the current \
content before editing.",
path_str
);
}
}
}
let mut changes = Vec::new();
for (i, replacement) in replacements.iter().enumerate() {
let old_str = replacement["old_str"]
.as_str()
.ok_or_else(|| anyhow::anyhow!("Replacement {i}: missing 'old_str'"))?;
let new_str = replacement["new_str"]
.as_str()
.ok_or_else(|| anyhow::anyhow!("Replacement {i}: missing 'new_str'"))?;
if old_str.is_empty() {
anyhow::bail!("Replacement {i}: 'old_str' cannot be empty");
}
let replace_all = replacement["replace_all"].as_bool().unwrap_or(false);
if content.contains(old_str) {
let count = content.matches(old_str).count();
if replace_all {
content = content.replace(old_str, new_str);
for line in old_str.lines() {
changes.push(format!("-{line}"));
}
for line in new_str.lines() {
changes.push(format!("+{line}"));
}
if count > 1 {
changes.push(format!("({count} occurrences replaced)"));
}
} else if count > 1 {
let lines = match_line_numbers(&content, old_str);
let line_list = lines
.iter()
.map(|n| n.to_string())
.collect::<Vec<_>>()
.join(", ");
anyhow::bail!(
"Replacement {i}: 'old_str' matches {count} times in '{}' \
(at lines {line_list}). Set replace_all=true to replace \
every occurrence, or expand the snippet to uniquely \
identify the one you want.",
path_str
);
} else {
content = content.replacen(old_str, new_str, 1);
for line in old_str.lines() {
changes.push(format!("-{line}"));
}
for line in new_str.lines() {
changes.push(format!("+{line}"));
}
}
} else {
let ranges = super::fuzzy::fuzzy_match_ranges(old_str, &content);
match ranges.len() {
0 => anyhow::bail!(
"Replacement {i}: 'old_str' not found in '{}'. \
Read the file first to get the exact text.",
path_str
),
1 => {
let r = ranges.into_iter().next().unwrap();
for line in old_str.lines() {
changes.push(format!("-{line}"));
}
for line in new_str.lines() {
changes.push(format!("+{line}"));
}
changes.push("(fuzzy match: trailing whitespace ignored)".into());
content = format!("{}{}{}", &content[..r.start], new_str, &content[r.end..]);
}
n => anyhow::bail!(
"Replacement {i}: 'old_str' is ambiguous — {n} fuzzy matches \
in '{}' (at lines {}). Use a more specific snippet.",
path_str,
ranges
.iter()
.map(|r| byte_offset_to_line(&content, r.start).to_string())
.collect::<Vec<_>>()
.join(", ")
),
}
}
if replacements.len() > 1 {
changes.push(String::new()); }
}
fs.write(&resolved, content.as_bytes())
.await
.map_err(|e| anyhow::anyhow!("Failed to write {}: {}", resolved.display(), e))?;
Ok(format!(
"Applied {} edit(s) to {}\n{}",
replacements.len(),
resolved.display(),
changes.join("\n")
))
}
pub async fn delete_file(project_root: &Path, args: &Value) -> Result<String> {
let path_str = args["file_path"]
.as_str()
.or_else(|| args["path"].as_str())
.ok_or_else(|| anyhow::anyhow!("Missing 'file_path' argument"))?;
let recursive = args["recursive"].as_bool().unwrap_or(false);
let resolved = safe_resolve_path(project_root, path_str)?;
if !resolved.exists() {
anyhow::bail!("Path not found: {}", resolved.display());
}
if resolved == project_root {
anyhow::bail!("Cannot delete the project root directory");
}
if resolved.is_file() {
let size = tokio::fs::metadata(&resolved).await?.len();
tokio::fs::remove_file(&resolved).await?;
Ok(format!(
"Deleted file {} ({} bytes)",
resolved.display(),
size
))
} else if resolved.is_dir() {
let is_empty = resolved.read_dir()?.next().is_none();
if is_empty {
tokio::fs::remove_dir(&resolved).await?;
Ok(format!("Deleted empty directory {}", resolved.display()))
} else if recursive {
let count = count_dir_entries(&resolved);
tokio::fs::remove_dir_all(&resolved).await?;
Ok(format!(
"Deleted directory {} ({} items removed)",
resolved.display(),
count
))
} else {
anyhow::bail!(
"Directory {} is not empty. Set recursive=true to delete it and all contents.",
resolved.display()
)
}
} else {
anyhow::bail!("Path is not a file or directory: {}", resolved.display())
}
}
/// Recursively count all entries (files and directories) beneath `path`.
/// Unreadable directories and entries contribute zero rather than erroring,
/// since this count is only used for reporting.
fn count_dir_entries(path: &Path) -> usize {
    std::fs::read_dir(path)
        .map(|entries| {
            entries
                .flatten()
                .map(|entry| {
                    let child = entry.path();
                    // Each entry counts once; directories add their subtree.
                    let nested = if child.is_dir() {
                        count_dir_entries(&child)
                    } else {
                        0
                    };
                    1 + nested
                })
                .sum()
        })
        .unwrap_or(0)
}
/// Tool backend for `List`.
///
/// Lists one directory level by default, or walks the whole tree when
/// `recursive` is true. The recursive walk respects `.gitignore`, skips
/// hidden files, and filters out common build/dependency directories.
/// Output is capped at `max_entries` rows; directories sort before files,
/// then alphabetically. Note that counting stops at the cap, so a capped
/// header reports the cap, not the true total.
pub async fn list_files(project_root: &Path, args: &Value, max_entries: usize) -> Result<String> {
    // Accept both "file_path" and "path"; default to the project root.
    let path_str = args["file_path"]
        .as_str()
        .or_else(|| args["path"].as_str())
        .unwrap_or(".");
    let recursive = args["recursive"].as_bool().unwrap_or(false);
    let resolved = resolve_read_path(project_root, path_str)?;
    // (is_dir, display name) pairs; is_dir drives the dirs-first sort below.
    let mut entries: Vec<(bool, String)> = Vec::new();
    let mut total_count: usize = 0;
    if recursive {
        let mut builder = ignore::WalkBuilder::new(&resolved);
        builder
            .hidden(true)
            .git_ignore(true)
            // Prune well-known noise directories from the walk entirely.
            .filter_entry(|entry| {
                let name = entry.file_name().to_string_lossy();
                !matches!(
                    name.as_ref(),
                    "target"
                        | "node_modules"
                        | "__pycache__"
                        | ".git"
                        | "dist"
                        | "build"
                        | ".next"
                        | ".cache"
                )
            });
        let walker = builder.build();
        for entry in walker.flatten() {
            let path = entry.path();
            // The walker yields the root itself first; skip it.
            if path == resolved {
                continue;
            }
            // Show paths relative to the project root when possible.
            let relative = path.strip_prefix(project_root).unwrap_or(path);
            entries.push((path.is_dir(), relative.display().to_string()));
            total_count += 1;
            if entries.len() >= max_entries {
                break;
            }
        }
    } else {
        // Single level: plain directory read, names only (no gitignore
        // filtering in this mode).
        let mut reader = tokio::fs::read_dir(&resolved).await?;
        while let Some(entry) = reader.next_entry().await? {
            let ft = entry.file_type().await?;
            entries.push((
                ft.is_dir(),
                entry.file_name().to_string_lossy().into_owned(),
            ));
            total_count += 1;
            if entries.len() >= max_entries {
                break;
            }
        }
    }
    // Directories first (true sorts before false via the reversed cmp),
    // then alphabetical within each group.
    entries.sort_by(|a, b| b.0.cmp(&a.0).then_with(|| a.1.cmp(&b.1)));
    let header = format!("Listing: {}", resolved.display());
    if entries.is_empty() {
        Ok(format!("{header}\n(empty directory)"))
    } else {
        let formatted: Vec<String> = entries
            .into_iter()
            .map(|(is_dir, name)| {
                // "d " prefix marks directories, two spaces align files.
                let prefix = if is_dir { "d " } else { "  " };
                format!("{prefix}{name}")
            })
            .collect();
        let header = format!("{header} ({total_count} entries)");
        if total_count >= max_entries {
            Ok(format!(
                "{header}\n{}\n\n... [CAPPED at {max_entries} entries. Use a subdirectory path to narrow results.]",
                formatted.join("\n")
            ))
        } else {
            Ok(format!("{header}\n{}", formatted.join("\n")))
        }
    }
}
// Unit tests for the file-system tool backends, run against real temp
// directories through the local filesystem implementation.
#[cfg(test)]
mod tests {
    use super::*;
    use koda_sandbox::fs::LocalFileSystem;
    use std::collections::HashMap;
    use std::sync::Mutex;

    /// Fresh, empty read-cache for each test.
    fn cache() -> super::super::FileReadCache {
        std::sync::Arc::new(Mutex::new(HashMap::new()))
    }

    fn fs() -> LocalFileSystem {
        LocalFileSystem::new()
    }

    #[tokio::test]
    async fn read_file_basic() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("hello.txt");
        std::fs::write(&f, "line1\nline2\nline3").unwrap();
        let args = json!({"file_path": f.to_string_lossy()});
        let result = read_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert!(result.contains("line1"));
        assert!(result.contains("line3"));
    }

    #[tokio::test]
    async fn read_file_with_line_range() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("lines.txt");
        let content: String = (1..=100).map(|i| format!("line {i}\n")).collect();
        std::fs::write(&f, &content).unwrap();
        let args = json!({"file_path": f.to_string_lossy(), "start_line": 50, "num_lines": 3});
        let result = read_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert!(result.contains("line 50"));
        assert!(result.contains("line 52"));
        assert!(!result.contains("line 53"));
    }

    #[tokio::test]
    async fn read_file_nonexistent_returns_error() {
        let tmp = tempfile::tempdir().unwrap();
        let args = json!({"file_path": "does_not_exist.txt"});
        let result = read_file(tmp.path(), &args, &cache(), &fs()).await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn read_file_stale_cache_returns_unchanged() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("cached.txt");
        std::fs::write(&f, "original content").unwrap();
        let c = cache();
        let args = json!({"file_path": f.to_string_lossy()});
        // First read returns content; unchanged second read hits the cache.
        let r1 = read_file(tmp.path(), &args, &c, &fs()).await.unwrap();
        assert!(r1.contains("original content"));
        let r2 = read_file(tmp.path(), &args, &c, &fs()).await.unwrap();
        assert!(r2.contains("unchanged"), "expected stale-read: {r2}");
    }

    #[tokio::test]
    async fn read_file_missing_path_arg_errors() {
        let tmp = tempfile::tempdir().unwrap();
        let args = json!({});
        let result = read_file(tmp.path(), &args, &cache(), &fs()).await;
        assert!(result.is_err());
        assert!(
            result.unwrap_err().to_string().contains("file_path"),
            "should mention missing param"
        );
    }

    #[tokio::test]
    async fn read_file_large_file_truncates() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("big.txt");
        let content = "x".repeat(30_000);
        std::fs::write(&f, &content).unwrap();
        let args = json!({"file_path": f.to_string_lossy()});
        let result = read_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert!(result.contains("TRUNCATED"));
        assert!(result.len() < 25_000);
    }

    // Documents that Read (via resolve_read_path) deliberately allows
    // reading outside the project root, unlike write-side resolution.
    #[tokio::test]
    async fn read_file_can_reach_outside_project_root() {
        let outer = tempfile::tempdir().unwrap();
        let secret = outer.path().join("secret.txt");
        std::fs::write(&secret, "outer content").unwrap();
        let project = outer.path().join("project");
        std::fs::create_dir_all(&project).unwrap();
        let args = json!({"file_path": "../secret.txt"});
        let result = read_file(&project, &args, &cache(), &fs()).await;
        assert!(
            result.is_ok(),
            "read outside project root should succeed; got: {:?}",
            result
        );
        assert!(result.unwrap().contains("outer content"));
    }

    #[tokio::test]
    async fn write_file_creates_new() {
        let tmp = tempfile::tempdir().unwrap();
        let args = json!({"file_path": "new_file.txt", "content": "hello world"});
        let result = write_file(tmp.path(), &args, &fs()).await.unwrap();
        assert!(result.contains("Written"));
        assert_eq!(
            std::fs::read_to_string(tmp.path().join("new_file.txt")).unwrap(),
            "hello world"
        );
    }

    #[tokio::test]
    async fn write_file_creates_parent_dirs() {
        let tmp = tempfile::tempdir().unwrap();
        let args = json!({"file_path": "a/b/c/deep.txt", "content": "nested"});
        write_file(tmp.path(), &args, &fs()).await.unwrap();
        assert!(tmp.path().join("a/b/c/deep.txt").exists());
    }

    #[tokio::test]
    async fn write_file_refuses_overwrite_without_flag() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("existing.txt");
        std::fs::write(&f, "original").unwrap();
        let args = json!({"file_path": "existing.txt", "content": "replaced"});
        let result = write_file(tmp.path(), &args, &fs()).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("already exists"));
        // Original content must be untouched after the refusal.
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "original");
    }

    #[tokio::test]
    async fn write_file_overwrites_with_flag() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("overwrite_me.txt");
        std::fs::write(&f, "old").unwrap();
        let args = json!({"file_path": "overwrite_me.txt", "content": "new", "overwrite": true});
        write_file(tmp.path(), &args, &fs()).await.unwrap();
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "new");
    }

    #[tokio::test]
    async fn edit_file_single_replacement() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("edit_me.txt");
        std::fs::write(&f, "hello world\nfoo bar").unwrap();
        let args = json!({
            "file_path": "edit_me.txt",
            "replacements": [{"old_str": "foo", "new_str": "baz"}]
        });
        let result = edit_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert!(result.contains("Applied 1 edit"));
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "hello world\nbaz bar");
    }

    #[tokio::test]
    async fn edit_file_replace_all() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("multi.txt");
        std::fs::write(&f, "aaa bbb aaa ccc aaa").unwrap();
        let args = json!({
            "file_path": "multi.txt",
            "replacements": [{"old_str": "aaa", "new_str": "zzz", "replace_all": true}]
        });
        let result = edit_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert!(result.contains("3 occurrences"));
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "zzz bbb zzz ccc zzz");
    }

    #[tokio::test]
    async fn edit_file_multi_match_errors_with_line_numbers() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("multi_match.txt");
        std::fs::write(&f, "aaa\nbbb\naaa").unwrap();
        let args = json!({
            "file_path": "multi_match.txt",
            "replacements": [{"old_str": "aaa", "new_str": "zzz"}]
        });
        let err = edit_file(tmp.path(), &args, &cache(), &fs())
            .await
            .unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("matches 2 times"), "expected count: {msg}");
        assert!(
            msg.contains("lines"),
            "expected line numbers mention: {msg}"
        );
        // The ambiguous edit must not modify the file.
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "aaa\nbbb\naaa");
    }

    #[tokio::test]
    async fn edit_file_not_found_errors() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("edit_me.txt");
        std::fs::write(&f, "hello world").unwrap();
        let args = json!({
            "file_path": "edit_me.txt",
            "replacements": [{"old_str": "not here", "new_str": "x"}]
        });
        let result = edit_file(tmp.path(), &args, &cache(), &fs()).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("not found"));
    }

    #[tokio::test]
    async fn edit_file_empty_old_str_errors() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("edit.txt");
        std::fs::write(&f, "content").unwrap();
        let args = json!({
            "file_path": "edit.txt",
            "replacements": [{"old_str": "", "new_str": "x"}]
        });
        let result = edit_file(tmp.path(), &args, &cache(), &fs()).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("empty"));
    }

    #[tokio::test]
    async fn edit_file_multiple_replacements() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("multi_edit.txt");
        std::fs::write(&f, "alpha beta gamma").unwrap();
        let args = json!({
            "file_path": "multi_edit.txt",
            "replacements": [
                {"old_str": "alpha", "new_str": "ALPHA"},
                {"old_str": "gamma", "new_str": "GAMMA"}
            ]
        });
        edit_file(tmp.path(), &args, &cache(), &fs()).await.unwrap();
        assert_eq!(std::fs::read_to_string(&f).unwrap(), "ALPHA beta GAMMA");
    }

    #[tokio::test]
    async fn edit_file_staleness_check_rejects_changed_file() {
        use sha2::{Digest, Sha256};
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("staleness.txt");
        let original = "line one\nline two\n";
        std::fs::write(&f, original).unwrap();
        // Seed the cache as if the agent had fully read the original content.
        let hash = format!("{:x}", Sha256::digest(original.as_bytes()));
        let key = format!("{}:None:None", f.display());
        let c = cache();
        {
            let mut g = c.lock().unwrap();
            g.insert(
                key,
                (original.len() as u64, std::time::SystemTime::now(), hash),
            );
        }
        // The file changes underneath the agent before it edits.
        std::fs::write(&f, "completely different content\n").unwrap();
        let args = json!({
            "file_path": "staleness.txt",
            "replacements": [{"old_str": "line one", "new_str": "LINE ONE"}]
        });
        let err = edit_file(tmp.path(), &args, &c, &fs()).await.unwrap_err();
        let msg = err.to_string();
        assert!(
            msg.contains("changed on disk") || msg.contains("SHA-256"),
            "expected staleness error: {msg}"
        );
        // The rejected edit must not touch the on-disk content.
        assert_eq!(
            std::fs::read_to_string(&f).unwrap(),
            "completely different content\n"
        );
    }

    #[tokio::test]
    async fn delete_file_removes_file() {
        let tmp = tempfile::tempdir().unwrap();
        let f = tmp.path().join("doomed.txt");
        std::fs::write(&f, "goodbye").unwrap();
        let args = json!({"file_path": "doomed.txt"});
        let result = delete_file(tmp.path(), &args).await.unwrap();
        assert!(result.contains("Deleted"));
        assert!(!f.exists());
    }

    #[tokio::test]
    async fn delete_file_nonexistent_errors() {
        let tmp = tempfile::tempdir().unwrap();
        let args = json!({"file_path": "nope.txt"});
        let result = delete_file(tmp.path(), &args).await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn list_files_basic() {
        let tmp = tempfile::tempdir().unwrap();
        std::fs::write(tmp.path().join("a.txt"), "").unwrap();
        std::fs::write(tmp.path().join("b.txt"), "").unwrap();
        std::fs::create_dir(tmp.path().join("subdir")).unwrap();
        // Use the real parameter name: list_files reads "file_path"/"path";
        // the previous "directory" key was silently ignored and only worked
        // via the "." default.
        let args = json!({"file_path": "."});
        let result = list_files(tmp.path(), &args, 200).await.unwrap();
        assert!(result.contains("a.txt"));
        assert!(result.contains("b.txt"));
        assert!(result.contains("subdir"));
    }

    #[tokio::test]
    async fn list_files_capped() {
        let tmp = tempfile::tempdir().unwrap();
        for i in 0..20 {
            std::fs::write(tmp.path().join(format!("file_{i}.txt")), "").unwrap();
        }
        let args = json!({"file_path": "."});
        let result = list_files(tmp.path(), &args, 5).await.unwrap();
        assert!(result.contains("CAPPED"), "expected cap message: {result}");
    }

    #[tokio::test]
    async fn list_files_includes_directory_header() {
        let tmp = tempfile::tempdir().unwrap();
        std::fs::write(tmp.path().join("a.txt"), "").unwrap();
        // Use the real parameter name (see list_files_basic).
        let args = json!({"file_path": "."});
        let result = list_files(tmp.path(), &args, 200).await.unwrap();
        let first_line = result.lines().next().unwrap();
        assert!(
            first_line.starts_with("Listing: "),
            "first line must be a `Listing: ...` header, got: {first_line:?}"
        );
        assert!(
            first_line.contains("(1 entries)"),
            "header must include entry count, got: {first_line:?}"
        );
    }

    #[tokio::test]
    async fn list_files_empty_directory_still_has_header() {
        let tmp = tempfile::tempdir().unwrap();
        let empty = tmp.path().join("empty_subdir");
        std::fs::create_dir(&empty).unwrap();
        let args = json!({"file_path": "empty_subdir"});
        let result = list_files(tmp.path(), &args, 200).await.unwrap();
        let mut lines = result.lines();
        let header = lines.next().unwrap();
        assert!(
            header.starts_with("Listing: ") && header.contains("empty_subdir"),
            "empty-dir header must name the directory, got: {header:?}"
        );
        assert_eq!(lines.next(), Some("(empty directory)"));
        assert!(
            !header.contains("entries"),
            "empty header should omit count, got: {header:?}"
        );
    }

    #[tokio::test]
    async fn list_files_sorts_dirs_first_then_alphabetical() {
        let tmp = tempfile::tempdir().unwrap();
        std::fs::write(tmp.path().join("zzz.txt"), "").unwrap();
        std::fs::create_dir(tmp.path().join("yankee")).unwrap();
        std::fs::write(tmp.path().join("aaa.txt"), "").unwrap();
        std::fs::create_dir(tmp.path().join("alpha")).unwrap();
        // Use the real parameter name (see list_files_basic).
        let args = json!({"file_path": "."});
        let result = list_files(tmp.path(), &args, 200).await.unwrap();
        let body: Vec<&str> = result.lines().skip(1).collect();
        assert_eq!(
            body,
            vec!["d alpha", "d yankee", "  aaa.txt", "  zzz.txt"],
            "entries must be sorted dirs-first then alphabetical: {result}"
        );
    }

    #[tokio::test]
    async fn list_files_capped_output_carries_header() {
        let tmp = tempfile::tempdir().unwrap();
        for i in 0..20 {
            std::fs::write(tmp.path().join(format!("file_{i:02}.txt")), "").unwrap();
        }
        let args = json!({"file_path": "."});
        let result = list_files(tmp.path(), &args, 5).await.unwrap();
        let first_line = result.lines().next().unwrap();
        assert!(
            first_line.starts_with("Listing: "),
            "capped output must still start with header: {first_line:?}"
        );
        assert!(
            result.contains("CAPPED"),
            "capped output must still include cap message: {result}"
        );
    }
}