use crate::tools::safe_resolve_path;
use similar::{ChangeTag, TextDiff};
use std::path::Path;
/// Number of unchanged context lines grouped around each diff hunk.
const CONTEXT_LINES: usize = 3;
/// Cap on the total number of rendered diff lines before hunk output is truncated.
const MAX_DIFF_LINES: usize = 120;
/// Cap on how many leading lines of a brand-new file are included in a preview.
const MAX_WRITE_NEW_LINES: usize = 60;
/// Every preview shape a mutating tool call can produce, internally tagged
/// with a `"kind"` field when serialized.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(tag = "kind")]
pub enum DiffPreview {
    /// Line-level diff for an edit or overwrite of an existing file.
    UnifiedDiff(UnifiedDiffPreview),
    /// Summary of a file that does not exist yet and is about to be created.
    WriteNew(WriteNewPreview),
    /// Summary of an existing file about to be deleted.
    DeleteFile(DeleteFilePreview),
    /// Summary of a directory about to be deleted.
    DeleteDir(DeleteDirPreview),
    /// An Edit targeted a path that does not exist.
    FileNotYetExists,
    /// A Delete targeted a path that does not exist.
    PathNotFound,
}
/// A unified diff between a file's current contents and its proposed contents.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct UnifiedDiffPreview {
    /// Path as supplied in the tool arguments (not the resolved absolute path).
    pub path: String,
    /// Full contents before the change.
    pub old_content: String,
    /// Full contents after the change.
    pub new_content: String,
    /// Change hunks, grouped with surrounding context lines.
    pub hunks: Vec<DiffHunk>,
    /// True when later hunks were dropped after exceeding `MAX_DIFF_LINES`.
    pub truncated: bool,
}
/// One contiguous group of changes, equivalent to a `@@` hunk in a
/// classic unified diff.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DiffHunk {
    /// 1-based first line of the hunk in the old file.
    pub old_start: usize,
    /// Old-file lines covered by the hunk (context + deletions).
    pub old_count: usize,
    /// 1-based first line of the hunk in the new file.
    pub new_start: usize,
    /// New-file lines covered by the hunk (context + insertions).
    pub new_count: usize,
    /// The hunk's lines in display order.
    pub lines: Vec<DiffLine>,
}
/// A single rendered line inside a diff hunk.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DiffLine {
    /// Whether the line is unchanged, inserted, or deleted.
    pub tag: DiffTag,
    /// Line text with the trailing newline stripped.
    pub content: String,
    /// 1-based line number in the old file; `None` for insertions.
    pub old_line: Option<usize>,
    /// 1-based line number in the new file; `None` for deletions.
    pub new_line: Option<usize>,
}
/// Classification of a diff line, mirroring `similar::ChangeTag`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum DiffTag {
    /// Line present and unchanged on both sides.
    Context,
    /// Line added in the new content.
    Insert,
    /// Line removed from the old content.
    Delete,
}
/// Summary of a brand-new file about to be written.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct WriteNewPreview {
    /// Path as supplied in the tool arguments.
    pub path: String,
    /// Total number of lines in the content to be written.
    pub line_count: usize,
    /// Total byte length of the content to be written.
    pub byte_count: usize,
    /// Up to `MAX_WRITE_NEW_LINES` leading lines of the content.
    pub first_lines: Vec<String>,
    /// True when `first_lines` omits trailing content.
    pub truncated: bool,
}
/// Summary of an existing file about to be deleted.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DeleteFilePreview {
    /// Line count of the file; 0 when the file could not be read as text.
    pub line_count: usize,
    /// File size in bytes, taken from filesystem metadata.
    pub byte_count: u64,
}
/// Summary of a directory about to be deleted.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DeleteDirPreview {
    /// The `recursive` flag echoed from the tool arguments (defaults to false).
    pub recursive: bool,
}
/// Computes a [`DiffPreview`] for a mutating tool invocation.
///
/// Only `Edit`, `Write`, and `Delete` are previewable; any other tool
/// name yields `None`, as do malformed arguments for a supported tool.
pub async fn compute(
    tool_name: &str,
    args: &serde_json::Value,
    project_root: &Path,
) -> Option<DiffPreview> {
    if tool_name == "Edit" {
        return preview_edit(args, project_root).await;
    }
    if tool_name == "Write" {
        return preview_write(args, project_root).await;
    }
    if tool_name == "Delete" {
        return preview_delete(args, project_root).await;
    }
    None
}
/// Builds a structured unified diff between `old_content` and `new_content`.
///
/// Hunks are grouped with `CONTEXT_LINES` lines of surrounding context. Once
/// the accumulated line total exceeds `MAX_DIFF_LINES`, remaining hunks are
/// dropped and `truncated` is set; the hunk that crossed the cap is still
/// included in full. All line numbers in the result are 1-based.
fn build_unified_diff(path: &str, old_content: &str, new_content: &str) -> UnifiedDiffPreview {
    let diff = TextDiff::from_lines(old_content, new_content);
    let mut hunks = Vec::new();
    let mut total_lines = 0usize;
    let mut truncated = false;
    for group in diff.grouped_ops(CONTEXT_LINES) {
        let mut hunk_lines = Vec::new();
        let mut old_start = 0;
        let mut new_start = 0;
        let mut old_count = 0;
        let mut new_count = 0;
        let mut first = true;
        for op in &group {
            // The hunk's start positions come from the first op of the group,
            // converted from 0-based ranges to 1-based line numbers.
            if first {
                old_start = op.old_range().start + 1;
                new_start = op.new_range().start + 1;
                first = false;
            }
            for change in diff.iter_changes(op) {
                // Strip the trailing newline so the consumer controls line breaks.
                let content = change.value().trim_end_matches('\n').to_string();
                let (tag, old_line, new_line) = match change.tag() {
                    ChangeTag::Equal => {
                        // Context lines count toward both sides of the hunk.
                        old_count += 1;
                        new_count += 1;
                        (
                            DiffTag::Context,
                            change.old_index().map(|i| i + 1),
                            change.new_index().map(|i| i + 1),
                        )
                    }
                    ChangeTag::Delete => {
                        old_count += 1;
                        (DiffTag::Delete, change.old_index().map(|i| i + 1), None)
                    }
                    ChangeTag::Insert => {
                        new_count += 1;
                        (DiffTag::Insert, None, change.new_index().map(|i| i + 1))
                    }
                };
                hunk_lines.push(DiffLine {
                    tag,
                    content,
                    old_line,
                    new_line,
                });
            }
        }
        total_lines += hunk_lines.len();
        hunks.push(DiffHunk {
            old_start,
            old_count,
            new_start,
            new_count,
            lines: hunk_lines,
        });
        // Truncation happens between hunks only: the hunk that pushed the
        // total past the cap has already been stored in full above.
        if total_lines > MAX_DIFF_LINES {
            truncated = true;
            break;
        }
    }
    UnifiedDiffPreview {
        path: path.to_string(),
        old_content: old_content.to_string(),
        new_content: new_content.to_string(),
        hunks,
        truncated,
    }
}
/// Previews an `Edit` tool call as a unified diff against the file's
/// current contents, applying each requested replacement in order.
///
/// Returns [`DiffPreview::FileNotYetExists`] when the target path does not
/// exist; `None` when arguments are malformed or the file is unreadable.
async fn preview_edit(args: &serde_json::Value, project_root: &Path) -> Option<DiffPreview> {
    // Tool arguments may arrive wrapped in a "payload" envelope.
    let payload = args.get("payload").unwrap_or(args);
    // Accept both "path" and the alternate "file_path" key.
    let target = payload
        .get("path")
        .or(payload.get("file_path"))
        .and_then(serde_json::Value::as_str)?;
    let edits = payload.get("replacements")?.as_array()?;
    let abs = safe_resolve_path(project_root, target).ok()?;
    if !abs.exists() {
        return Some(DiffPreview::FileNotYetExists);
    }
    let before = tokio::fs::read_to_string(&abs).await.ok()?;
    let mut after = before.clone();
    for edit in edits {
        let needle = edit.get("old_str")?.as_str()?;
        let substitute = match edit.get("new_str").and_then(|v| v.as_str()) {
            Some(s) => s,
            None => "",
        };
        // Replace only the first occurrence; a needle that does not match
        // the evolving content is silently skipped (best-effort preview).
        if let Some(start) = after.find(needle) {
            after.replace_range(start..start + needle.len(), substitute);
        }
    }
    Some(DiffPreview::UnifiedDiff(build_unified_diff(
        target, &before, &after,
    )))
}
/// Previews a `Write` tool call: a unified diff when the target already
/// exists, otherwise a summary of the new file about to be created.
async fn preview_write(args: &serde_json::Value, project_root: &Path) -> Option<DiffPreview> {
    // Tool arguments may arrive wrapped in a "payload" envelope.
    let payload = args.get("payload").unwrap_or(args);
    let target = payload
        .get("path")
        .or(payload.get("file_path"))
        .and_then(|v| v.as_str())?;
    let new_text = payload.get("content").and_then(|v| v.as_str())?;
    let abs = safe_resolve_path(project_root, target).ok()?;
    if !abs.exists() {
        // Brand-new file: show at most MAX_WRITE_NEW_LINES leading lines.
        let all_lines: Vec<&str> = new_text.lines().collect();
        let total = all_lines.len();
        let shown = total.min(MAX_WRITE_NEW_LINES);
        return Some(DiffPreview::WriteNew(WriteNewPreview {
            path: target.to_string(),
            line_count: total,
            byte_count: new_text.len(),
            first_lines: all_lines
                .iter()
                .take(shown)
                .map(|s| s.to_string())
                .collect(),
            truncated: total > MAX_WRITE_NEW_LINES,
        }));
    }
    // Overwriting an existing file: diff the old contents against the new.
    let old_text = tokio::fs::read_to_string(&abs).await.ok()?;
    Some(DiffPreview::UnifiedDiff(build_unified_diff(
        target, &old_text, new_text,
    )))
}
/// Previews a `Delete` tool call.
///
/// Returns [`DiffPreview::PathNotFound`] when the target does not exist, a
/// [`DeleteFilePreview`] (line/byte counts) for regular files, and a
/// [`DeleteDirPreview`] for directories. Metadata that is neither file nor
/// directory yields `None`.
async fn preview_delete(args: &serde_json::Value, project_root: &Path) -> Option<DiffPreview> {
    // Tool arguments may arrive wrapped in a "payload" envelope; all field
    // lookups below must go through the unwrapped value.
    let inner = args.get("payload").unwrap_or(args);
    let path_str = inner
        .get("path")
        .or(inner.get("file_path"))
        .and_then(|v| v.as_str())?;
    let resolved = safe_resolve_path(project_root, path_str).ok()?;
    if !resolved.exists() {
        return Some(DiffPreview::PathNotFound);
    }
    let meta = tokio::fs::metadata(&resolved).await.ok()?;
    if meta.is_file() {
        // Files that cannot be read as text still get a preview; only the
        // line count degrades to 0.
        let line_count = tokio::fs::read_to_string(&resolved)
            .await
            .map(|c| c.lines().count())
            .unwrap_or(0);
        Some(DiffPreview::DeleteFile(DeleteFilePreview {
            line_count,
            byte_count: meta.len(),
        }))
    } else if meta.is_dir() {
        // BUGFIX: read "recursive" from the payload-unwrapped `inner`, not
        // the raw `args`, so payload-wrapped invocations honor the flag.
        // For unwrapped args `inner` aliases `args`, so behavior there is
        // unchanged.
        let recursive = inner
            .get("recursive")
            .and_then(|v| v.as_bool())
            .unwrap_or(false);
        Some(DiffPreview::DeleteDir(DeleteDirPreview { recursive }))
    } else {
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;
    use tempfile::TempDir;

    // An Edit with one replacement yields a single hunk containing the
    // deleted line, the inserted line, and surrounding context.
    #[tokio::test]
    async fn test_edit_produces_unified_diff() {
        let tmp = TempDir::new().unwrap();
        let file = tmp.path().join("test.rs");
        std::fs::write(&file, "fn main() {\n println!(\"hello\");\n}\n").unwrap();
        let args = json!({
            "path": file.to_str().unwrap(),
            "replacements": [{
                "old_str": "println!(\"hello\");",
                "new_str": "println!(\"world\");"
            }]
        });
        let preview = compute("Edit", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::UnifiedDiff(diff) => {
                assert_eq!(diff.hunks.len(), 1);
                let hunk = &diff.hunks[0];
                let tags: Vec<_> = hunk.lines.iter().map(|l| l.tag).collect();
                assert!(tags.contains(&DiffTag::Delete));
                assert!(tags.contains(&DiffTag::Insert));
                assert!(tags.contains(&DiffTag::Context));
                let del = hunk
                    .lines
                    .iter()
                    .find(|l| l.tag == DiffTag::Delete)
                    .unwrap();
                assert!(del.content.contains("hello"));
                let ins = hunk
                    .lines
                    .iter()
                    .find(|l| l.tag == DiffTag::Insert)
                    .unwrap();
                assert!(ins.content.contains("world"));
            }
            other => panic!("expected UnifiedDiff, got {other:?}"),
        }
    }

    // Two replacements far apart (beyond the 3 context lines) must produce
    // two separate hunks rather than one merged hunk.
    #[tokio::test]
    async fn test_edit_multiple_replacements() {
        let tmp = TempDir::new().unwrap();
        let file = tmp.path().join("test.rs");
        let content: String = (1..=20).map(|i| format!("line {i}\n")).collect();
        std::fs::write(&file, &content).unwrap();
        let args = json!({
            "path": file.to_str().unwrap(),
            "replacements": [
                { "old_str": "line 2", "new_str": "LINE TWO" },
                { "old_str": "line 19", "new_str": "LINE NINETEEN" }
            ]
        });
        let preview = compute("Edit", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::UnifiedDiff(diff) => {
                assert_eq!(
                    diff.hunks.len(),
                    2,
                    "expected 2 hunks, got {:?}",
                    diff.hunks
                );
            }
            other => panic!("expected UnifiedDiff, got {other:?}"),
        }
    }

    // Writing a nonexistent path (relative to the project root) previews
    // as WriteNew, not as a diff.
    #[tokio::test]
    async fn test_write_new_file() {
        let tmp = TempDir::new().unwrap();
        let args = json!({
            "path": "new_file.rs",
            "content": "fn main() {}\n"
        });
        let preview = compute("Write", &args, tmp.path()).await.unwrap();
        assert!(matches!(preview, DiffPreview::WriteNew(_)));
    }

    // Writing over an existing file previews as a diff of old vs. new content.
    #[tokio::test]
    async fn test_write_overwrite_produces_unified_diff() {
        let tmp = TempDir::new().unwrap();
        let file = tmp.path().join("existing.rs");
        std::fs::write(&file, "old content\n").unwrap();
        let args = json!({
            "path": file.to_str().unwrap(),
            "content": "new content\nline 2\n"
        });
        let preview = compute("Write", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::UnifiedDiff(diff) => {
                assert!(!diff.hunks.is_empty());
            }
            other => panic!("expected UnifiedDiff for overwrite, got {other:?}"),
        }
    }

    // Deleting an existing regular file previews as DeleteFile.
    #[tokio::test]
    async fn test_delete_file() {
        let tmp = TempDir::new().unwrap();
        let file = tmp.path().join("doomed.rs");
        std::fs::write(&file, "goodbye\n").unwrap();
        let args = json!({ "path": file.to_str().unwrap() });
        let preview = compute("Delete", &args, tmp.path()).await.unwrap();
        assert!(matches!(preview, DiffPreview::DeleteFile(_)));
    }

    // Non-mutating / unsupported tools produce no preview at all.
    #[tokio::test]
    async fn test_unknown_tool_returns_none() {
        let tmp = TempDir::new().unwrap();
        let args = json!({"path": "anything.rs"});
        let preview = compute("Read", &args, tmp.path()).await;
        assert!(preview.is_none());
    }

    // Editing a path that does not exist reports FileNotYetExists
    // instead of failing silently.
    #[tokio::test]
    async fn test_edit_missing_file() {
        let tmp = TempDir::new().unwrap();
        let args = json!({
            "path": "nonexistent.rs",
            "replacements": [{ "old_str": "x", "new_str": "y" }]
        });
        let preview = compute("Edit", &args, tmp.path()).await.unwrap();
        assert!(matches!(preview, DiffPreview::FileNotYetExists));
    }

    // Every diff line carries at least one side's line number (inserts
    // have only new_line, deletes only old_line, context has both).
    #[tokio::test]
    async fn test_unified_diff_has_line_numbers() {
        let tmp = TempDir::new().unwrap();
        let file = tmp.path().join("test.txt");
        std::fs::write(&file, "a\nb\nc\nd\ne\n").unwrap();
        let args = json!({
            "path": file.to_str().unwrap(),
            "replacements": [{ "old_str": "c", "new_str": "C" }]
        });
        let preview = compute("Edit", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::UnifiedDiff(diff) => {
                let hunk = &diff.hunks[0];
                for line in &hunk.lines {
                    assert!(
                        line.old_line.is_some() || line.new_line.is_some(),
                        "line should have a line number: {line:?}"
                    );
                }
            }
            other => panic!("expected UnifiedDiff, got {other:?}"),
        }
    }

    // Deleting a non-empty directory with recursive=true previews as
    // DeleteDir and echoes the flag.
    #[tokio::test]
    async fn test_delete_dir() {
        let tmp = TempDir::new().unwrap();
        let dir = tmp.path().join("subdir");
        std::fs::create_dir(&dir).unwrap();
        std::fs::write(dir.join("file.txt"), "content").unwrap();
        let args = json!({ "path": dir.to_str().unwrap(), "recursive": true });
        let preview = compute("Delete", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::DeleteDir(d) => assert!(d.recursive),
            other => panic!("expected DeleteDir, got {other:?}"),
        }
    }

    // A missing "recursive" key defaults to false.
    #[tokio::test]
    async fn test_delete_dir_non_recursive() {
        let tmp = TempDir::new().unwrap();
        let dir = tmp.path().join("emptydir");
        std::fs::create_dir(&dir).unwrap();
        let args = json!({ "path": dir.to_str().unwrap() });
        let preview = compute("Delete", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::DeleteDir(d) => assert!(!d.recursive),
            other => panic!("expected DeleteDir, got {other:?}"),
        }
    }

    // Deleting a nonexistent path reports PathNotFound.
    #[tokio::test]
    async fn test_delete_nonexistent_path() {
        let tmp = TempDir::new().unwrap();
        let args = json!({ "path": "nonexistent_file.rs" });
        let preview = compute("Delete", &args, tmp.path()).await.unwrap();
        assert!(matches!(preview, DiffPreview::PathNotFound));
    }

    // A 100-line new file is capped at MAX_WRITE_NEW_LINES (60) preview
    // lines, with line_count still reporting the full total.
    #[tokio::test]
    async fn test_write_new_file_truncates_long_content() {
        let tmp = TempDir::new().unwrap();
        let content: String = (1..=100).map(|i| format!("line {i}\n")).collect();
        let args = json!({ "path": "big_new_file.rs", "content": content });
        let preview = compute("Write", &args, tmp.path()).await.unwrap();
        match preview {
            DiffPreview::WriteNew(w) => {
                assert_eq!(w.line_count, 100);
                assert_eq!(w.first_lines.len(), 60);
                assert!(w.truncated);
            }
            other => panic!("expected WriteNew, got {other:?}"),
        }
    }

    // A 200-line full rewrite exceeds MAX_DIFF_LINES, so the preview is
    // marked truncated.
    #[tokio::test]
    async fn test_build_unified_diff_truncates_large_diffs() {
        let old: String = (1..=200).map(|i| format!("old line {i}\n")).collect();
        let new: String = (1..=200).map(|i| format!("new line {i}\n")).collect();
        let diff = build_unified_diff("test.txt", &old, &new);
        assert!(diff.truncated, "large diff should be truncated");
    }
}