1use std::sync::Arc;
4
5use async_trait::async_trait;
6use serde_json::json;
7use similar::{ChangeTag, TextDiff};
8use tokio::sync::mpsc;
9
10use soul_core::error::SoulResult;
11use soul_core::tool::{Tool, ToolOutput};
12use soul_core::types::ToolDefinition;
13use soul_core::vfs::VirtualFs;
14
15use super::resolve_path;
16
/// Tool that performs a unique text replacement in a file, with a fuzzy
/// fallback (smart-quote/dash normalization, trailing-whitespace stripping)
/// when the exact text is not found.
pub struct EditTool {
    // Virtual filesystem all reads and writes go through.
    fs: Arc<dyn VirtualFs>,
    // Working directory that relative `path` arguments are resolved against.
    cwd: String,
}
21
22impl EditTool {
23 pub fn new(fs: Arc<dyn VirtualFs>, cwd: impl Into<String>) -> Self {
24 Self {
25 fs,
26 cwd: cwd.into(),
27 }
28 }
29}
30
/// Normalize text for fuzzy matching: per line, strip trailing whitespace and
/// map typographic characters (smart quotes, en/em dashes, non-breaking and
/// narrow no-break spaces) to their ASCII equivalents.
///
/// Lines are rejoined with `\n`, so any trailing newline and original line
/// endings are not preserved — callers compare normalized-to-normalized only.
fn normalize_for_fuzzy(text: &str) -> String {
    text.lines()
        .map(|line| {
            // Single pass over the chars instead of eight chained `replace`
            // calls (each of which allocated a fresh String per line).
            line.trim_end()
                .chars()
                .map(|c| match c {
                    '\u{2018}' | '\u{2019}' => '\'', // smart single quotes
                    '\u{201C}' | '\u{201D}' => '"',  // smart double quotes
                    '\u{2013}' | '\u{2014}' => '-',  // en / em dash
                    '\u{00A0}' | '\u{202F}' => ' ',  // (narrow) no-break space
                    other => other,
                })
                .collect::<String>()
        })
        .collect::<Vec<_>>()
        .join("\n")
}
50
51fn unified_diff(old: &str, new: &str, path: &str) -> String {
53 let diff = TextDiff::from_lines(old, new);
54 let mut output = format!("--- a/{}\n+++ b/{}\n", path, path);
55
56 let mut udiff = diff.unified_diff();
57 output.push_str(&udiff.header("", "").to_string());
58
59 for change in diff.iter_all_changes() {
60 let sign = match change.tag() {
61 ChangeTag::Delete => "-",
62 ChangeTag::Insert => "+",
63 ChangeTag::Equal => " ",
64 };
65 output.push_str(&format!("{}{}", sign, change));
66 if change.missing_newline() {
67 output.push('\n');
68 }
69 }
70
71 output
72}
73
74#[async_trait]
75impl Tool for EditTool {
76 fn name(&self) -> &str {
77 "edit"
78 }
79
80 fn definition(&self) -> ToolDefinition {
81 ToolDefinition {
82 name: "edit".into(),
83 description: "Perform an exact text replacement in a file. The old text must match uniquely. Falls back to fuzzy matching (smart quote normalization, trailing whitespace) if exact match fails.".into(),
84 input_schema: json!({
85 "type": "object",
86 "properties": {
87 "path": {
88 "type": "string",
89 "description": "File path to edit"
90 },
91 "old": {
92 "type": "string",
93 "description": "Exact text to find and replace"
94 },
95 "new": {
96 "type": "string",
97 "description": "Replacement text"
98 }
99 },
100 "required": ["path", "old", "new"]
101 }),
102 }
103 }
104
105 async fn execute(
106 &self,
107 _call_id: &str,
108 arguments: serde_json::Value,
109 _partial_tx: Option<mpsc::UnboundedSender<String>>,
110 ) -> SoulResult<ToolOutput> {
111 let path = arguments
112 .get("path")
113 .and_then(|v| v.as_str())
114 .unwrap_or("");
115 let old_text = arguments
116 .get("old")
117 .and_then(|v| v.as_str())
118 .unwrap_or("");
119 let new_text = arguments
120 .get("new")
121 .and_then(|v| v.as_str())
122 .unwrap_or("");
123
124 if path.is_empty() {
125 return Ok(ToolOutput::error("Missing required parameter: path"));
126 }
127 if old_text.is_empty() {
128 return Ok(ToolOutput::error("Missing required parameter: old"));
129 }
130 if old_text == new_text {
131 return Ok(ToolOutput::error(
132 "old and new text are identical — no change would occur",
133 ));
134 }
135
136 let resolved = resolve_path(&self.cwd, path);
137
138 let exists = self.fs.exists(&resolved).await?;
139 if !exists {
140 return Ok(ToolOutput::error(format!("File not found: {}", path)));
141 }
142
143 let content = match self.fs.read_to_string(&resolved).await {
144 Ok(c) => c,
145 Err(e) => return Ok(ToolOutput::error(format!("Failed to read {}: {}", path, e))),
146 };
147
148 let matches: Vec<_> = content.match_indices(old_text).collect();
150
151 let (new_content, method) = if matches.len() == 1 {
152 (content.replacen(old_text, new_text, 1), "exact")
153 } else if matches.len() > 1 {
154 return Ok(ToolOutput::error(format!(
155 "Found {} occurrences of the old text — must be unique. Provide more context to disambiguate.",
156 matches.len()
157 )));
158 } else {
159 let norm_content = normalize_for_fuzzy(&content);
161 let norm_old = normalize_for_fuzzy(old_text);
162
163 let fuzzy_matches: Vec<_> = norm_content.match_indices(&norm_old).collect();
164
165 if fuzzy_matches.len() == 1 {
166 let fuzzy_pos = fuzzy_matches[0].0;
168 let norm_lines_before = norm_content[..fuzzy_pos].lines().count();
170 let original_lines: Vec<&str> = content.lines().collect();
171 let search_lines: Vec<&str> = old_text.lines().collect();
172
173 if norm_lines_before > 0 && norm_lines_before <= original_lines.len() {
174 let start_line = norm_lines_before.saturating_sub(1);
175 let end_line = (start_line + search_lines.len()).min(original_lines.len());
176 let original_section = original_lines[start_line..end_line].join("\n");
177 (content.replacen(&original_section, new_text, 1), "fuzzy")
178 } else {
179 let result = norm_content.replacen(&norm_old, new_text, 1);
181 (result, "fuzzy")
182 }
183 } else if fuzzy_matches.len() > 1 {
184 return Ok(ToolOutput::error(format!(
185 "Found {} fuzzy occurrences — must be unique. Provide more context.",
186 fuzzy_matches.len()
187 )));
188 } else {
189 return Ok(ToolOutput::error(
190 "Text not found in file (tried exact and fuzzy matching). Verify the old text matches the file content.",
191 ));
192 }
193 };
194
195 match self.fs.write(&resolved, &new_content).await {
197 Ok(()) => {
198 let diff = unified_diff(&content, &new_content, path);
199 let first_changed_line = content
201 .lines()
202 .zip(new_content.lines())
203 .enumerate()
204 .find(|(_, (a, b))| a != b)
205 .map(|(i, _)| i + 1)
206 .unwrap_or(1);
207
208 Ok(ToolOutput::success(format!(
209 "Applied edit to {} ({})\n\n{}",
210 path, method, diff
211 ))
212 .with_metadata(json!({
213 "method": method,
214 "first_changed_line": first_changed_line,
215 "path": path,
216 })))
217 }
218 Err(e) => Ok(ToolOutput::error(format!(
219 "Failed to write {}: {}",
220 path, e
221 ))),
222 }
223 }
224}
225
#[cfg(test)]
mod tests {
    use super::*;
    use soul_core::vfs::MemoryFs;

    /// Build an in-memory filesystem and an `EditTool` rooted at `/project`.
    /// Returns the fs handle too so tests can seed and inspect file contents.
    async fn setup() -> (Arc<MemoryFs>, EditTool) {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs.clone() as Arc<dyn VirtualFs>, "/project");
        (fs, tool)
    }

    // A unique exact match is replaced in place and reported as "exact".
    #[tokio::test]
    async fn exact_replacement() {
        let (fs, tool) = setup().await;
        fs.write("/project/code.rs", "fn main() {\n    println!(\"hello\");\n}")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c1",
                json!({
                    "path": "code.rs",
                    "old": "println!(\"hello\")",
                    "new": "println!(\"world\")"
                }),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("exact"));
        let content = fs.read_to_string("/project/code.rs").await.unwrap();
        assert!(content.contains("world"));
        assert!(!content.contains("hello"));
    }

    // `old` uses an ASCII apostrophe but the file has U+2019; the edit should
    // succeed via the fuzzy (normalized) path and report "fuzzy".
    #[tokio::test]
    async fn fuzzy_smart_quotes() {
        let (fs, tool) = setup().await;
        fs.write("/project/quotes.txt", "It\u{2019}s a test")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c2",
                json!({
                    "path": "quotes.txt",
                    "old": "It's a test",
                    "new": "It is a test"
                }),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("fuzzy"));
    }

    // Ambiguous `old` text (three occurrences) must be rejected with an
    // error mentioning the occurrence count, and the file left untouched.
    #[tokio::test]
    async fn multiple_matches_error() {
        let (fs, tool) = setup().await;
        fs.write("/project/dup.txt", "hello hello hello")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c3",
                json!({"path": "dup.txt", "old": "hello", "new": "world"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("occurrences"));
    }

    // `old` text absent from the file (exact AND fuzzy) yields a
    // "not found" error.
    #[tokio::test]
    async fn text_not_found() {
        let (fs, tool) = setup().await;
        fs.write("/project/missing.txt", "something else")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c4",
                json!({"path": "missing.txt", "old": "nothere", "new": "replacement"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("not found"));
    }

    // old == new is a no-op edit and must be rejected up front.
    #[tokio::test]
    async fn identical_old_new() {
        let (fs, tool) = setup().await;
        fs.write("/project/same.txt", "content").await.unwrap();

        let result = tool
            .execute(
                "c5",
                json!({"path": "same.txt", "old": "content", "new": "content"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("identical"));
    }

    // Editing a path that does not exist on the fs is an error.
    #[tokio::test]
    async fn file_not_found() {
        let (_fs, tool) = setup().await;
        let result = tool
            .execute(
                "c6",
                json!({"path": "nope.txt", "old": "a", "new": "b"}),
                None,
            )
            .await
            .unwrap();
        assert!(result.is_error);
    }

    // Successful edits embed a diff showing the removed and added lines.
    #[tokio::test]
    async fn diff_output() {
        let (fs, tool) = setup().await;
        fs.write("/project/diff.txt", "line1\nline2\nline3")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c7",
                json!({"path": "diff.txt", "old": "line2", "new": "modified"}),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("-line2"));
        assert!(result.content.contains("+modified"));
    }

    // Smart single and double quotes normalize to ASCII quotes.
    #[test]
    fn normalize_fuzzy_quotes() {
        let input = "\u{201C}hello\u{201D} \u{2018}world\u{2019}";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "\"hello\" 'world'");
    }

    // En dash and em dash both normalize to ASCII hyphen.
    #[test]
    fn normalize_fuzzy_dashes() {
        let input = "a\u{2013}b\u{2014}c";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "a-b-c");
    }

    // Trailing whitespace is stripped per line during normalization.
    #[test]
    fn normalize_fuzzy_trailing_whitespace() {
        let input = "hello   \nworld  ";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "hello\nworld");
    }

    // Sanity-check the tool's advertised name and definition metadata.
    #[test]
    fn tool_name_and_definition() {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs as Arc<dyn VirtualFs>, "/");
        assert_eq!(tool.name(), "edit");
        let def = tool.definition();
        assert_eq!(def.name, "edit");
    }
}
410}