1use std::sync::Arc;
4
5use async_trait::async_trait;
6use serde_json::json;
7use similar::{ChangeTag, TextDiff};
8use tokio::sync::mpsc;
9
10use soul_core::error::SoulResult;
11use soul_core::tool::{Tool, ToolOutput};
12use soul_core::types::ToolDefinition;
13use soul_core::vfs::VirtualFs;
14
15use super::resolve_path;
16
/// Tool that applies a unique exact-text replacement to a file, falling
/// back to fuzzy matching (smart-quote/dash/NBSP normalization, trailing
/// whitespace stripping) when the exact text is not found.
pub struct EditTool {
    // Filesystem abstraction all reads/writes go through.
    fs: Arc<dyn VirtualFs>,
    // Base directory that relative `path` arguments are resolved against
    // (via `resolve_path`).
    cwd: String,
}
21
22impl EditTool {
23 pub fn new(fs: Arc<dyn VirtualFs>, cwd: impl Into<String>) -> Self {
24 Self {
25 fs,
26 cwd: cwd.into(),
27 }
28 }
29}
30
/// Normalize text for fuzzy matching: strip trailing whitespace from each
/// line and fold common typographic characters (curly quotes, en/em
/// dashes, non-breaking spaces) to their ASCII equivalents.
///
/// Line count and order are preserved and every substitution is a single
/// char for a single char, so callers may map positions in the normalized
/// text back to lines of the original.
fn normalize_for_fuzzy(text: &str) -> String {
    // One pass per line instead of eight chained `.replace()` calls,
    // which allocated eight intermediate Strings per line.
    fn fold(c: char) -> char {
        match c {
            '\u{2018}' | '\u{2019}' => '\'', // curly single quotes
            '\u{201C}' | '\u{201D}' => '"',  // curly double quotes
            '\u{2013}' | '\u{2014}' => '-',  // en dash / em dash
            '\u{00A0}' | '\u{202F}' => ' ',  // (narrow) no-break space
            other => other,
        }
    }

    text.lines()
        .map(|line| line.trim_end().chars().map(fold).collect::<String>())
        .collect::<Vec<_>>()
        .join("\n")
}
50
51fn unified_diff(old: &str, new: &str, path: &str) -> String {
53 let diff = TextDiff::from_lines(old, new);
54 let mut output = format!("--- a/{}\n+++ b/{}\n", path, path);
55
56 let mut udiff = diff.unified_diff();
57 output.push_str(&udiff.header("", "").to_string());
58
59 for change in diff.iter_all_changes() {
60 let sign = match change.tag() {
61 ChangeTag::Delete => "-",
62 ChangeTag::Insert => "+",
63 ChangeTag::Equal => " ",
64 };
65 output.push_str(&format!("{}{}", sign, change));
66 if change.missing_newline() {
67 output.push('\n');
68 }
69 }
70
71 output
72}
73
74#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
75#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
76impl Tool for EditTool {
77 fn name(&self) -> &str {
78 "edit"
79 }
80
81 fn definition(&self) -> ToolDefinition {
82 ToolDefinition {
83 name: "edit".into(),
84 description: "Perform an exact text replacement in a file. The old text must match uniquely. Falls back to fuzzy matching (smart quote normalization, trailing whitespace) if exact match fails.".into(),
85 input_schema: json!({
86 "type": "object",
87 "properties": {
88 "path": {
89 "type": "string",
90 "description": "File path to edit"
91 },
92 "old": {
93 "type": "string",
94 "description": "Exact text to find and replace"
95 },
96 "new": {
97 "type": "string",
98 "description": "Replacement text"
99 }
100 },
101 "required": ["path", "old", "new"]
102 }),
103 }
104 }
105
106 async fn execute(
107 &self,
108 _call_id: &str,
109 arguments: serde_json::Value,
110 _partial_tx: Option<mpsc::UnboundedSender<String>>,
111 ) -> SoulResult<ToolOutput> {
112 let path = arguments
113 .get("path")
114 .and_then(|v| v.as_str())
115 .unwrap_or("");
116 let old_text = arguments
117 .get("old")
118 .and_then(|v| v.as_str())
119 .unwrap_or("");
120 let new_text = arguments
121 .get("new")
122 .and_then(|v| v.as_str())
123 .unwrap_or("");
124
125 if path.is_empty() {
126 return Ok(ToolOutput::error("Missing required parameter: path"));
127 }
128 if old_text.is_empty() {
129 return Ok(ToolOutput::error("Missing required parameter: old"));
130 }
131 if old_text == new_text {
132 return Ok(ToolOutput::error(
133 "old and new text are identical — no change would occur",
134 ));
135 }
136
137 let resolved = resolve_path(&self.cwd, path);
138
139 let exists = self.fs.exists(&resolved).await?;
140 if !exists {
141 return Ok(ToolOutput::error(format!("File not found: {}", path)));
142 }
143
144 let content = match self.fs.read_to_string(&resolved).await {
145 Ok(c) => c,
146 Err(e) => return Ok(ToolOutput::error(format!("Failed to read {}: {}", path, e))),
147 };
148
149 let matches: Vec<_> = content.match_indices(old_text).collect();
151
152 let (new_content, method) = if matches.len() == 1 {
153 (content.replacen(old_text, new_text, 1), "exact")
154 } else if matches.len() > 1 {
155 return Ok(ToolOutput::error(format!(
156 "Found {} occurrences of the old text — must be unique. Provide more context to disambiguate.",
157 matches.len()
158 )));
159 } else {
160 let norm_content = normalize_for_fuzzy(&content);
162 let norm_old = normalize_for_fuzzy(old_text);
163
164 let fuzzy_matches: Vec<_> = norm_content.match_indices(&norm_old).collect();
165
166 if fuzzy_matches.len() == 1 {
167 let fuzzy_pos = fuzzy_matches[0].0;
169 let norm_lines_before = norm_content[..fuzzy_pos].lines().count();
171 let original_lines: Vec<&str> = content.lines().collect();
172 let search_lines: Vec<&str> = old_text.lines().collect();
173
174 if norm_lines_before > 0 && norm_lines_before <= original_lines.len() {
175 let start_line = norm_lines_before.saturating_sub(1);
176 let end_line = (start_line + search_lines.len()).min(original_lines.len());
177 let original_section = original_lines[start_line..end_line].join("\n");
178 (content.replacen(&original_section, new_text, 1), "fuzzy")
179 } else {
180 let result = norm_content.replacen(&norm_old, new_text, 1);
182 (result, "fuzzy")
183 }
184 } else if fuzzy_matches.len() > 1 {
185 return Ok(ToolOutput::error(format!(
186 "Found {} fuzzy occurrences — must be unique. Provide more context.",
187 fuzzy_matches.len()
188 )));
189 } else {
190 return Ok(ToolOutput::error(
191 "Text not found in file (tried exact and fuzzy matching). Verify the old text matches the file content.",
192 ));
193 }
194 };
195
196 match self.fs.write(&resolved, &new_content).await {
198 Ok(()) => {
199 let diff = unified_diff(&content, &new_content, path);
200 let first_changed_line = content
202 .lines()
203 .zip(new_content.lines())
204 .enumerate()
205 .find(|(_, (a, b))| a != b)
206 .map(|(i, _)| i + 1)
207 .unwrap_or(1);
208
209 Ok(ToolOutput::success(format!(
210 "Applied edit to {} ({})\n\n{}",
211 path, method, diff
212 ))
213 .with_metadata(json!({
214 "method": method,
215 "first_changed_line": first_changed_line,
216 "path": path,
217 })))
218 }
219 Err(e) => Ok(ToolOutput::error(format!(
220 "Failed to write {}: {}",
221 path, e
222 ))),
223 }
224 }
225}
226
#[cfg(test)]
mod tests {
    use super::*;
    use soul_core::vfs::MemoryFs;

    /// Fresh in-memory filesystem plus an `EditTool` rooted at `/project`.
    async fn setup() -> (Arc<MemoryFs>, EditTool) {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs.clone() as Arc<dyn VirtualFs>, "/project");
        (fs, tool)
    }

    /// Execute the tool with the given call id and arguments, unwrapping
    /// the transport-level `Result`. Tool-level failures remain visible on
    /// the returned output (`is_error`).
    async fn run(tool: &EditTool, id: &str, args: serde_json::Value) -> ToolOutput {
        tool.execute(id, args, None).await.unwrap()
    }

    #[tokio::test]
    async fn exact_replacement() {
        let (fs, tool) = setup().await;
        fs.write("/project/code.rs", "fn main() {\n println!(\"hello\");\n}")
            .await
            .unwrap();

        let out = run(
            &tool,
            "c1",
            json!({
                "path": "code.rs",
                "old": "println!(\"hello\")",
                "new": "println!(\"world\")"
            }),
        )
        .await;

        assert!(!out.is_error);
        assert!(out.content.contains("exact"));

        // The edit must have been persisted through the filesystem.
        let updated = fs.read_to_string("/project/code.rs").await.unwrap();
        assert!(updated.contains("world"));
        assert!(!updated.contains("hello"));
    }

    #[tokio::test]
    async fn fuzzy_smart_quotes() {
        let (fs, tool) = setup().await;
        fs.write("/project/quotes.txt", "It\u{2019}s a test")
            .await
            .unwrap();

        // ASCII apostrophe in `old` should match the curly quote on disk.
        let out = run(
            &tool,
            "c2",
            json!({ "path": "quotes.txt", "old": "It's a test", "new": "It is a test" }),
        )
        .await;

        assert!(!out.is_error);
        assert!(out.content.contains("fuzzy"));
    }

    #[tokio::test]
    async fn multiple_matches_error() {
        let (fs, tool) = setup().await;
        fs.write("/project/dup.txt", "hello hello hello")
            .await
            .unwrap();

        let out = run(
            &tool,
            "c3",
            json!({"path": "dup.txt", "old": "hello", "new": "world"}),
        )
        .await;

        assert!(out.is_error);
        assert!(out.content.contains("occurrences"));
    }

    #[tokio::test]
    async fn text_not_found() {
        let (fs, tool) = setup().await;
        fs.write("/project/missing.txt", "something else")
            .await
            .unwrap();

        let out = run(
            &tool,
            "c4",
            json!({"path": "missing.txt", "old": "nothere", "new": "replacement"}),
        )
        .await;

        assert!(out.is_error);
        assert!(out.content.contains("not found"));
    }

    #[tokio::test]
    async fn identical_old_new() {
        let (fs, tool) = setup().await;
        fs.write("/project/same.txt", "content").await.unwrap();

        let out = run(
            &tool,
            "c5",
            json!({"path": "same.txt", "old": "content", "new": "content"}),
        )
        .await;

        assert!(out.is_error);
        assert!(out.content.contains("identical"));
    }

    #[tokio::test]
    async fn file_not_found() {
        let (_fs, tool) = setup().await;

        let out = run(
            &tool,
            "c6",
            json!({"path": "nope.txt", "old": "a", "new": "b"}),
        )
        .await;

        assert!(out.is_error);
    }

    #[tokio::test]
    async fn diff_output() {
        let (fs, tool) = setup().await;
        fs.write("/project/diff.txt", "line1\nline2\nline3")
            .await
            .unwrap();

        let out = run(
            &tool,
            "c7",
            json!({"path": "diff.txt", "old": "line2", "new": "modified"}),
        )
        .await;

        assert!(!out.is_error);
        // The success message embeds the unified diff of the change.
        assert!(out.content.contains("-line2"));
        assert!(out.content.contains("+modified"));
    }

    #[test]
    fn normalize_fuzzy_quotes() {
        assert_eq!(
            normalize_for_fuzzy("\u{201C}hello\u{201D} \u{2018}world\u{2019}"),
            "\"hello\" 'world'"
        );
    }

    #[test]
    fn normalize_fuzzy_dashes() {
        assert_eq!(normalize_for_fuzzy("a\u{2013}b\u{2014}c"), "a-b-c");
    }

    #[test]
    fn normalize_fuzzy_trailing_whitespace() {
        assert_eq!(normalize_for_fuzzy("hello \nworld "), "hello\nworld");
    }

    #[test]
    fn tool_name_and_definition() {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs as Arc<dyn VirtualFs>, "/");

        assert_eq!(tool.name(), "edit");
        assert_eq!(tool.definition().name, "edit");
    }
}