1use std::sync::Arc;
4
5use async_trait::async_trait;
6use serde_json::json;
7use similar::{ChangeTag, TextDiff};
8use tokio::sync::mpsc;
9
10use soul_core::error::SoulResult;
11use soul_core::tool::{Tool, ToolOutput};
12use soul_core::types::ToolDefinition;
13use soul_core::vfs::VirtualFs;
14
/// Tool that performs a unique exact text replacement in a file, with a
/// fuzzy fallback (smart-quote / dash / no-break-space normalization and
/// trailing-whitespace tolerance) when the exact text is not found.
pub struct EditTool {
    // Virtual filesystem used for all existence checks, reads, and writes.
    fs: Arc<dyn VirtualFs>,
    // Base directory used by `resolve_path` to absolutize relative paths.
    cwd: String,
}
19
20impl EditTool {
21 pub fn new(fs: Arc<dyn VirtualFs>, cwd: impl Into<String>) -> Self {
22 Self {
23 fs,
24 cwd: cwd.into(),
25 }
26 }
27
28 fn resolve_path(&self, path: &str) -> String {
29 if path.starts_with('/') {
30 path.to_string()
31 } else {
32 format!("{}/{}", self.cwd.trim_end_matches('/'), path)
33 }
34 }
35}
36
/// Normalize `text` for fuzzy comparison: per line, trailing whitespace is
/// trimmed and typographic characters (curly quotes, en/em dashes, no-break
/// spaces) are folded to their ASCII equivalents. Lines are rejoined with
/// `\n`, so any trailing newline in the input is dropped.
fn normalize_for_fuzzy(text: &str) -> String {
    let mut out = String::with_capacity(text.len());
    for (idx, line) in text.lines().enumerate() {
        if idx > 0 {
            out.push('\n');
        }
        // Trim first, then fold characters — mirrors the comparison the
        // fuzzy matcher performs on both haystack and needle.
        for ch in line.trim_end().chars() {
            out.push(match ch {
                '\u{2018}' | '\u{2019}' => '\'',
                '\u{201C}' | '\u{201D}' => '"',
                '\u{2013}' | '\u{2014}' => '-',
                '\u{00A0}' | '\u{202F}' => ' ',
                other => other,
            });
        }
    }
    out
}
56
57fn unified_diff(old: &str, new: &str, path: &str) -> String {
59 let diff = TextDiff::from_lines(old, new);
60 let mut output = format!("--- a/{}\n+++ b/{}\n", path, path);
61
62 let mut udiff = diff.unified_diff();
63 output.push_str(&udiff.header("", "").to_string());
64
65 for change in diff.iter_all_changes() {
66 let sign = match change.tag() {
67 ChangeTag::Delete => "-",
68 ChangeTag::Insert => "+",
69 ChangeTag::Equal => " ",
70 };
71 output.push_str(&format!("{}{}", sign, change));
72 if change.missing_newline() {
73 output.push('\n');
74 }
75 }
76
77 output
78}
79
80#[async_trait]
81impl Tool for EditTool {
82 fn name(&self) -> &str {
83 "edit"
84 }
85
86 fn definition(&self) -> ToolDefinition {
87 ToolDefinition {
88 name: "edit".into(),
89 description: "Perform an exact text replacement in a file. The old text must match uniquely. Falls back to fuzzy matching (smart quote normalization, trailing whitespace) if exact match fails.".into(),
90 input_schema: json!({
91 "type": "object",
92 "properties": {
93 "path": {
94 "type": "string",
95 "description": "File path to edit"
96 },
97 "old": {
98 "type": "string",
99 "description": "Exact text to find and replace"
100 },
101 "new": {
102 "type": "string",
103 "description": "Replacement text"
104 }
105 },
106 "required": ["path", "old", "new"]
107 }),
108 }
109 }
110
111 async fn execute(
112 &self,
113 _call_id: &str,
114 arguments: serde_json::Value,
115 _partial_tx: Option<mpsc::UnboundedSender<String>>,
116 ) -> SoulResult<ToolOutput> {
117 let path = arguments
118 .get("path")
119 .and_then(|v| v.as_str())
120 .unwrap_or("");
121 let old_text = arguments
122 .get("old")
123 .and_then(|v| v.as_str())
124 .unwrap_or("");
125 let new_text = arguments
126 .get("new")
127 .and_then(|v| v.as_str())
128 .unwrap_or("");
129
130 if path.is_empty() {
131 return Ok(ToolOutput::error("Missing required parameter: path"));
132 }
133 if old_text.is_empty() {
134 return Ok(ToolOutput::error("Missing required parameter: old"));
135 }
136 if old_text == new_text {
137 return Ok(ToolOutput::error(
138 "old and new text are identical — no change would occur",
139 ));
140 }
141
142 let resolved = self.resolve_path(path);
143
144 let exists = self.fs.exists(&resolved).await?;
145 if !exists {
146 return Ok(ToolOutput::error(format!("File not found: {}", path)));
147 }
148
149 let content = match self.fs.read_to_string(&resolved).await {
150 Ok(c) => c,
151 Err(e) => return Ok(ToolOutput::error(format!("Failed to read {}: {}", path, e))),
152 };
153
154 let matches: Vec<_> = content.match_indices(old_text).collect();
156
157 let (new_content, method) = if matches.len() == 1 {
158 (content.replacen(old_text, new_text, 1), "exact")
159 } else if matches.len() > 1 {
160 return Ok(ToolOutput::error(format!(
161 "Found {} occurrences of the old text — must be unique. Provide more context to disambiguate.",
162 matches.len()
163 )));
164 } else {
165 let norm_content = normalize_for_fuzzy(&content);
167 let norm_old = normalize_for_fuzzy(old_text);
168
169 let fuzzy_matches: Vec<_> = norm_content.match_indices(&norm_old).collect();
170
171 if fuzzy_matches.len() == 1 {
172 let fuzzy_pos = fuzzy_matches[0].0;
174 let norm_lines_before = norm_content[..fuzzy_pos].lines().count();
176 let original_lines: Vec<&str> = content.lines().collect();
177 let search_lines: Vec<&str> = old_text.lines().collect();
178
179 if norm_lines_before > 0 && norm_lines_before <= original_lines.len() {
180 let start_line = norm_lines_before.saturating_sub(1);
181 let end_line = (start_line + search_lines.len()).min(original_lines.len());
182 let original_section = original_lines[start_line..end_line].join("\n");
183 (content.replacen(&original_section, new_text, 1), "fuzzy")
184 } else {
185 let result = norm_content.replacen(&norm_old, new_text, 1);
187 (result, "fuzzy")
188 }
189 } else if fuzzy_matches.len() > 1 {
190 return Ok(ToolOutput::error(format!(
191 "Found {} fuzzy occurrences — must be unique. Provide more context.",
192 fuzzy_matches.len()
193 )));
194 } else {
195 return Ok(ToolOutput::error(
196 "Text not found in file (tried exact and fuzzy matching). Verify the old text matches the file content.",
197 ));
198 }
199 };
200
201 match self.fs.write(&resolved, &new_content).await {
203 Ok(()) => {
204 let diff = unified_diff(&content, &new_content, path);
205 let first_changed_line = content
207 .lines()
208 .zip(new_content.lines())
209 .enumerate()
210 .find(|(_, (a, b))| a != b)
211 .map(|(i, _)| i + 1)
212 .unwrap_or(1);
213
214 Ok(ToolOutput::success(format!(
215 "Applied edit to {} ({})\n\n{}",
216 path, method, diff
217 ))
218 .with_metadata(json!({
219 "method": method,
220 "first_changed_line": first_changed_line,
221 "path": path,
222 })))
223 }
224 Err(e) => Ok(ToolOutput::error(format!(
225 "Failed to write {}: {}",
226 path, e
227 ))),
228 }
229 }
230}
231
#[cfg(test)]
mod tests {
    use super::*;
    use soul_core::vfs::MemoryFs;

    /// Build an in-memory filesystem and an `EditTool` rooted at `/project`.
    async fn setup() -> (Arc<MemoryFs>, EditTool) {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs.clone() as Arc<dyn VirtualFs>, "/project");
        (fs, tool)
    }

    // A uniquely-matching exact string is replaced in place and the result
    // is labelled "exact"; relative paths resolve against the tool's cwd.
    #[tokio::test]
    async fn exact_replacement() {
        let (fs, tool) = setup().await;
        fs.write("/project/code.rs", "fn main() {\n    println!(\"hello\");\n}")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c1",
                json!({
                    "path": "code.rs",
                    "old": "println!(\"hello\")",
                    "new": "println!(\"world\")"
                }),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("exact"));
        let content = fs.read_to_string("/project/code.rs").await.unwrap();
        assert!(content.contains("world"));
        assert!(!content.contains("hello"));
    }

    // When the exact match fails because the file uses a curly apostrophe
    // (U+2019), the normalized fuzzy match still finds and replaces it.
    #[tokio::test]
    async fn fuzzy_smart_quotes() {
        let (fs, tool) = setup().await;
        fs.write("/project/quotes.txt", "It\u{2019}s a test")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c2",
                json!({
                    "path": "quotes.txt",
                    "old": "It's a test",
                    "new": "It is a test"
                }),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("fuzzy"));
    }

    // An ambiguous old text (multiple exact occurrences) is rejected rather
    // than guessing which occurrence to replace.
    #[tokio::test]
    async fn multiple_matches_error() {
        let (fs, tool) = setup().await;
        fs.write("/project/dup.txt", "hello hello hello")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c3",
                json!({"path": "dup.txt", "old": "hello", "new": "world"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("occurrences"));
    }

    // Old text absent from the file (exact AND fuzzy) yields a "not found"
    // error output, not an Err.
    #[tokio::test]
    async fn text_not_found() {
        let (fs, tool) = setup().await;
        fs.write("/project/missing.txt", "something else")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c4",
                json!({"path": "missing.txt", "old": "nothere", "new": "replacement"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("not found"));
    }

    // old == new is rejected up front — the edit would be a no-op.
    #[tokio::test]
    async fn identical_old_new() {
        let (fs, tool) = setup().await;
        fs.write("/project/same.txt", "content").await.unwrap();

        let result = tool
            .execute(
                "c5",
                json!({"path": "same.txt", "old": "content", "new": "content"}),
                None,
            )
            .await
            .unwrap();

        assert!(result.is_error);
        assert!(result.content.contains("identical"));
    }

    // Editing a path that does not exist on the VFS is an error output.
    #[tokio::test]
    async fn file_not_found() {
        let (_fs, tool) = setup().await;
        let result = tool
            .execute(
                "c6",
                json!({"path": "nope.txt", "old": "a", "new": "b"}),
                None,
            )
            .await
            .unwrap();
        assert!(result.is_error);
    }

    // A successful edit embeds a diff showing the removed and added lines.
    #[tokio::test]
    async fn diff_output() {
        let (fs, tool) = setup().await;
        fs.write("/project/diff.txt", "line1\nline2\nline3")
            .await
            .unwrap();

        let result = tool
            .execute(
                "c7",
                json!({"path": "diff.txt", "old": "line2", "new": "modified"}),
                None,
            )
            .await
            .unwrap();

        assert!(!result.is_error);
        assert!(result.content.contains("-line2"));
        assert!(result.content.contains("+modified"));
    }

    // Curly double and single quotes normalize to their ASCII forms.
    #[test]
    fn normalize_fuzzy_quotes() {
        let input = "\u{201C}hello\u{201D} \u{2018}world\u{2019}";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "\"hello\" 'world'");
    }

    // En dash (U+2013) and em dash (U+2014) normalize to '-'.
    #[test]
    fn normalize_fuzzy_dashes() {
        let input = "a\u{2013}b\u{2014}c";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "a-b-c");
    }

    // Trailing whitespace is stripped per line before comparison.
    #[test]
    fn normalize_fuzzy_trailing_whitespace() {
        let input = "hello  \nworld  ";
        let normalized = normalize_for_fuzzy(input);
        assert_eq!(normalized, "hello\nworld");
    }

    // The tool advertises itself as "edit" in both name() and definition().
    #[test]
    fn tool_name_and_definition() {
        let fs = Arc::new(MemoryFs::new());
        let tool = EditTool::new(fs as Arc<dyn VirtualFs>, "/");
        assert_eq!(tool.name(), "edit");
        let def = tool.definition();
        assert_eq!(def.name, "edit");
    }
}