1use anyhow::{Context, Result};
2use diffy::{Patch, apply};
3use serde::Deserialize;
4use serde_json::{Value, json};
5use sha2::{Digest, Sha256};
6use std::collections::HashMap;
7use std::fs;
8use std::path::{Path, PathBuf};
9use walkdir::WalkDir;
10
11use brainwires_core::{StagedWrite, Tool, ToolContext, ToolInputSchema, ToolResult};
12
/// Namespace for local file-system tools (read/write/edit/patch/list/search/
/// delete/mkdir). Stateless: all functionality is exposed through associated
/// functions, and per-call state arrives via `ToolContext`.
pub struct FileOpsTool;
15
16impl FileOpsTool {
17 pub fn get_tools() -> Vec<Tool> {
19 vec![
20 Self::read_file_tool(),
21 Self::write_file_tool(),
22 Self::edit_file_tool(),
23 Self::patch_file_tool(),
24 Self::list_directory_tool(),
25 Self::search_files_tool(),
26 Self::delete_file_tool(),
27 Self::create_directory_tool(),
28 ]
29 }
30
31 fn read_file_tool() -> Tool {
32 let mut properties = HashMap::new();
33 properties.insert(
34 "path".to_string(),
35 json!({"type": "string", "description": "Path to the file to read (relative or absolute)"}),
36 );
37 properties.insert(
38 "offset".to_string(),
39 json!({
40 "type": "number",
41 "description": "Line number to start reading from (1-based, default 1)",
42 "default": 1
43 }),
44 );
45 properties.insert(
46 "limit".to_string(),
47 json!({
48 "type": "number",
49 "description": "Maximum lines to read (default 2000). Output truncation marker is appended if the file is larger.",
50 "default": 2000
51 }),
52 );
53 Tool {
54 name: "read_file".to_string(),
55 description: "Read the contents of a local file. Defaults to the first 2000 lines; use offset+limit for paged reads of large files.".to_string(),
56 input_schema: ToolInputSchema::object(properties, vec!["path".to_string()]),
57 requires_approval: false,
58 ..Default::default()
59 }
60 }
61
62 fn write_file_tool() -> Tool {
63 let mut properties = HashMap::new();
64 properties.insert(
65 "path".to_string(),
66 json!({"type": "string", "description": "Path to the file to write"}),
67 );
68 properties.insert(
69 "content".to_string(),
70 json!({"type": "string", "description": "Content to write to the file"}),
71 );
72 Tool {
73 name: "write_file".to_string(),
74 description: "Create or overwrite a file with the given content.".to_string(),
75 input_schema: ToolInputSchema::object(
76 properties,
77 vec!["path".to_string(), "content".to_string()],
78 ),
79 requires_approval: true,
80 ..Default::default()
81 }
82 }
83
84 fn edit_file_tool() -> Tool {
85 let mut properties = HashMap::new();
86 properties.insert(
87 "path".to_string(),
88 json!({"type": "string", "description": "Path to the file to edit"}),
89 );
90 properties.insert(
91 "old_text".to_string(),
92 json!({"type": "string", "description": "Exact text to find in the file"}),
93 );
94 properties.insert(
95 "new_text".to_string(),
96 json!({"type": "string", "description": "Text to replace old_text with"}),
97 );
98 Tool {
99 name: "edit_file".to_string(),
100 description: "Replace the first occurrence of old_text with new_text in a file."
101 .to_string(),
102 input_schema: ToolInputSchema::object(
103 properties,
104 vec![
105 "path".to_string(),
106 "old_text".to_string(),
107 "new_text".to_string(),
108 ],
109 ),
110 requires_approval: true,
111 ..Default::default()
112 }
113 }
114
115 fn patch_file_tool() -> Tool {
116 let mut properties = HashMap::new();
117 properties.insert(
118 "path".to_string(),
119 json!({"type": "string", "description": "Path to the file to patch"}),
120 );
121 properties.insert(
122 "patch".to_string(),
123 json!({"type": "string", "description": "Unified diff patch to apply"}),
124 );
125 Tool {
126 name: "patch_file".to_string(),
127 description: "Apply a unified diff patch to a file.".to_string(),
128 input_schema: ToolInputSchema::object(
129 properties,
130 vec!["path".to_string(), "patch".to_string()],
131 ),
132 requires_approval: true,
133 ..Default::default()
134 }
135 }
136
137 fn list_directory_tool() -> Tool {
138 let mut properties = HashMap::new();
139 properties.insert(
140 "path".to_string(),
141 json!({"type": "string", "description": "Path to the directory to list"}),
142 );
143 properties.insert("recursive".to_string(), json!({"type": "boolean", "description": "Whether to list recursively", "default": false}));
144 Tool {
145 name: "list_directory".to_string(),
146 description: "List files and directories in a local path.".to_string(),
147 input_schema: ToolInputSchema::object(properties, vec!["path".to_string()]),
148 requires_approval: false,
149 ..Default::default()
150 }
151 }
152
153 fn search_files_tool() -> Tool {
154 let mut properties = HashMap::new();
155 properties.insert(
156 "path".to_string(),
157 json!({"type": "string", "description": "Directory to search in"}),
158 );
159 properties.insert(
160 "pattern".to_string(),
161 json!({"type": "string", "description": "File name pattern to match (glob pattern)"}),
162 );
163 Tool {
164 name: "search_files".to_string(),
165 description: "Search for files matching a glob pattern.".to_string(),
166 input_schema: ToolInputSchema::object(
167 properties,
168 vec!["path".to_string(), "pattern".to_string()],
169 ),
170 requires_approval: false,
171 ..Default::default()
172 }
173 }
174
175 fn delete_file_tool() -> Tool {
176 let mut properties = HashMap::new();
177 properties.insert(
178 "path".to_string(),
179 json!({"type": "string", "description": "Path to the file or directory to delete"}),
180 );
181 Tool {
182 name: "delete_file".to_string(),
183 description: "Delete a file or directory.".to_string(),
184 input_schema: ToolInputSchema::object(properties, vec!["path".to_string()]),
185 requires_approval: true,
186 ..Default::default()
187 }
188 }
189
190 fn create_directory_tool() -> Tool {
191 let mut properties = HashMap::new();
192 properties.insert(
193 "path".to_string(),
194 json!({"type": "string", "description": "Path to the directory to create"}),
195 );
196 Tool {
197 name: "create_directory".to_string(),
198 description: "Create a new directory (including parent directories).".to_string(),
199 input_schema: ToolInputSchema::object(properties, vec!["path".to_string()]),
200 requires_approval: true,
201 ..Default::default()
202 }
203 }
204
205 #[tracing::instrument(name = "tool.execute", skip(input, context), fields(tool_name))]
207 pub fn execute(
208 tool_use_id: &str,
209 tool_name: &str,
210 input: &Value,
211 context: &ToolContext,
212 ) -> ToolResult {
213 let result = match tool_name {
214 "read_file" => Self::read_file(input, context),
215 "write_file" => Self::write_file(input, context),
216 "edit_file" => Self::edit_file(input, context),
217 "patch_file" => Self::patch_file(input, context),
218 "list_directory" => Self::list_directory(input, context),
219 "search_files" => Self::search_files(input, context),
220 "delete_file" => Self::delete_file(input, context),
221 "create_directory" => Self::create_directory(input, context),
222 _ => Err(anyhow::anyhow!(
223 "Unknown file operation tool: {}",
224 tool_name
225 )),
226 };
227 match result {
228 Ok(output) => ToolResult::success(tool_use_id.to_string(), output),
229 Err(e) => ToolResult::error(
230 tool_use_id.to_string(),
231 format!("File operation failed: {}", e),
232 ),
233 }
234 }
235
    /// Read a file and return a header (path, byte size, line counts) plus the
    /// requested line window. Defaults to lines 1..=2000; when the file holds
    /// more lines than the window, the header carries a truncation marker that
    /// tells the caller which offset to request next.
    fn read_file(input: &Value, context: &ToolContext) -> Result<String> {
        #[derive(Deserialize)]
        struct Input {
            path: String,
            // 1-based first line to show.
            #[serde(default = "default_read_offset")]
            offset: u32,
            // Maximum number of lines to include in the output.
            #[serde(default = "default_read_limit")]
            limit: u32,
        }
        fn default_read_offset() -> u32 {
            1
        }
        fn default_read_limit() -> u32 {
            2000
        }
        let params: Input = serde_json::from_value(input.clone())?;
        let full_path = Self::resolve_path(&params.path, context)?;
        let content = fs::read_to_string(&full_path)
            .with_context(|| format!("Failed to read file: {}", full_path.display()))?;
        let total_bytes = content.len();
        let total_lines = content.lines().count();

        // 1-based offset -> 0-based skip count; an offset of 0 behaves like 1.
        let start = params.offset.saturating_sub(1) as usize;
        // A limit of 0 is clamped to 1 so at least one line is requested.
        let limit = params.limit.max(1) as usize;
        let sliced: String = content
            .lines()
            .skip(start)
            .take(limit)
            .collect::<Vec<_>>()
            .join("\n");

        // `end` is the 1-based number of the last line shown (capped at EOF).
        let end = (start + limit).min(total_lines);
        let truncated = end < total_lines;
        let header = if truncated {
            format!(
                "File: {}\nSize: {} bytes, {} lines total\nShowing lines {}-{} of {} (... truncated: call again with offset={} to continue)\n\n",
                full_path.display(),
                total_bytes,
                total_lines,
                start + 1,
                end,
                total_lines,
                end + 1,
            )
        } else {
            format!(
                "File: {}\nSize: {} bytes, {} lines total\nShowing lines {}-{}\n\n",
                full_path.display(),
                total_bytes,
                total_lines,
                start + 1,
                // max() keeps the range non-empty even for an empty file or an
                // offset past EOF (shows e.g. "lines 5-5" with an empty body).
                end.max(start + 1),
            )
        };
        Ok(format!("{}{}", header, sliced))
    }
292
293 fn write_file(input: &Value, context: &ToolContext) -> Result<String> {
294 #[derive(Deserialize)]
295 struct Input {
296 path: String,
297 content: String,
298 }
299 let params: Input = serde_json::from_value(input.clone())?;
300 let full_path = Self::resolve_path(¶ms.path, context)?;
301
302 let content_hash = Sha256::digest(params.content.as_bytes());
304 let key = Self::derive_idempotency_key("write_file", &full_path, &content_hash);
305 if let Some(ref registry) = context.idempotency_registry
306 && let Some(record) = registry.get(&key)
307 {
308 tracing::debug!(path = %full_path.display(), "write_file: idempotent retry, returning cached result");
309 return Ok(record.cached_result);
310 }
311
312 if let Some(ref backend) = context.staging_backend {
314 let staged = StagedWrite {
315 key,
316 target_path: full_path.clone(),
317 content: params.content.clone(),
318 };
319 backend.stage(staged);
320 return Ok(format!(
321 "Staged write of {} bytes to {} (pending commit)",
322 params.content.len(),
323 full_path.display()
324 ));
325 }
326
327 if let Some(parent) = full_path.parent() {
329 fs::create_dir_all(parent).with_context(|| {
330 format!("Failed to create parent directory: {}", parent.display())
331 })?;
332 }
333 fs::write(&full_path, ¶ms.content)
334 .with_context(|| format!("Failed to write file: {}", full_path.display()))?;
335 let msg = format!(
336 "Successfully wrote {} bytes to {}",
337 params.content.len(),
338 full_path.display()
339 );
340 if let Some(ref registry) = context.idempotency_registry {
341 registry.record(
342 Self::derive_idempotency_key("write_file", &full_path, &content_hash),
343 msg.clone(),
344 );
345 }
346 Ok(msg)
347 }
348
349 fn edit_file(input: &Value, context: &ToolContext) -> Result<String> {
350 #[derive(Deserialize)]
351 struct Input {
352 path: String,
353 old_text: String,
354 new_text: String,
355 }
356 let params: Input = serde_json::from_value(input.clone())?;
357 let full_path = Self::resolve_path(¶ms.path, context)?;
358
359 let mut hasher = Sha256::new();
361 hasher.update(params.old_text.as_bytes());
362 hasher.update(b"\0");
363 hasher.update(params.new_text.as_bytes());
364 let content_hash = hasher.finalize();
365 let key = Self::derive_idempotency_key("edit_file", &full_path, &content_hash);
366
367 if let Some(ref registry) = context.idempotency_registry
369 && let Some(record) = registry.get(&key)
370 {
371 tracing::debug!(path = %full_path.display(), "edit_file: idempotent retry, returning cached result");
372 return Ok(record.cached_result);
373 }
374
375 let current = fs::read_to_string(&full_path)
377 .with_context(|| format!("Failed to read file: {}", full_path.display()))?;
378 if !current.contains(¶ms.old_text) {
379 return Err(anyhow::anyhow!(
380 "Text not found in file: '{}'",
381 params.old_text
382 ));
383 }
384 let new_content = current.replacen(¶ms.old_text, ¶ms.new_text, 1);
385
386 if let Some(ref backend) = context.staging_backend {
388 backend.stage(StagedWrite {
389 key,
390 target_path: full_path.clone(),
391 content: new_content,
392 });
393 return Ok(format!(
394 "Staged edit (1 replacement) in {} (pending commit)",
395 full_path.display()
396 ));
397 }
398
399 fs::write(&full_path, &new_content)
401 .with_context(|| format!("Failed to write file: {}", full_path.display()))?;
402 let msg = format!(
403 "Successfully replaced 1 occurrence(s) in {}",
404 full_path.display()
405 );
406 if let Some(ref registry) = context.idempotency_registry {
407 registry.record(
408 Self::derive_idempotency_key("edit_file", &full_path, &content_hash),
409 msg.clone(),
410 );
411 }
412 Ok(msg)
413 }
414
415 fn patch_file(input: &Value, context: &ToolContext) -> Result<String> {
416 #[derive(Deserialize)]
417 struct Input {
418 path: String,
419 patch: String,
420 }
421 let params: Input = serde_json::from_value(input.clone())?;
422 let full_path = Self::resolve_path(¶ms.path, context)?;
423
424 let patch_hash = Sha256::digest(params.patch.as_bytes());
426 let key = Self::derive_idempotency_key("patch_file", &full_path, &patch_hash);
427
428 if let Some(ref registry) = context.idempotency_registry
430 && let Some(record) = registry.get(&key)
431 {
432 tracing::debug!(path = %full_path.display(), "patch_file: idempotent retry, returning cached result");
433 return Ok(record.cached_result);
434 }
435
436 let content = fs::read_to_string(&full_path)
438 .with_context(|| format!("Failed to read file: {}", full_path.display()))?;
439 let patch: Patch<'_, str> = Patch::from_str(¶ms.patch)
440 .map_err(|e| anyhow::anyhow!("Failed to parse patch: {}", e))?;
441 let hunk_count = patch.hunks().len();
442 let new_content =
443 apply(&content, &patch).map_err(|e| anyhow::anyhow!("Failed to apply patch: {}", e))?;
444
445 if let Some(ref backend) = context.staging_backend {
447 backend.stage(StagedWrite {
448 key,
449 target_path: full_path.clone(),
450 content: new_content.to_string(),
451 });
452 return Ok(format!(
453 "Staged patch of {} hunk(s) to {} (pending commit)",
454 hunk_count,
455 full_path.display()
456 ));
457 }
458
459 fs::write(&full_path, new_content.as_str())
461 .with_context(|| format!("Failed to write file: {}", full_path.display()))?;
462 let msg = format!(
463 "Successfully applied patch with {} hunk(s) to {}",
464 hunk_count,
465 full_path.display()
466 );
467 if let Some(ref registry) = context.idempotency_registry {
468 registry.record(
469 Self::derive_idempotency_key("patch_file", &full_path, &patch_hash),
470 msg.clone(),
471 );
472 }
473 Ok(msg)
474 }
475
476 fn list_directory(input: &Value, context: &ToolContext) -> Result<String> {
477 #[derive(Deserialize)]
478 struct Input {
479 path: String,
480 #[serde(default)]
481 recursive: bool,
482 }
483 let params: Input = serde_json::from_value(input.clone())?;
484 let full_path = Self::resolve_path(¶ms.path, context)?;
485 if !full_path.is_dir() {
486 return Err(anyhow::anyhow!("Not a directory: {}", full_path.display()));
487 }
488
489 let mut entries = Vec::new();
490 if params.recursive {
491 for entry in WalkDir::new(&full_path).min_depth(1) {
492 let entry = entry?;
493 let path = entry.path();
494 let relative = path.strip_prefix(&full_path).unwrap_or(path);
495 let type_str = if path.is_dir() { "dir" } else { "file" };
496 entries.push(format!("{} - {}", type_str, relative.display()));
497 }
498 } else {
499 for entry in fs::read_dir(&full_path)? {
500 let entry = entry?;
501 let path = entry.path();
502 let name = entry.file_name();
503 let type_str = if path.is_dir() { "dir" } else { "file" };
504 entries.push(format!("{} - {}", type_str, name.to_string_lossy()));
505 }
506 }
507 entries.sort();
508 Ok(format!(
509 "Directory: {}\nEntries: {}\n\n{}",
510 full_path.display(),
511 entries.len(),
512 entries.join("\n")
513 ))
514 }
515
516 fn search_files(input: &Value, context: &ToolContext) -> Result<String> {
517 #[derive(Deserialize)]
518 struct Input {
519 path: String,
520 pattern: String,
521 }
522 let params: Input = serde_json::from_value(input.clone())?;
523 let full_path = Self::resolve_path(¶ms.path, context)?;
524 let glob_pattern = full_path.join(¶ms.pattern);
525 let pattern_str = glob_pattern.to_string_lossy().to_string();
526 let mut matches = Vec::new();
527 for entry in glob::glob(&pattern_str)? {
528 match entry {
529 Ok(path) => {
530 let relative = path.strip_prefix(&full_path).unwrap_or(&path);
531 matches.push(relative.display().to_string());
532 }
533 Err(e) => tracing::warn!("Error reading glob entry: {}", e),
534 }
535 }
536 matches.sort();
537 Ok(format!(
538 "Search pattern: {}\nMatches: {}\n\n{}",
539 params.pattern,
540 matches.len(),
541 matches.join("\n")
542 ))
543 }
544
545 fn delete_file(input: &Value, context: &ToolContext) -> Result<String> {
546 #[derive(Deserialize)]
547 struct Input {
548 path: String,
549 }
550 let params: Input = serde_json::from_value(input.clone())?;
551 let full_path = Self::resolve_path(¶ms.path, context)?;
552
553 if let Some(ref registry) = context.idempotency_registry {
555 let key = Self::derive_idempotency_key("delete_file", &full_path, b"");
556 if let Some(record) = registry.get(&key) {
557 tracing::debug!(path = %full_path.display(), "delete_file: idempotent retry, returning cached result");
558 return Ok(record.cached_result);
559 }
560 let msg = if full_path.is_dir() {
561 fs::remove_dir_all(&full_path).with_context(|| {
562 format!("Failed to delete directory: {}", full_path.display())
563 })?;
564 format!("Successfully deleted directory: {}", full_path.display())
565 } else {
566 fs::remove_file(&full_path)
567 .with_context(|| format!("Failed to delete file: {}", full_path.display()))?;
568 format!("Successfully deleted file: {}", full_path.display())
569 };
570 registry.record(key, msg.clone());
571 return Ok(msg);
572 }
573
574 if full_path.is_dir() {
575 fs::remove_dir_all(&full_path)
576 .with_context(|| format!("Failed to delete directory: {}", full_path.display()))?;
577 Ok(format!(
578 "Successfully deleted directory: {}",
579 full_path.display()
580 ))
581 } else {
582 fs::remove_file(&full_path)
583 .with_context(|| format!("Failed to delete file: {}", full_path.display()))?;
584 Ok(format!(
585 "Successfully deleted file: {}",
586 full_path.display()
587 ))
588 }
589 }
590
591 fn create_directory(input: &Value, context: &ToolContext) -> Result<String> {
592 #[derive(Deserialize)]
593 struct Input {
594 path: String,
595 }
596 let params: Input = serde_json::from_value(input.clone())?;
597 let full_path = Self::resolve_path(¶ms.path, context)?;
598
599 if let Some(ref registry) = context.idempotency_registry {
601 let key = Self::derive_idempotency_key("create_directory", &full_path, b"");
602 if let Some(record) = registry.get(&key) {
603 tracing::debug!(path = %full_path.display(), "create_directory: idempotent retry, returning cached result");
604 return Ok(record.cached_result);
605 }
606 fs::create_dir_all(&full_path)
607 .with_context(|| format!("Failed to create directory: {}", full_path.display()))?;
608 let msg = format!("Successfully created directory: {}", full_path.display());
609 registry.record(key, msg.clone());
610 return Ok(msg);
611 }
612
613 fs::create_dir_all(&full_path)
614 .with_context(|| format!("Failed to create directory: {}", full_path.display()))?;
615 Ok(format!(
616 "Successfully created directory: {}",
617 full_path.display()
618 ))
619 }
620
621 pub fn resolve_path(path: &str, context: &ToolContext) -> Result<PathBuf> {
623 let path = Path::new(path);
624 let resolved = if path.is_absolute() {
625 path.to_path_buf()
626 } else {
627 Path::new(&context.working_directory).join(path)
628 };
629 Ok(resolved.canonicalize().unwrap_or(resolved))
630 }
631
632 fn derive_idempotency_key(tool_name: &str, path: &Path, content_factor: &[u8]) -> String {
641 let mut hasher = Sha256::new();
642 hasher.update(tool_name.as_bytes());
643 hasher.update(b"\0");
644 hasher.update(path.to_string_lossy().as_bytes());
645 hasher.update(b"\0");
646 hasher.update(content_factor);
647 hex::encode(hasher.finalize())
648 }
649}
650
651#[cfg(test)]
652mod tests {
653 use super::*;
654 use tempfile::TempDir;
655
    // Context with only a working directory (no registry, no staging backend).
    fn create_test_context(working_dir: &str) -> ToolContext {
        ToolContext {
            working_directory: working_dir.to_string(),
            ..Default::default()
        }
    }

    // Context whose idempotency registry caches tool results across calls.
    fn create_test_context_with_registry(working_dir: &str) -> ToolContext {
        ToolContext {
            working_directory: working_dir.to_string(),
            idempotency_registry: Some(brainwires_core::IdempotencyRegistry::new()),
            ..Default::default()
        }
    }
670
    // get_tools exposes all eight file-operation tools.
    #[test]
    fn test_get_tools() {
        let tools = FileOpsTool::get_tools();
        assert_eq!(tools.len(), 8);
        let names: Vec<_> = tools.iter().map(|t| t.name.as_str()).collect();
        assert!(names.contains(&"read_file"));
        assert!(names.contains(&"write_file"));
        assert!(names.contains(&"edit_file"));
        assert!(names.contains(&"patch_file"));
    }

    // A small file is returned in full, following the header.
    #[test]
    fn test_read_file() {
        let temp_dir = TempDir::new().unwrap();
        let test_file = temp_dir.path().join("test.txt");
        fs::write(&test_file, "Hello, World!").unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "test.txt"});
        let result = FileOpsTool::execute("1", "read_file", &input, &context);
        assert!(!result.is_error);
        assert!(result.content.contains("Hello, World!"));
    }

    // Files beyond the 2000-line default window are truncated with a marker.
    #[test]
    fn test_read_file_truncates_large_file_and_emits_marker() {
        let temp_dir = TempDir::new().unwrap();
        let test_file = temp_dir.path().join("big.txt");
        let body = (1..=3000)
            .map(|i| format!("line {}", i))
            .collect::<Vec<_>>()
            .join("\n");
        fs::write(&test_file, &body).unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "big.txt"});
        let result = FileOpsTool::execute("1", "read_file", &input, &context);
        assert!(!result.is_error);
        assert!(result.content.contains("truncated"));
        assert!(result.content.contains("line 1\n"));
        assert!(result.content.contains("line 2000"));
        assert!(!result.content.contains("line 2001"));
    }

    // offset/limit select an exact line window (lines 10..=14 here).
    #[test]
    fn test_read_file_respects_offset_and_limit() {
        let temp_dir = TempDir::new().unwrap();
        let test_file = temp_dir.path().join("paged.txt");
        let body = (1..=100)
            .map(|i| format!("row {}", i))
            .collect::<Vec<_>>()
            .join("\n");
        fs::write(&test_file, &body).unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "paged.txt", "offset": 10, "limit": 5});
        let result = FileOpsTool::execute("1", "read_file", &input, &context);
        assert!(!result.is_error);
        assert!(result.content.contains("row 10"));
        assert!(result.content.contains("row 14"));
        assert!(!result.content.contains("row 15"));
        assert!(!result.content.contains("row 9\n"));
    }

    // write_file creates the file relative to the working directory.
    #[test]
    fn test_write_file() {
        let temp_dir = TempDir::new().unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "new.txt", "content": "Test"});
        let result = FileOpsTool::execute("2", "write_file", &input, &context);
        assert!(!result.is_error);
        assert!(temp_dir.path().join("new.txt").exists());
    }

    // edit_file replaces only the first occurrence of old_text.
    #[test]
    fn test_edit_file() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(
            temp_dir.path().join("edit.txt"),
            "Hello World! Hello World!",
        )
        .unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "edit.txt", "old_text": "World", "new_text": "Rust"});
        let result = FileOpsTool::execute("3", "edit_file", &input, &context);
        assert!(!result.is_error);
        let content = fs::read_to_string(temp_dir.path().join("edit.txt")).unwrap();
        assert_eq!(content, "Hello Rust! Hello World!");
    }

    // Non-recursive listing includes the directory's direct children.
    #[test]
    fn test_list_directory() {
        let temp_dir = TempDir::new().unwrap();
        fs::write(temp_dir.path().join("a.txt"), "").unwrap();
        fs::write(temp_dir.path().join("b.txt"), "").unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": ".", "recursive": false});
        let result = FileOpsTool::execute("4", "list_directory", &input, &context);
        assert!(!result.is_error);
        assert!(result.content.contains("a.txt"));
        assert!(result.content.contains("b.txt"));
    }

    // delete_file removes the target file from disk.
    #[test]
    fn test_delete_file() {
        let temp_dir = TempDir::new().unwrap();
        let file = temp_dir.path().join("del.txt");
        fs::write(&file, "").unwrap();
        let context = create_test_context(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "del.txt"});
        let result = FileOpsTool::execute("5", "delete_file", &input, &context);
        assert!(!result.is_error);
        assert!(!file.exists());
    }
784
    // Retrying an identical (path, content) write with a registry returns the
    // cached result and must not touch the disk again; the file is mutated in
    // between to prove the second call was a no-op.
    #[test]
    fn test_write_file_idempotent_same_content() {
        let temp_dir = TempDir::new().unwrap();
        let ctx = create_test_context_with_registry(temp_dir.path().to_str().unwrap());
        let input = json!({"path": "idem.txt", "content": "Hello"});

        let r1 = FileOpsTool::execute("1", "write_file", &input, &ctx);
        assert!(!r1.is_error);
        assert!(temp_dir.path().join("idem.txt").exists());

        fs::write(temp_dir.path().join("idem.txt"), "CORRUPTED").unwrap();

        let r2 = FileOpsTool::execute("2", "write_file", &input, &ctx);
        assert!(!r2.is_error);
        let on_disk = fs::read_to_string(temp_dir.path().join("idem.txt")).unwrap();
        assert_eq!(
            on_disk, "CORRUPTED",
            "Idempotent retry must not overwrite the file"
        );
    }

    // Writes with different content hash to different keys, so they are not
    // deduplicated.
    #[test]
    fn test_write_file_different_content_not_idempotent() {
        let temp_dir = TempDir::new().unwrap();
        let ctx = create_test_context_with_registry(temp_dir.path().to_str().unwrap());

        FileOpsTool::execute(
            "1",
            "write_file",
            &json!({"path": "f.txt", "content": "v1"}),
            &ctx,
        );
        FileOpsTool::execute(
            "2",
            "write_file",
            &json!({"path": "f.txt", "content": "v2"}),
            &ctx,
        );

        let on_disk = fs::read_to_string(temp_dir.path().join("f.txt")).unwrap();
        assert_eq!(on_disk, "v2", "Different content must produce a new write");
    }
831
832 #[test]
833 fn test_write_file_no_registry_always_writes() {
834 let temp_dir = TempDir::new().unwrap();
835 let ctx = create_test_context(temp_dir.path().to_str().unwrap()); let input = json!({"path": "f.txt", "content": "v1"});
837
838 FileOpsTool::execute("1", "write_file", &input, &ctx);
839 fs::write(temp_dir.path().join("f.txt"), "v_corrupted").unwrap();
840 FileOpsTool::execute("2", "write_file", &input, &ctx);
841
842 let on_disk = fs::read_to_string(temp_dir.path().join("f.txt")).unwrap();
843 assert_eq!(on_disk, "v1", "Without registry every call must go through");
844 }
845
    // A repeated delete with a registry returns the cached success instead of
    // failing on the now-missing file.
    #[test]
    fn test_delete_file_idempotent() {
        let temp_dir = TempDir::new().unwrap();
        let ctx = create_test_context_with_registry(temp_dir.path().to_str().unwrap());
        let file = temp_dir.path().join("del.txt");
        fs::write(&file, "").unwrap();

        let r1 = FileOpsTool::execute("1", "delete_file", &json!({"path": "del.txt"}), &ctx);
        assert!(!r1.is_error);
        assert!(!file.exists());

        let r2 = FileOpsTool::execute("2", "delete_file", &json!({"path": "del.txt"}), &ctx);
        assert!(
            !r2.is_error,
            "Idempotent delete must not fail on missing file"
        );
    }

    // A repeated create_directory with a registry returns the cached success.
    #[test]
    fn test_create_directory_idempotent() {
        let temp_dir = TempDir::new().unwrap();
        let ctx = create_test_context_with_registry(temp_dir.path().to_str().unwrap());

        let r1 = FileOpsTool::execute("1", "create_directory", &json!({"path": "sub/dir"}), &ctx);
        assert!(!r1.is_error);
        assert!(temp_dir.path().join("sub/dir").is_dir());

        let r2 = FileOpsTool::execute("2", "create_directory", &json!({"path": "sub/dir"}), &ctx);
        assert!(
            !r2.is_error,
            "Second create_directory must return cached success"
        );
    }
880
881 #[test]
882 fn test_idempotency_registry_cloned_context_shares_state() {
883 let temp_dir = TempDir::new().unwrap();
884 let ctx = create_test_context_with_registry(temp_dir.path().to_str().unwrap());
885 let ctx2 = ctx.clone(); FileOpsTool::execute(
888 "1",
889 "write_file",
890 &json!({"path": "shared.txt", "content": "x"}),
891 &ctx,
892 );
893 fs::write(temp_dir.path().join("shared.txt"), "CORRUPTED").unwrap();
894
895 FileOpsTool::execute(
897 "2",
898 "write_file",
899 &json!({"path": "shared.txt", "content": "x"}),
900 &ctx2,
901 );
902 let on_disk = fs::read_to_string(temp_dir.path().join("shared.txt")).unwrap();
903 assert_eq!(
904 on_disk, "CORRUPTED",
905 "Cloned context must share idempotency state"
906 );
907 }
908
    // With a staging backend the write is deferred: nothing reaches disk
    // until the transaction commits.
    #[test]
    fn test_write_file_staged_commit() {
        use crate::transaction::TransactionManager;
        use brainwires_core::StagingBackend;
        use std::sync::Arc;

        let temp_dir = TempDir::new().unwrap();
        let target = temp_dir.path().join("staged.txt");
        let mgr = Arc::new(TransactionManager::new().unwrap());
        let ctx = ToolContext {
            working_directory: temp_dir.path().to_str().unwrap().to_string(),
            staging_backend: Some(mgr.clone()),
            ..Default::default()
        };

        let result = FileOpsTool::execute(
            "1",
            "write_file",
            &json!({"path": "staged.txt", "content": "staged content"}),
            &ctx,
        );
        assert!(!result.is_error);
        assert!(
            result.content.contains("Staged"),
            "Result must indicate staging"
        );
        assert!(!target.exists(), "File must not exist before commit");

        mgr.commit().unwrap();
        assert!(target.exists());
        assert_eq!(fs::read_to_string(&target).unwrap(), "staged content");
    }

    // Rolling back a staged write discards it without touching the target.
    #[test]
    fn test_write_file_staged_rollback() {
        use crate::transaction::TransactionManager;
        use brainwires_core::StagingBackend;
        use std::sync::Arc;

        let temp_dir = TempDir::new().unwrap();
        let target = temp_dir.path().join("rollback.txt");
        let mgr = Arc::new(TransactionManager::new().unwrap());
        let ctx = ToolContext {
            working_directory: temp_dir.path().to_str().unwrap().to_string(),
            staging_backend: Some(mgr.clone()),
            ..Default::default()
        };

        FileOpsTool::execute(
            "1",
            "write_file",
            &json!({"path": "rollback.txt", "content": "data"}),
            &ctx,
        );
        mgr.rollback();
        assert!(!target.exists(), "File must not exist after rollback");
    }

    // A staged edit leaves the original file untouched until commit; the
    // replacement only becomes visible afterwards.
    #[test]
    fn test_edit_file_staged_commit() {
        use crate::transaction::TransactionManager;
        use brainwires_core::StagingBackend;
        use std::sync::Arc;

        let temp_dir = TempDir::new().unwrap();
        let target = temp_dir.path().join("edit.txt");
        fs::write(&target, "Hello World").unwrap();

        let mgr = Arc::new(TransactionManager::new().unwrap());
        let ctx = ToolContext {
            working_directory: temp_dir.path().to_str().unwrap().to_string(),
            staging_backend: Some(mgr.clone()),
            ..Default::default()
        };

        let result = FileOpsTool::execute(
            "1",
            "edit_file",
            &json!({"path": "edit.txt", "old_text": "World", "new_text": "Rust"}),
            &ctx,
        );
        assert!(!result.is_error);
        assert!(
            result.content.contains("Staged"),
            "Result must indicate staging"
        );

        // Still the original content before commit.
        assert_eq!(fs::read_to_string(&target).unwrap(), "Hello World");

        mgr.commit().unwrap();
        assert_eq!(fs::read_to_string(&target).unwrap(), "Hello Rust");
    }
1004}