1use crate::registry::Tool;
4use async_trait::async_trait;
5use rustant_core::error::ToolError;
6use rustant_core::types::{Artifact, RiskLevel, ToolOutput};
7use std::path::{Path, PathBuf};
8use tracing::{debug, warn};
9
10fn validate_workspace_path(
16 workspace: &Path,
17 path_str: &str,
18 tool_name: &str,
19) -> Result<PathBuf, ToolError> {
20 let workspace_canonical = workspace
22 .canonicalize()
23 .unwrap_or_else(|_| workspace.to_path_buf());
24
25 let resolved = if Path::new(path_str).is_absolute() {
26 PathBuf::from(path_str)
27 } else {
28 workspace_canonical.join(path_str)
29 };
30
31 if resolved.exists() {
33 let canonical = resolved
34 .canonicalize()
35 .map_err(|e| ToolError::ExecutionFailed {
36 name: tool_name.into(),
37 message: format!("Path resolution failed: {}", e),
38 })?;
39
40 if !canonical.starts_with(&workspace_canonical) {
41 return Err(ToolError::PermissionDenied {
42 name: tool_name.into(),
43 reason: format!("Path '{}' is outside the workspace", path_str),
44 });
45 }
46 return Ok(canonical);
47 }
48
49 let mut normalized = Vec::new();
51 for component in resolved.components() {
52 match component {
53 std::path::Component::ParentDir => {
54 if normalized.pop().is_none() {
55 return Err(ToolError::PermissionDenied {
56 name: tool_name.into(),
57 reason: format!("Path '{}' escapes the workspace", path_str),
58 });
59 }
60 }
61 std::path::Component::CurDir => {} other => normalized.push(other),
63 }
64 }
65 let normalized_path: PathBuf = normalized.iter().collect();
66
67 if !normalized_path.starts_with(&workspace_canonical) {
68 return Err(ToolError::PermissionDenied {
69 name: tool_name.into(),
70 reason: format!("Path '{}' is outside the workspace", path_str),
71 });
72 }
73
74 Ok(resolved)
75}
76
77pub struct FileReadTool {
79 workspace: PathBuf,
80}
81
82impl FileReadTool {
83 pub fn new(workspace: PathBuf) -> Self {
84 Self { workspace }
85 }
86
87 fn resolve_path(&self, path: &str) -> Result<PathBuf, ToolError> {
88 let resolved = if Path::new(path).is_absolute() {
89 PathBuf::from(path)
90 } else {
91 self.workspace.join(path)
92 };
93
94 let canonical = resolved
96 .canonicalize()
97 .map_err(|e| ToolError::ExecutionFailed {
98 name: "file_read".into(),
99 message: format!("Path resolution failed: {}", e),
100 })?;
101
102 let workspace_canonical = self
104 .workspace
105 .canonicalize()
106 .unwrap_or_else(|_| self.workspace.clone());
107
108 if !canonical.starts_with(&workspace_canonical) {
109 return Err(ToolError::PermissionDenied {
110 name: "file_read".into(),
111 reason: format!("Path '{}' is outside the workspace", path),
112 });
113 }
114
115 Ok(canonical)
116 }
117}
118
119#[async_trait]
120impl Tool for FileReadTool {
121 fn name(&self) -> &str {
122 "file_read"
123 }
124
125 fn description(&self) -> &str {
126 "Read the contents of a file. Supports optional line range with start_line and end_line parameters."
127 }
128
129 fn parameters_schema(&self) -> serde_json::Value {
130 serde_json::json!({
131 "type": "object",
132 "properties": {
133 "path": {
134 "type": "string",
135 "description": "Path to the file to read (relative to workspace or absolute)"
136 },
137 "start_line": {
138 "type": "integer",
139 "description": "Starting line number (1-based, inclusive). Optional."
140 },
141 "end_line": {
142 "type": "integer",
143 "description": "Ending line number (1-based, inclusive). Optional."
144 }
145 },
146 "required": ["path"]
147 })
148 }
149
150 async fn execute(&self, args: serde_json::Value) -> Result<ToolOutput, ToolError> {
151 let path_str = args["path"]
152 .as_str()
153 .ok_or_else(|| ToolError::InvalidArguments {
154 name: "file_read".into(),
155 reason: "'path' parameter is required and must be a string".into(),
156 })?;
157
158 let path = self.resolve_path(path_str)?;
159
160 debug!(path = %path.display(), "Reading file");
161
162 let content =
163 tokio::fs::read_to_string(&path)
164 .await
165 .map_err(|e| ToolError::ExecutionFailed {
166 name: "file_read".into(),
167 message: format!("Failed to read '{}': {}", path_str, e),
168 })?;
169
170 let start_line = args["start_line"].as_u64().map(|n| n as usize);
171 let end_line = args["end_line"].as_u64().map(|n| n as usize);
172
173 let output = if start_line.is_some() || end_line.is_some() {
174 let lines: Vec<&str> = content.lines().collect();
175 let start = start_line.unwrap_or(1).saturating_sub(1);
176 let end = end_line.unwrap_or(lines.len()).min(lines.len());
177
178 if start >= lines.len() {
179 return Ok(ToolOutput::text(format!(
180 "File has {} lines, start_line {} is out of range",
181 lines.len(),
182 start + 1
183 )));
184 }
185
186 let selected: Vec<String> = lines[start..end]
187 .iter()
188 .enumerate()
189 .map(|(i, line)| format!("{:>4} | {}", start + i + 1, line))
190 .collect();
191 selected.join("\n")
192 } else {
193 content
195 .lines()
196 .enumerate()
197 .map(|(i, line)| format!("{:>4} | {}", i + 1, line))
198 .collect::<Vec<_>>()
199 .join("\n")
200 };
201
202 Ok(ToolOutput::text(output))
203 }
204
205 fn risk_level(&self) -> RiskLevel {
206 RiskLevel::ReadOnly
207 }
208}
209
/// Lists directory contents inside a workspace, honoring `.gitignore`.
pub struct FileListTool {
    workspace: PathBuf,
}

impl FileListTool {
    /// Build a lister rooted at the given workspace directory.
    pub fn new(workspace: PathBuf) -> Self {
        FileListTool { workspace }
    }
}
220
221#[async_trait]
222impl Tool for FileListTool {
223 fn name(&self) -> &str {
224 "file_list"
225 }
226
227 fn description(&self) -> &str {
228 "List files and directories at the given path. Respects .gitignore patterns."
229 }
230
231 fn parameters_schema(&self) -> serde_json::Value {
232 serde_json::json!({
233 "type": "object",
234 "properties": {
235 "path": {
236 "type": "string",
237 "description": "Directory path to list (relative to workspace). Defaults to workspace root."
238 },
239 "recursive": {
240 "type": "boolean",
241 "description": "Whether to list files recursively. Default: false."
242 },
243 "max_depth": {
244 "type": "integer",
245 "description": "Maximum depth for recursive listing. Default: 3."
246 }
247 }
248 })
249 }
250
251 async fn execute(&self, args: serde_json::Value) -> Result<ToolOutput, ToolError> {
252 let path_str = args["path"].as_str().unwrap_or(".");
253 let recursive = args["recursive"].as_bool().unwrap_or(false);
254 let max_depth = args["max_depth"].as_u64().unwrap_or(3) as usize;
255
256 let target_dir = if path_str == "." {
257 self.workspace.clone()
258 } else if Path::new(path_str).is_absolute() {
259 PathBuf::from(path_str)
260 } else {
261 self.workspace.join(path_str)
262 };
263
264 if !target_dir.exists() {
265 return Err(ToolError::ExecutionFailed {
266 name: "file_list".into(),
267 message: format!("Directory '{}' does not exist", path_str),
268 });
269 }
270
271 if !target_dir.is_dir() {
272 return Err(ToolError::ExecutionFailed {
273 name: "file_list".into(),
274 message: format!("'{}' is not a directory", path_str),
275 });
276 }
277
278 debug!(path = %target_dir.display(), recursive, max_depth, "Listing directory");
279
280 let mut entries = Vec::new();
281
282 if recursive {
283 let walker = ignore::WalkBuilder::new(&target_dir)
285 .max_depth(Some(max_depth))
286 .hidden(false)
287 .git_ignore(true)
288 .build();
289
290 for entry in walker {
291 match entry {
292 Ok(entry) => {
293 let path = entry.path();
294 if path == target_dir {
295 continue;
296 }
297 let relative = path.strip_prefix(&target_dir).unwrap_or(path);
298 let type_indicator = if path.is_dir() { "/" } else { "" };
299 entries.push(format!("{}{}", relative.display(), type_indicator));
300 }
301 Err(e) => {
302 warn!("Error walking directory: {}", e);
303 }
304 }
305 }
306 } else {
307 let mut read_dir =
308 tokio::fs::read_dir(&target_dir)
309 .await
310 .map_err(|e| ToolError::ExecutionFailed {
311 name: "file_list".into(),
312 message: format!("Failed to read directory '{}': {}", path_str, e),
313 })?;
314
315 while let Some(entry) =
316 read_dir
317 .next_entry()
318 .await
319 .map_err(|e| ToolError::ExecutionFailed {
320 name: "file_list".into(),
321 message: format!("Error reading entry: {}", e),
322 })?
323 {
324 let file_type =
325 entry
326 .file_type()
327 .await
328 .map_err(|e| ToolError::ExecutionFailed {
329 name: "file_list".into(),
330 message: format!("Error reading file type: {}", e),
331 })?;
332
333 let name = entry.file_name().to_string_lossy().to_string();
334 let type_indicator = if file_type.is_dir() { "/" } else { "" };
335 entries.push(format!("{}{}", name, type_indicator));
336 }
337 }
338
339 entries.sort();
340 let output = if entries.is_empty() {
341 format!("Directory '{}' is empty", path_str)
342 } else {
343 format!("Contents of '{}':\n{}", path_str, entries.join("\n"))
344 };
345
346 Ok(ToolOutput::text(output))
347 }
348
349 fn risk_level(&self) -> RiskLevel {
350 RiskLevel::ReadOnly
351 }
352}
353
/// Searches file contents within a workspace.
pub struct FileSearchTool {
    workspace: PathBuf,
}

impl FileSearchTool {
    /// Build a searcher rooted at the given workspace directory.
    pub fn new(workspace: PathBuf) -> Self {
        FileSearchTool { workspace }
    }
}
364
365#[async_trait]
366impl Tool for FileSearchTool {
367 fn name(&self) -> &str {
368 "file_search"
369 }
370
371 fn description(&self) -> &str {
372 "Search for a text pattern within files in the workspace. Returns matching lines with file paths and line numbers."
373 }
374
375 fn parameters_schema(&self) -> serde_json::Value {
376 serde_json::json!({
377 "type": "object",
378 "properties": {
379 "pattern": {
380 "type": "string",
381 "description": "Text pattern to search for (supports regex)"
382 },
383 "path": {
384 "type": "string",
385 "description": "Directory or file to search in (relative to workspace). Defaults to workspace root."
386 },
387 "file_pattern": {
388 "type": "string",
389 "description": "Glob pattern for filtering files (e.g., '*.rs', '*.py')"
390 },
391 "max_results": {
392 "type": "integer",
393 "description": "Maximum number of results to return. Default: 50."
394 }
395 },
396 "required": ["pattern"]
397 })
398 }
399
400 async fn execute(&self, args: serde_json::Value) -> Result<ToolOutput, ToolError> {
401 let pattern = args["pattern"]
402 .as_str()
403 .ok_or_else(|| ToolError::InvalidArguments {
404 name: "file_search".into(),
405 reason: "'pattern' parameter is required".into(),
406 })?;
407
408 let search_path = args["path"].as_str().unwrap_or(".");
409 let file_pattern = args["file_pattern"].as_str();
410 let max_results = args["max_results"].as_u64().unwrap_or(50) as usize;
411
412 let target_path = if search_path == "." {
413 self.workspace.clone()
414 } else {
415 self.workspace.join(search_path)
416 };
417
418 debug!(
419 pattern = pattern,
420 path = %target_path.display(),
421 "Searching files"
422 );
423
424 let mut results = Vec::new();
425
426 let normalized = Self::strip_regex_escapes(pattern);
429 let pattern_lower = normalized.to_lowercase();
430
431 let mut files_to_search: Vec<PathBuf> = Vec::new();
433
434 if target_path.is_file() {
435 files_to_search.push(target_path);
437 } else {
438 let walker = ignore::WalkBuilder::new(&target_path)
440 .hidden(false)
441 .git_ignore(true)
442 .build();
443
444 for entry in walker {
445 let entry = match entry {
446 Ok(e) => e,
447 Err(_) => continue,
448 };
449
450 let path = entry.path();
451 if !path.is_file() {
452 continue;
453 }
454
455 if let Some(fp) = file_pattern {
457 let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
458 if !Self::simple_glob_match(fp, file_name) {
459 continue;
460 }
461 }
462
463 files_to_search.push(path.to_path_buf());
464 }
465 }
466
467 for file_path in &files_to_search {
468 if results.len() >= max_results {
469 break;
470 }
471
472 let content = match std::fs::read_to_string(file_path) {
474 Ok(c) => c,
475 Err(_) => continue, };
477
478 let relative = file_path.strip_prefix(&self.workspace).unwrap_or(file_path);
479
480 for (line_num, line) in content.lines().enumerate() {
481 if results.len() >= max_results {
482 break;
483 }
484 if line.to_lowercase().contains(&pattern_lower) {
485 results.push(format!(
486 "{}:{}: {}",
487 relative.display(),
488 line_num + 1,
489 line.trim()
490 ));
491 }
492 }
493 }
494
495 let output = if results.is_empty() {
496 format!("No matches found for pattern '{}'", pattern)
497 } else {
498 format!(
499 "Found {} match{}:\n{}",
500 results.len(),
501 if results.len() == 1 { "" } else { "es" },
502 results.join("\n")
503 )
504 };
505
506 Ok(ToolOutput::text(output))
507 }
508
509 fn risk_level(&self) -> RiskLevel {
510 RiskLevel::ReadOnly
511 }
512}
513
514impl FileSearchTool {
515 fn simple_glob_match(pattern: &str, name: &str) -> bool {
516 if pattern.starts_with("*.") {
517 let ext = &pattern[1..];
518 name.ends_with(ext)
519 } else if let Some(prefix) = pattern.strip_suffix("*") {
520 name.starts_with(prefix)
521 } else {
522 name == pattern
523 }
524 }
525
526 fn strip_regex_escapes(pattern: &str) -> String {
532 let mut result = String::with_capacity(pattern.len());
533 let mut chars = pattern.chars().peekable();
534 while let Some(ch) = chars.next() {
535 if ch == '\\'
536 && let Some(&next) = chars.peek()
537 {
538 if matches!(
540 next,
541 '[' | ']'
542 | '('
543 | ')'
544 | '{'
545 | '}'
546 | '.'
547 | '*'
548 | '+'
549 | '?'
550 | '^'
551 | '$'
552 | '|'
553 | '\\'
554 ) {
555 result.push(chars.next().unwrap());
556 continue;
557 }
558 }
559 result.push(ch);
560 }
561 result
562 }
563}
564
/// Upper bound on a single file_write payload (10 MB).
const MAX_WRITE_BYTES: usize = 10 * 1024 * 1024;

/// Writes (creates or overwrites) files inside a workspace.
pub struct FileWriteTool {
    workspace: PathBuf,
}

impl FileWriteTool {
    /// Build a writer rooted at the given workspace directory.
    pub fn new(workspace: PathBuf) -> Self {
        FileWriteTool { workspace }
    }
}
579
580#[async_trait]
581impl Tool for FileWriteTool {
582 fn name(&self) -> &str {
583 "file_write"
584 }
585
586 fn description(&self) -> &str {
587 "Write content to a file. Creates the file if it doesn't exist, overwrites if it does. Creates parent directories as needed."
588 }
589
590 fn parameters_schema(&self) -> serde_json::Value {
591 serde_json::json!({
592 "type": "object",
593 "properties": {
594 "path": {
595 "type": "string",
596 "description": "Path to the file to write (relative to workspace)"
597 },
598 "content": {
599 "type": "string",
600 "description": "Content to write to the file"
601 }
602 },
603 "required": ["path", "content"]
604 })
605 }
606
607 async fn execute(&self, args: serde_json::Value) -> Result<ToolOutput, ToolError> {
608 let path_str = args["path"]
609 .as_str()
610 .ok_or_else(|| ToolError::InvalidArguments {
611 name: "file_write".into(),
612 reason: "'path' parameter is required".into(),
613 })?;
614 let content = args["content"]
615 .as_str()
616 .ok_or_else(|| ToolError::InvalidArguments {
617 name: "file_write".into(),
618 reason: "'content' parameter is required".into(),
619 })?;
620
621 if content.len() > MAX_WRITE_BYTES {
623 return Err(ToolError::InvalidArguments {
624 name: "file_write".into(),
625 reason: format!(
626 "Content size ({} bytes) exceeds maximum allowed ({} bytes / {} MB)",
627 content.len(),
628 MAX_WRITE_BYTES,
629 MAX_WRITE_BYTES / (1024 * 1024)
630 ),
631 });
632 }
633
634 let _ = validate_workspace_path(&self.workspace, path_str, "file_write")?;
636 let path = self.workspace.join(path_str);
637
638 if let Some(parent) = path.parent() {
640 tokio::fs::create_dir_all(parent)
641 .await
642 .map_err(|e| ToolError::ExecutionFailed {
643 name: "file_write".into(),
644 message: format!("Failed to create directories: {}", e),
645 })?;
646 }
647
648 let existed = path.exists();
649 let bytes = content.len();
650
651 debug!(path = %path.display(), bytes, existed, "Writing file");
652
653 tokio::fs::write(&path, content)
654 .await
655 .map_err(|e| ToolError::ExecutionFailed {
656 name: "file_write".into(),
657 message: format!("Failed to write '{}': {}", path_str, e),
658 })?;
659
660 let action = if existed { "Updated" } else { "Created" };
661 let artifact = if existed {
662 Artifact::FileModified {
663 path: PathBuf::from(path_str),
664 diff: String::new(), }
666 } else {
667 Artifact::FileCreated {
668 path: PathBuf::from(path_str),
669 }
670 };
671
672 Ok(
673 ToolOutput::text(format!("{} '{}' ({} bytes)", action, path_str, bytes))
674 .with_artifact(artifact),
675 )
676 }
677
678 fn risk_level(&self) -> RiskLevel {
679 RiskLevel::Write
680 }
681}
682
/// Applies exact-text replacements to files inside a workspace.
pub struct FilePatchTool {
    workspace: PathBuf,
}

impl FilePatchTool {
    /// Build a patcher rooted at the given workspace directory.
    pub fn new(workspace: PathBuf) -> Self {
        FilePatchTool { workspace }
    }
}
693
694#[async_trait]
695impl Tool for FilePatchTool {
696 fn name(&self) -> &str {
697 "file_patch"
698 }
699
700 fn description(&self) -> &str {
701 "Apply a text replacement to a file. Specify the old text to find and the new text to replace it with."
702 }
703
704 fn parameters_schema(&self) -> serde_json::Value {
705 serde_json::json!({
706 "type": "object",
707 "properties": {
708 "path": {
709 "type": "string",
710 "description": "Path to the file to patch (relative to workspace)"
711 },
712 "old_text": {
713 "type": "string",
714 "description": "The exact text to find in the file"
715 },
716 "new_text": {
717 "type": "string",
718 "description": "The replacement text"
719 }
720 },
721 "required": ["path", "old_text", "new_text"]
722 })
723 }
724
725 async fn execute(&self, args: serde_json::Value) -> Result<ToolOutput, ToolError> {
726 let path_str = args["path"]
727 .as_str()
728 .ok_or_else(|| ToolError::InvalidArguments {
729 name: "file_patch".into(),
730 reason: "'path' parameter is required".into(),
731 })?;
732 let old_text = args["old_text"]
733 .as_str()
734 .ok_or_else(|| ToolError::InvalidArguments {
735 name: "file_patch".into(),
736 reason: "'old_text' parameter is required".into(),
737 })?;
738 let new_text = args["new_text"]
739 .as_str()
740 .ok_or_else(|| ToolError::InvalidArguments {
741 name: "file_patch".into(),
742 reason: "'new_text' parameter is required".into(),
743 })?;
744
745 let _ = validate_workspace_path(&self.workspace, path_str, "file_patch")?;
747 let path = self.workspace.join(path_str);
748
749 let content =
750 tokio::fs::read_to_string(&path)
751 .await
752 .map_err(|e| ToolError::ExecutionFailed {
753 name: "file_patch".into(),
754 message: format!("Failed to read '{}': {}", path_str, e),
755 })?;
756
757 if !content.contains(old_text) {
758 return Err(ToolError::ExecutionFailed {
759 name: "file_patch".into(),
760 message: format!(
761 "Could not find the specified text in '{}'. The old_text must match exactly.",
762 path_str
763 ),
764 });
765 }
766
767 let count = content.matches(old_text).count();
768 let new_content = content.replacen(old_text, new_text, 1);
769
770 tokio::fs::write(&path, &new_content)
771 .await
772 .map_err(|e| ToolError::ExecutionFailed {
773 name: "file_patch".into(),
774 message: format!("Failed to write '{}': {}", path_str, e),
775 })?;
776
777 let mut output = ToolOutput::text(format!(
778 "Patched '{}' ({} occurrence{} found, replaced first)",
779 path_str,
780 count,
781 if count == 1 { "" } else { "s" }
782 ));
783 output.artifacts.push(Artifact::FileModified {
784 path: PathBuf::from(path_str),
785 diff: format!(
786 "- {}\n+ {}",
787 old_text.lines().next().unwrap_or(""),
788 new_text.lines().next().unwrap_or("")
789 ),
790 });
791
792 Ok(output)
793 }
794
795 fn risk_level(&self) -> RiskLevel {
796 RiskLevel::Write
797 }
798}
799
800#[cfg(test)]
801mod tests {
802 use super::*;
803 use tempfile::TempDir;
804
    // Build a throwaway workspace containing hello.txt plus src/{main,lib}.rs;
    // the TempDir guard removes everything on drop.
    fn setup_workspace() -> TempDir {
        let dir = TempDir::new().unwrap();
        std::fs::write(
            dir.path().join("hello.txt"),
            "Hello, World!\nLine 2\nLine 3\n",
        )
        .unwrap();
        std::fs::create_dir_all(dir.path().join("src")).unwrap();
        std::fs::write(
            dir.path().join("src/main.rs"),
            "fn main() {\n println!(\"Hello\");\n}\n",
        )
        .unwrap();
        std::fs::write(
            dir.path().join("src/lib.rs"),
            "pub fn add(a: i32, b: i32) -> i32 {\n a + b\n}\n",
        )
        .unwrap();
        dir
    }
826
    // --- FileReadTool ---

    #[tokio::test]
    async fn test_file_read_basic() {
        let dir = setup_workspace();
        let tool = FileReadTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"path": "hello.txt"}))
            .await
            .unwrap();
        // Output includes the content and the "   1 | " line-number gutter.
        assert!(result.content.contains("Hello, World!"));
        assert!(result.content.contains("1 |"));
    }

    #[tokio::test]
    async fn test_file_read_with_line_range() {
        let dir = setup_workspace();
        let tool = FileReadTool::new(dir.path().to_path_buf());

        // 1-based inclusive range: lines 2..=3 only; line 1 must be absent.
        let result = tool
            .execute(serde_json::json!({"path": "hello.txt", "start_line": 2, "end_line": 3}))
            .await
            .unwrap();
        assert!(result.content.contains("Line 2"));
        assert!(result.content.contains("Line 3"));
        assert!(!result.content.contains("Hello, World!"));
    }

    #[tokio::test]
    async fn test_file_read_missing_file() {
        let dir = setup_workspace();
        let tool = FileReadTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"path": "nonexistent.txt"}))
            .await;
        assert!(result.is_err());
    }

    #[tokio::test]
    async fn test_file_read_missing_path_param() {
        let dir = setup_workspace();
        let tool = FileReadTool::new(dir.path().to_path_buf());

        let result = tool.execute(serde_json::json!({})).await;
        assert!(result.is_err());
        // A missing 'path' must surface as InvalidArguments, not a panic.
        match result.unwrap_err() {
            ToolError::InvalidArguments { name, .. } => assert_eq!(name, "file_read"),
            e => panic!("Expected InvalidArguments, got: {:?}", e),
        }
    }

    #[test]
    fn test_file_read_properties() {
        let tool = FileReadTool::new(PathBuf::from("/tmp"));
        assert_eq!(tool.name(), "file_read");
        assert_eq!(tool.risk_level(), RiskLevel::ReadOnly);
        assert!(tool.description().contains("Read"));
    }
887
    // --- FileListTool ---

    #[tokio::test]
    async fn test_file_list_basic() {
        let dir = setup_workspace();
        let tool = FileListTool::new(dir.path().to_path_buf());

        // No "path" argument: defaults to the workspace root.
        let result = tool.execute(serde_json::json!({})).await.unwrap();
        assert!(result.content.contains("hello.txt"));
        // Directories are suffixed with "/".
        assert!(result.content.contains("src/"));
    }

    #[tokio::test]
    async fn test_file_list_subdirectory() {
        let dir = setup_workspace();
        let tool = FileListTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"path": "src"}))
            .await
            .unwrap();
        assert!(result.content.contains("main.rs"));
        assert!(result.content.contains("lib.rs"));
    }

    #[tokio::test]
    async fn test_file_list_recursive() {
        let dir = setup_workspace();
        let tool = FileListTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"path": ".", "recursive": true}))
            .await
            .unwrap();
        // Accept either path separator so the assertion also holds on Windows.
        assert!(result.content.contains("src/main.rs") || result.content.contains("src\\main.rs"));
    }

    #[tokio::test]
    async fn test_file_list_nonexistent_dir() {
        let dir = setup_workspace();
        let tool = FileListTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"path": "nonexistent"}))
            .await;
        assert!(result.is_err());
    }

    #[test]
    fn test_file_list_properties() {
        let tool = FileListTool::new(PathBuf::from("/tmp"));
        assert_eq!(tool.name(), "file_list");
        assert_eq!(tool.risk_level(), RiskLevel::ReadOnly);
    }
942
    // --- FileSearchTool ---

    #[tokio::test]
    async fn test_file_search_basic() {
        let dir = setup_workspace();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"pattern": "Hello"}))
            .await
            .unwrap();
        assert!(result.content.contains("match"));
        assert!(result.content.contains("Hello"));
    }

    #[tokio::test]
    async fn test_file_search_with_file_pattern() {
        let dir = setup_workspace();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        // Restrict the search to *.rs files via the glob filter.
        let result = tool
            .execute(serde_json::json!({"pattern": "fn", "file_pattern": "*.rs"}))
            .await
            .unwrap();
        assert!(result.content.contains("fn main"));
    }

    #[tokio::test]
    async fn test_file_search_no_results() {
        let dir = setup_workspace();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"pattern": "xyznonexistent123"}))
            .await
            .unwrap();
        assert!(result.content.contains("No matches found"));
    }

    #[tokio::test]
    async fn test_file_search_case_insensitive() {
        let dir = setup_workspace();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        // Lowercase "hello" must match "Hello, World!" in the fixture.
        let result = tool
            .execute(serde_json::json!({"pattern": "hello"}))
            .await
            .unwrap();
        assert!(result.content.contains("match"));
    }

    #[test]
    fn test_file_search_properties() {
        let tool = FileSearchTool::new(PathBuf::from("/tmp"));
        assert_eq!(tool.name(), "file_search");
        assert_eq!(tool.risk_level(), RiskLevel::ReadOnly);
    }

    #[test]
    fn test_simple_glob_match() {
        assert!(FileSearchTool::simple_glob_match("*.rs", "main.rs"));
        assert!(FileSearchTool::simple_glob_match("*.rs", "lib.rs"));
        assert!(!FileSearchTool::simple_glob_match("*.rs", "main.py"));
        assert!(FileSearchTool::simple_glob_match("test*", "test_file.rs"));
        assert!(FileSearchTool::simple_glob_match("main.rs", "main.rs"));
        assert!(!FileSearchTool::simple_glob_match("main.rs", "lib.rs"));
    }

    #[test]
    fn test_strip_regex_escapes() {
        assert_eq!(FileSearchTool::strip_regex_escapes(r"#\[test\]"), "#[test]");
        assert_eq!(FileSearchTool::strip_regex_escapes(r"foo\.bar"), "foo.bar");
        assert_eq!(FileSearchTool::strip_regex_escapes(r"a\+b\*c"), "a+b*c");
        assert_eq!(FileSearchTool::strip_regex_escapes(r"\(group\)"), "(group)");
        assert_eq!(FileSearchTool::strip_regex_escapes(r"\{brace\}"), "{brace}");
        assert_eq!(FileSearchTool::strip_regex_escapes(r"a\\b"), "a\\b");
        assert_eq!(FileSearchTool::strip_regex_escapes("hello"), "hello");
        assert_eq!(FileSearchTool::strip_regex_escapes("#[test]"), "#[test]");
        // A trailing backslash passes through unchanged.
        assert_eq!(FileSearchTool::strip_regex_escapes("abc\\"), "abc\\");
        // Escapes of non-metacharacters (e.g. \n) are left untouched.
        assert_eq!(FileSearchTool::strip_regex_escapes(r"\n"), "\\n");
    }
1029
    #[tokio::test]
    async fn test_file_search_with_file_path() {
        let dir = setup_workspace();
        // "path" pointing at a single file should search only that file.
        std::fs::write(
            dir.path().join("src/tests.rs"),
            "#[test]\nfn test_one() {}\n\n#[test]\nfn test_two() {}\n",
        )
        .unwrap();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({"pattern": "#[test]", "path": "src/tests.rs"}))
            .await
            .unwrap();
        assert!(
            result.content.contains("Found 2"),
            "Should find 2 matches when path is a file: {}",
            result.content
        );
    }

    #[tokio::test]
    async fn test_file_search_regex_escaped_pattern() {
        let dir = setup_workspace();
        std::fs::write(
            dir.path().join("src/tests.rs"),
            "#[test]\nfn test_one() {}\n\n#[test]\nfn test_two() {}\n",
        )
        .unwrap();
        let tool = FileSearchTool::new(dir.path().to_path_buf());

        // The escaped form r"#\[test\]" should match the literal "#[test]"
        // after escape stripping.
        let result = tool
            .execute(serde_json::json!({"pattern": r"#\[test\]", "path": "src"}))
            .await
            .unwrap();
        assert!(
            result.content.contains("Found 2"),
            "Regex-escaped pattern should match after stripping: {}",
            result.content
        );
    }
1074
    // --- FileWriteTool ---

    #[tokio::test]
    async fn test_file_write_create_new() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "new_file.txt",
                "content": "New content!"
            }))
            .await
            .unwrap();

        // A brand-new file reports "Created" plus a FileCreated artifact.
        assert!(result.content.contains("Created"));
        assert!(result.artifacts.len() == 1);
        assert!(matches!(&result.artifacts[0], Artifact::FileCreated { .. }));

        let content = std::fs::read_to_string(dir.path().join("new_file.txt")).unwrap();
        assert_eq!(content, "New content!");
    }

    #[tokio::test]
    async fn test_file_write_overwrite_existing() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "hello.txt",
                "content": "Overwritten!"
            }))
            .await
            .unwrap();

        // Overwriting an existing file reports "Updated".
        assert!(result.content.contains("Updated"));

        let content = std::fs::read_to_string(dir.path().join("hello.txt")).unwrap();
        assert_eq!(content, "Overwritten!");
    }

    #[tokio::test]
    async fn test_file_write_creates_directories() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // Intermediate directories are created on demand.
        let result = tool
            .execute(serde_json::json!({
                "path": "deep/nested/dir/file.txt",
                "content": "Deep content"
            }))
            .await
            .unwrap();

        assert!(result.content.contains("Created"));
        assert!(dir.path().join("deep/nested/dir/file.txt").exists());
    }

    #[tokio::test]
    async fn test_file_write_missing_params() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // "content" is required; omitting it must error.
        let result = tool.execute(serde_json::json!({"path": "test.txt"})).await;
        assert!(result.is_err());
    }

    #[test]
    fn test_file_write_properties() {
        let tool = FileWriteTool::new(PathBuf::from("/tmp"));
        assert_eq!(tool.name(), "file_write");
        assert_eq!(tool.risk_level(), RiskLevel::Write);
    }
1149
    // --- FilePatchTool ---

    #[tokio::test]
    async fn test_file_patch_basic() {
        let dir = setup_workspace();
        let tool = FilePatchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "hello.txt",
                "old_text": "Hello, World!",
                "new_text": "Hi, World!"
            }))
            .await
            .unwrap();

        assert!(result.content.contains("Patched"));

        // The replacement is applied on disk, not just reported.
        let content = std::fs::read_to_string(dir.path().join("hello.txt")).unwrap();
        assert!(content.contains("Hi, World!"));
        assert!(!content.contains("Hello, World!"));
    }

    #[tokio::test]
    async fn test_file_patch_text_not_found() {
        let dir = setup_workspace();
        let tool = FilePatchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "hello.txt",
                "old_text": "nonexistent text",
                "new_text": "replacement"
            }))
            .await;

        assert!(result.is_err());
        // A non-matching old_text is ExecutionFailed with a clear message.
        match result.unwrap_err() {
            ToolError::ExecutionFailed { name, message } => {
                assert_eq!(name, "file_patch");
                assert!(message.contains("Could not find"));
            }
            e => panic!("Expected ExecutionFailed, got: {:?}", e),
        }
    }

    #[tokio::test]
    async fn test_file_patch_missing_file() {
        let dir = setup_workspace();
        let tool = FilePatchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "nonexistent.txt",
                "old_text": "old",
                "new_text": "new"
            }))
            .await;

        assert!(result.is_err());
    }

    #[test]
    fn test_file_patch_properties() {
        let tool = FilePatchTool::new(PathBuf::from("/tmp"));
        assert_eq!(tool.name(), "file_patch");
        assert_eq!(tool.risk_level(), RiskLevel::Write);
    }
1218
    // --- Workspace containment and size-limit enforcement ---

    #[tokio::test]
    async fn test_file_write_rejects_path_traversal() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // "../../" must be rejected as PermissionDenied, not written.
        let result = tool
            .execute(serde_json::json!({
                "path": "../../escape.txt",
                "content": "escaped!"
            }))
            .await;
        assert!(result.is_err());
        match result.unwrap_err() {
            ToolError::PermissionDenied { name, .. } => assert_eq!(name, "file_write"),
            e => panic!("Expected PermissionDenied, got: {:?}", e),
        }
    }

    #[tokio::test]
    async fn test_file_write_rejects_absolute_path_outside_workspace() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // Absolute paths outside the workspace are equally rejected.
        let result = tool
            .execute(serde_json::json!({
                "path": "/tmp/escape.txt",
                "content": "escaped!"
            }))
            .await;
        assert!(result.is_err());
        match result.unwrap_err() {
            ToolError::PermissionDenied { name, .. } => assert_eq!(name, "file_write"),
            e => panic!("Expected PermissionDenied, got: {:?}", e),
        }
    }

    #[tokio::test]
    async fn test_file_write_rejects_oversized_content() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // One byte over the cap must be rejected as InvalidArguments.
        let oversized = "x".repeat(MAX_WRITE_BYTES + 1);
        let result = tool
            .execute(serde_json::json!({
                "path": "big.txt",
                "content": oversized
            }))
            .await;
        assert!(result.is_err());
        match result.unwrap_err() {
            ToolError::InvalidArguments { name, reason } => {
                assert_eq!(name, "file_write");
                assert!(reason.contains("exceeds maximum"));
            }
            e => panic!("Expected InvalidArguments, got: {:?}", e),
        }
    }

    #[tokio::test]
    async fn test_file_write_accepts_content_at_limit() {
        let dir = setup_workspace();
        let tool = FileWriteTool::new(dir.path().to_path_buf());

        // Exactly at the cap is allowed (the limit is inclusive).
        let at_limit = "x".repeat(MAX_WRITE_BYTES);
        let result = tool
            .execute(serde_json::json!({
                "path": "exact.txt",
                "content": at_limit
            }))
            .await;
        assert!(result.is_ok());
    }

    #[tokio::test]
    async fn test_file_patch_rejects_path_traversal() {
        let dir = setup_workspace();
        let tool = FilePatchTool::new(dir.path().to_path_buf());

        let result = tool
            .execute(serde_json::json!({
                "path": "../../escape.txt",
                "old_text": "old",
                "new_text": "new"
            }))
            .await;
        assert!(result.is_err());
        match result.unwrap_err() {
            ToolError::PermissionDenied { name, .. } => assert_eq!(name, "file_patch"),
            e => panic!("Expected PermissionDenied, got: {:?}", e),
        }
    }

    #[tokio::test]
    async fn test_file_read_rejects_path_traversal() {
        let dir = setup_workspace();
        let tool = FileReadTool::new(dir.path().to_path_buf());

        // Reading outside the workspace must fail (exact error kind depends
        // on whether the target exists, so only is_err is asserted).
        let result = tool
            .execute(serde_json::json!({"path": "../../etc/passwd"}))
            .await;
        assert!(result.is_err());
    }
1326}