use crate::agent::ide::IdeClient;
use crate::agent::ui::confirmation::ConfirmationResult;
use crate::agent::ui::diff::{confirm_file_write, confirm_file_write_with_ide};
use rig::completion::ToolDefinition;
use rig::tool::Tool;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;
use std::sync::Mutex;

/// Arguments for the `read_file` tool.
#[derive(Debug, Deserialize)]
pub struct ReadFileArgs {
    pub path: String,
    pub start_line: Option<u64>,
    pub end_line: Option<u64>,
}

#[derive(Debug, thiserror::Error)]
#[error("Read file error: {0}")]
pub struct ReadFileError(String);

/// A tool that reads files from within the project directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadFileTool {
    project_path: PathBuf,
}

impl ReadFileTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }

    /// Resolves `requested` against the project root and rejects anything that escapes it.
    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, ReadFileError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| ReadFileError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        // Canonicalize so symlinks and `..` components cannot be used to escape the project root.
        let canonical_target = target.canonicalize()
            .map_err(|e| ReadFileError(format!("File not found: {}", e)))?;

        if !canonical_target.starts_with(&canonical_project) {
            return Err(ReadFileError("Access denied: path is outside project directory".to_string()));
        }

        Ok(canonical_target)
    }
}

impl Tool for ReadFileTool {
    const NAME: &'static str = "read_file";

    type Error = ReadFileError;
    type Args = ReadFileArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "Read the contents of a file in the project. Use this to examine source code, configuration files, or any text file.".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the file to read (relative to project root)"
                    },
                    "start_line": {
                        "type": "integer",
                        "description": "Optional starting line number (1-based)"
                    },
                    "end_line": {
                        "type": "integer",
                        "description": "Optional ending line number (1-based, inclusive)"
                    }
                },
                "required": ["path"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let requested_path = PathBuf::from(&args.path);
        let file_path = self.validate_path(&requested_path)?;

        let metadata = fs::metadata(&file_path)
            .map_err(|e| ReadFileError(format!("Cannot read file: {}", e)))?;

        const MAX_SIZE: u64 = 1024 * 1024;
        if metadata.len() > MAX_SIZE {
            return Ok(json!({
                "error": format!("File too large ({} bytes). Maximum size is {} bytes.", metadata.len(), MAX_SIZE)
            }).to_string());
        }

        let content = fs::read_to_string(&file_path)
            .map_err(|e| ReadFileError(format!("Failed to read file: {}", e)))?;

        let output = if let Some(start) = args.start_line {
            let lines: Vec<&str> = content.lines().collect();
            let start_idx = (start as usize).saturating_sub(1);
            let end_idx = args.end_line.map(|e| (e as usize).min(lines.len())).unwrap_or(lines.len());

            if start_idx >= lines.len() {
                return Ok(json!({
                    "error": format!("Start line {} exceeds file length ({})", start, lines.len())
                }).to_string());
            }

            // Clamp so an end line before the start yields an empty selection instead of a panic.
            let end_idx = end_idx.max(start_idx);

            let selected: Vec<String> = lines[start_idx..end_idx]
                .iter()
                .enumerate()
                .map(|(i, line)| format!("{:>4} | {}", start_idx + i + 1, line))
                .collect();

            json!({
                "file": args.path,
                "lines": format!("{}-{}", start, end_idx),
                "total_lines": lines.len(),
                "content": selected.join("\n")
            })
        } else {
            json!({
                "file": args.path,
                "total_lines": content.lines().count(),
                "content": content
            })
        };

        serde_json::to_string_pretty(&output)
            .map_err(|e| ReadFileError(format!("Failed to serialize: {}", e)))
    }
}
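
// A minimal sketch of how the read-side path sandbox could be exercised. It only touches
// the synchronous `validate_path` helper, so it needs no async runtime; the scratch
// directory under the system temp dir is an assumption of this sketch (swap in your
// preferred test fixture, e.g. the `tempfile` crate, as needed).
#[cfg(test)]
mod read_file_tool_tests {
    use super::*;

    fn scratch_dir(name: &str) -> PathBuf {
        let dir = std::env::temp_dir().join(format!("read_file_tool_{}", name));
        fs::create_dir_all(&dir).unwrap();
        dir
    }

    #[test]
    fn rejects_paths_outside_project() {
        let base = scratch_dir("escape");
        let project = base.join("project");
        fs::create_dir_all(&project).unwrap();
        fs::write(base.join("secret.txt"), "outside the sandbox").unwrap();

        let tool = ReadFileTool::new(project);
        // `..` escapes the project root and must be rejected after canonicalization.
        assert!(tool.validate_path(&PathBuf::from("../secret.txt")).is_err());
    }

    #[test]
    fn accepts_files_inside_project() {
        let project = scratch_dir("inside");
        fs::write(project.join("main.rs"), "fn main() {}").unwrap();

        let tool = ReadFileTool::new(project.clone());
        let resolved = tool.validate_path(&PathBuf::from("main.rs")).unwrap();
        assert!(resolved.starts_with(project.canonicalize().unwrap()));
    }
}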

/// Arguments for the `list_directory` tool.
#[derive(Debug, Deserialize)]
pub struct ListDirectoryArgs {
    pub path: Option<String>,
    pub recursive: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("List directory error: {0}")]
pub struct ListDirectoryError(String);

/// A tool that lists directory contents within the project directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListDirectoryTool {
    project_path: PathBuf,
}

impl ListDirectoryTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, ListDirectoryError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| ListDirectoryError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let canonical_target = target.canonicalize()
            .map_err(|e| ListDirectoryError(format!("Directory not found: {}", e)))?;

        if !canonical_target.starts_with(&canonical_project) {
            return Err(ListDirectoryError("Access denied: path is outside project directory".to_string()));
        }

        Ok(canonical_target)
    }

    fn list_entries(
        &self,
        base_path: &PathBuf,
        current_path: &PathBuf,
        recursive: bool,
        depth: usize,
        max_depth: usize,
        entries: &mut Vec<serde_json::Value>,
    ) -> Result<(), ListDirectoryError> {
        // Dependency and build-output directories are never descended into.
        let skip_dirs = ["node_modules", ".git", "target", "__pycache__", ".venv", "venv", "dist", "build"];

        let dir_name = current_path.file_name().and_then(|n| n.to_str()).unwrap_or("");

        if depth > 0 && skip_dirs.contains(&dir_name) {
            return Ok(());
        }

        let read_dir = fs::read_dir(current_path)
            .map_err(|e| ListDirectoryError(format!("Cannot read directory: {}", e)))?;

        for entry in read_dir {
            let entry = entry.map_err(|e| ListDirectoryError(format!("Error reading entry: {}", e)))?;
            let path = entry.path();
            let metadata = entry.metadata().ok();

            let relative_path = path.strip_prefix(base_path).unwrap_or(&path).to_string_lossy().to_string();
            let is_dir = metadata.as_ref().map(|m| m.is_dir()).unwrap_or(false);
            let size = metadata.as_ref().map(|m| m.len()).unwrap_or(0);

            entries.push(json!({
                "name": entry.file_name().to_string_lossy(),
                "path": relative_path,
                "type": if is_dir { "directory" } else { "file" },
                "size": if is_dir { None::<u64> } else { Some(size) }
            }));

            if recursive && is_dir && depth < max_depth {
                self.list_entries(base_path, &path, recursive, depth + 1, max_depth, entries)?;
            }
        }

        Ok(())
    }
}

impl Tool for ListDirectoryTool {
    const NAME: &'static str = "list_directory";

    type Error = ListDirectoryError;
    type Args = ListDirectoryArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "List the contents of a directory in the project. Returns file and subdirectory names with their types and sizes.".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the directory to list (relative to project root). Use '.' for root."
                    },
                    "recursive": {
                        "type": "boolean",
                        "description": "If true, list contents recursively (max depth 3). Default is false."
                    }
                }
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let path_str = args.path.as_deref().unwrap_or(".");

        let requested_path = if path_str.is_empty() || path_str == "." {
            self.project_path.clone()
        } else {
            PathBuf::from(path_str)
        };

        let dir_path = self.validate_path(&requested_path)?;
        let recursive = args.recursive.unwrap_or(false);

        let mut entries = Vec::new();
        self.list_entries(&dir_path, &dir_path, recursive, 0, 3, &mut entries)?;

        let result = json!({
            "path": path_str,
            "entries": entries,
            "total_count": entries.len()
        });

        serde_json::to_string_pretty(&result)
            .map_err(|e| ListDirectoryError(format!("Failed to serialize: {}", e)))
    }
}
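
// A minimal sketch of the directory-listing behaviour, checking that dependency
// directories such as `node_modules` are skipped during recursive walks. The scratch
// directory under the system temp dir is an assumption of this sketch.
#[cfg(test)]
mod list_directory_tool_tests {
    use super::*;

    #[test]
    fn skips_dependency_directories_when_recursing() {
        let project = std::env::temp_dir().join("list_directory_tool_skip");
        fs::create_dir_all(project.join("node_modules/pkg")).unwrap();
        fs::create_dir_all(project.join("src")).unwrap();
        fs::write(project.join("src/lib.rs"), "").unwrap();

        let tool = ListDirectoryTool::new(project.clone());
        let mut entries = Vec::new();
        tool.list_entries(&project, &project, true, 0, 3, &mut entries).unwrap();

        // `node_modules` itself is listed, but its contents are never descended into.
        assert!(entries.iter().any(|e| e["name"] == "node_modules"));
        assert!(entries.iter().all(|e| !e["path"].as_str().unwrap().contains("pkg")));
    }
}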

/// Arguments for the `write_file` tool.
#[derive(Debug, Deserialize)]
pub struct WriteFileArgs {
    pub path: String,
    pub content: String,
    pub create_dirs: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("Write file error: {0}")]
pub struct WriteFileError(String);

/// Filenames the user has approved for writing without further confirmation
/// (the "always allow" option in the confirmation prompt).
#[derive(Debug)]
pub struct AllowedFilePatterns {
    patterns: Mutex<HashSet<String>>,
}

impl AllowedFilePatterns {
    pub fn new() -> Self {
        Self {
            patterns: Mutex::new(HashSet::new()),
        }
    }

    /// Returns true if the exact filename has been allow-listed.
    pub fn is_allowed(&self, filename: &str) -> bool {
        let patterns = self.patterns.lock().unwrap();
        patterns.contains(filename)
    }

    /// Adds a filename to the allow-list.
    pub fn allow(&self, pattern: String) {
        let mut patterns = self.patterns.lock().unwrap();
        patterns.insert(pattern);
    }
}

impl Default for AllowedFilePatterns {
    fn default() -> Self {
        Self::new()
    }
}
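
// A minimal sketch of the allow-list behaviour: matching is by exact filename, so
// approving "Dockerfile" does not approve "Dockerfile.dev".
#[cfg(test)]
mod allowed_patterns_tests {
    use super::*;

    #[test]
    fn allow_then_check() {
        let patterns = AllowedFilePatterns::new();
        assert!(!patterns.is_allowed("Dockerfile"));

        patterns.allow("Dockerfile".to_string());
        assert!(patterns.is_allowed("Dockerfile"));
        // Exact-match semantics: a different filename is still unapproved.
        assert!(!patterns.is_allowed("Dockerfile.dev"));
    }
}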

/// A tool that writes a single file within the project directory, optionally asking the
/// user to confirm the change first.
#[derive(Debug, Clone)]
pub struct WriteFileTool {
    project_path: PathBuf,
    require_confirmation: bool,
    allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ide_client: Option<std::sync::Arc<tokio::sync::Mutex<IdeClient>>>,
}

impl WriteFileTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns: std::sync::Arc::new(AllowedFilePatterns::new()),
            ide_client: None,
        }
    }

    pub fn with_allowed_patterns(
        project_path: PathBuf,
        allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns,
            ide_client: None,
        }
    }

    pub fn with_ide_client(
        mut self,
        ide_client: std::sync::Arc<tokio::sync::Mutex<IdeClient>>,
    ) -> Self {
        self.ide_client = Some(ide_client);
        self
    }

    pub fn without_confirmation(mut self) -> Self {
        self.require_confirmation = false;
        self
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, WriteFileError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| WriteFileError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let parent = target.parent()
            .ok_or_else(|| WriteFileError("Invalid path: no parent directory".to_string()))?;

        let is_within_project = if parent.exists() {
            let canonical_parent = parent.canonicalize()
                .map_err(|e| WriteFileError(format!("Invalid parent path: {}", e)))?;
            canonical_parent.starts_with(&canonical_project)
        } else {
            // The parent directory does not exist yet, so it cannot be canonicalized.
            // Fall back to a lexical check: the target must stay under the project root
            // and must not contain any `..` components.
            target.starts_with(&self.project_path)
                && !target.components().any(|c| c == std::path::Component::ParentDir)
        };

        if !is_within_project {
            return Err(WriteFileError("Access denied: path is outside project directory".to_string()));
        }

        Ok(target)
    }
}

impl Tool for WriteFileTool {
    const NAME: &'static str = "write_file";

    type Error = WriteFileError;
    type Args = WriteFileArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: r#"Write content to a file in the project. Creates the file if it doesn't exist, or overwrites it if it does.

Use this tool to:
- Generate Dockerfiles for applications
- Create Terraform configuration files (.tf)
- Write Helm chart templates and values
- Create docker-compose.yml files
- Generate CI/CD configuration files (.github/workflows, .gitlab-ci.yml)
- Write Kubernetes manifests

The tool will create parent directories automatically if they don't exist."#.to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the file to write (relative to project root). Example: 'Dockerfile', 'terraform/main.tf', 'helm/values.yaml'"
                    },
                    "content": {
                        "type": "string",
                        "description": "The complete content to write to the file"
                    },
                    "create_dirs": {
                        "type": "boolean",
                        "description": "If true (default), create parent directories if they don't exist"
                    }
                },
                "required": ["path", "content"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let requested_path = PathBuf::from(&args.path);
        let file_path = self.validate_path(&requested_path)?;

        // Capture the existing content (if any) so the confirmation prompt can show a diff.
        let old_content = if file_path.exists() {
            fs::read_to_string(&file_path).ok()
        } else {
            None
        };

        let filename = std::path::Path::new(&args.path)
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_else(|| args.path.clone());

        // Skip the prompt when confirmation is disabled or the filename is already allow-listed.
        let needs_confirmation = self.require_confirmation
            && !self.allowed_patterns.is_allowed(&filename);

        if needs_confirmation {
            let ide_client_guard = if let Some(ref client) = self.ide_client {
                Some(client.lock().await)
            } else {
                None
            };
            let ide_client_ref = ide_client_guard.as_deref();

            let confirmation = confirm_file_write_with_ide(
                &args.path,
                old_content.as_deref(),
                &args.content,
                ide_client_ref,
            )
            .await;

            match confirmation {
                ConfirmationResult::Proceed => {}
                ConfirmationResult::ProceedAlways(pattern) => {
                    // Remember this filename so future writes skip confirmation.
                    self.allowed_patterns.allow(pattern);
                }
                ConfirmationResult::Modify(feedback) => {
                    let result = json!({
                        "cancelled": true,
                        "reason": "User requested changes",
                        "user_feedback": feedback,
                        "original_path": args.path
                    });
                    return serde_json::to_string_pretty(&result)
                        .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)));
                }
                ConfirmationResult::Cancel => {
                    let result = json!({
                        "cancelled": true,
                        "reason": "User cancelled the operation",
                        "original_path": args.path
                    });
                    return serde_json::to_string_pretty(&result)
                        .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)));
                }
            }
        }

        let create_dirs = args.create_dirs.unwrap_or(true);
        if create_dirs {
            if let Some(parent) = file_path.parent() {
                if !parent.exists() {
                    fs::create_dir_all(parent)
                        .map_err(|e| WriteFileError(format!("Failed to create directories: {}", e)))?;
                }
            }
        }

        let file_existed = file_path.exists();

        fs::write(&file_path, &args.content)
            .map_err(|e| WriteFileError(format!("Failed to write file: {}", e)))?;

        let action = if file_existed { "Updated" } else { "Created" };
        let lines = args.content.lines().count();

        let result = json!({
            "success": true,
            "action": action,
            "path": args.path,
            "lines_written": lines,
            "bytes_written": args.content.len()
        });

        serde_json::to_string_pretty(&result)
            .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)))
    }
}
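
// A minimal sketch of driving the write tool end-to-end with the interactive confirmation
// disabled. It assumes tokio's test macro (the `rt` and `macros` features) is available in
// dev builds; the scratch directory under the system temp dir is likewise an assumption.
#[cfg(test)]
mod write_file_tool_tests {
    use super::*;

    #[tokio::test]
    async fn writes_file_and_reports_metadata() {
        let project = std::env::temp_dir().join("write_file_tool_basic");
        fs::create_dir_all(&project).unwrap();

        let tool = WriteFileTool::new(project.clone()).without_confirmation();
        let output = tool
            .call(WriteFileArgs {
                path: "docker/Dockerfile".to_string(),
                content: "FROM alpine:3.20\n".to_string(),
                create_dirs: Some(true),
            })
            .await
            .unwrap();

        let parsed: serde_json::Value = serde_json::from_str(&output).unwrap();
        assert_eq!(parsed["success"], true);
        assert!(project.join("docker/Dockerfile").exists());
    }
}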

/// A single file entry in a `write_files` batch.
#[derive(Debug, Deserialize)]
pub struct FileToWrite {
    pub path: String,
    pub content: String,
}

/// Arguments for the `write_files` tool.
#[derive(Debug, Deserialize)]
pub struct WriteFilesArgs {
    pub files: Vec<FileToWrite>,
    pub create_dirs: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("Write files error: {0}")]
pub struct WriteFilesError(String);

/// A tool that writes a batch of files within the project directory, confirming each
/// file with the user unless confirmation is disabled or the filename is allow-listed.
#[derive(Debug, Clone)]
pub struct WriteFilesTool {
    project_path: PathBuf,
    require_confirmation: bool,
    allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ide_client: Option<std::sync::Arc<tokio::sync::Mutex<IdeClient>>>,
}

impl WriteFilesTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns: std::sync::Arc::new(AllowedFilePatterns::new()),
            ide_client: None,
        }
    }

    pub fn with_allowed_patterns(
        project_path: PathBuf,
        allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns,
            ide_client: None,
        }
    }

    pub fn without_confirmation(mut self) -> Self {
        self.require_confirmation = false;
        self
    }

    pub fn with_ide_client(mut self, ide_client: std::sync::Arc<tokio::sync::Mutex<IdeClient>>) -> Self {
        self.ide_client = Some(ide_client);
        self
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, WriteFilesError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| WriteFilesError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let parent = target.parent()
            .ok_or_else(|| WriteFilesError("Invalid path: no parent directory".to_string()))?;

        let is_within_project = if parent.exists() {
            let canonical_parent = parent.canonicalize()
                .map_err(|e| WriteFilesError(format!("Invalid parent path: {}", e)))?;
            canonical_parent.starts_with(&canonical_project)
        } else {
            // The parent directory does not exist yet, so it cannot be canonicalized.
            // Fall back to a lexical check: the target must stay under the project root
            // and must not contain any `..` components.
            target.starts_with(&self.project_path)
                && !target.components().any(|c| c == std::path::Component::ParentDir)
        };

        if !is_within_project {
            return Err(WriteFilesError("Access denied: path is outside project directory".to_string()));
        }

        Ok(target)
    }
}

impl Tool for WriteFilesTool {
    const NAME: &'static str = "write_files";

    type Error = WriteFilesError;
    type Args = WriteFilesArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: r#"Write multiple files at once. Ideal for creating complete infrastructure configurations.

Use this tool when you need to create multiple related files together:
- Complete Terraform modules (main.tf, variables.tf, outputs.tf, providers.tf)
- Full Helm charts (Chart.yaml, values.yaml, templates/*.yaml)
- Kubernetes manifests (deployment.yaml, service.yaml, configmap.yaml)
- Multi-file docker-compose setups

Files are written sequentially, not atomically: if one file fails or is skipped, files already written earlier in the batch remain on disk."#.to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "files": {
                        "type": "array",
                        "description": "List of files to write",
                        "items": {
                            "type": "object",
                            "properties": {
                                "path": {
                                    "type": "string",
                                    "description": "Path to the file (relative to project root)"
                                },
                                "content": {
                                    "type": "string",
                                    "description": "Content to write to the file"
                                }
                            },
                            "required": ["path", "content"]
                        }
                    },
                    "create_dirs": {
                        "type": "boolean",
                        "description": "If true (default), create parent directories if they don't exist"
                    }
                },
                "required": ["files"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let create_dirs = args.create_dirs.unwrap_or(true);
        let mut results = Vec::new();
        let mut total_bytes = 0usize;
        let mut total_lines = 0usize;
        let mut skipped_files = Vec::new();

        for file in &args.files {
            let requested_path = PathBuf::from(&file.path);
            let file_path = self.validate_path(&requested_path)?;

            // Capture the existing content (if any) so the confirmation prompt can show a diff.
            let old_content = if file_path.exists() {
                fs::read_to_string(&file_path).ok()
            } else {
                None
            };

            let filename = std::path::Path::new(&file.path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_else(|| file.path.clone());

            // Skip the prompt when confirmation is disabled or the filename is already allow-listed.
            let needs_confirmation = self.require_confirmation
                && !self.allowed_patterns.is_allowed(&filename);

            if needs_confirmation {
                // Prefer the IDE diff view when an IDE client is connected; otherwise fall
                // back to the plain confirmation prompt.
                let confirmation = if let Some(ref client) = self.ide_client {
                    let guard = client.lock().await;
                    if guard.is_connected() {
                        confirm_file_write_with_ide(
                            &file.path,
                            old_content.as_deref(),
                            &file.content,
                            Some(&*guard),
                        ).await
                    } else {
                        drop(guard);
                        confirm_file_write(
                            &file.path,
                            old_content.as_deref(),
                            &file.content,
                        )
                    }
                } else {
                    confirm_file_write(
                        &file.path,
                        old_content.as_deref(),
                        &file.content,
                    )
                };

                match confirmation {
                    ConfirmationResult::Proceed => {}
                    ConfirmationResult::ProceedAlways(pattern) => {
                        // Remember this filename so later writes, including the rest of
                        // this batch, skip confirmation.
                        self.allowed_patterns.allow(pattern);
                    }
                    ConfirmationResult::Modify(feedback) => {
                        skipped_files.push(json!({
                            "path": file.path,
                            "reason": "User requested changes",
                            "feedback": feedback
                        }));
                        continue;
                    }
                    ConfirmationResult::Cancel => {
                        skipped_files.push(json!({
                            "path": file.path,
                            "reason": "User cancelled"
                        }));
                        continue;
                    }
                }
            }

            if create_dirs {
                if let Some(parent) = file_path.parent() {
                    if !parent.exists() {
                        fs::create_dir_all(parent)
                            .map_err(|e| WriteFilesError(format!("Failed to create directories for {}: {}", file.path, e)))?;
                    }
                }
            }

            let file_existed = file_path.exists();

            fs::write(&file_path, &file.content)
                .map_err(|e| WriteFilesError(format!("Failed to write {}: {}", file.path, e)))?;

            let lines = file.content.lines().count();
            total_bytes += file.content.len();
            total_lines += lines;

            results.push(json!({
                "path": file.path,
                "action": if file_existed { "updated" } else { "created" },
                "lines": lines,
                "bytes": file.content.len()
            }));
        }

        let result = if skipped_files.is_empty() {
            json!({
                "success": true,
                "files_written": results.len(),
                "total_lines": total_lines,
                "total_bytes": total_bytes,
                "files": results
            })
        } else {
            json!({
                "success": !results.is_empty(),
                "files_written": results.len(),
                "files_skipped": skipped_files.len(),
                "total_lines": total_lines,
                "total_bytes": total_bytes,
                "files": results,
                "skipped": skipped_files
            })
        };

        serde_json::to_string_pretty(&result)
            .map_err(|e| WriteFilesError(format!("Failed to serialize: {}", e)))
    }
}
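
// A minimal sketch of the batch-write path with confirmations disabled; same assumptions
// about tokio test support and the temp-dir fixture as the single-file test above.
#[cfg(test)]
mod write_files_tool_tests {
    use super::*;

    #[tokio::test]
    async fn writes_multiple_files() {
        let project = std::env::temp_dir().join("write_files_tool_batch");
        fs::create_dir_all(&project).unwrap();

        let tool = WriteFilesTool::new(project.clone()).without_confirmation();
        let output = tool
            .call(WriteFilesArgs {
                files: vec![
                    FileToWrite {
                        path: "terraform/main.tf".to_string(),
                        content: "# main module\n".to_string(),
                    },
                    FileToWrite {
                        path: "terraform/variables.tf".to_string(),
                        content: "# input variables\n".to_string(),
                    },
                ],
                create_dirs: Some(true),
            })
            .await
            .unwrap();

        let parsed: serde_json::Value = serde_json::from_str(&output).unwrap();
        assert_eq!(parsed["files_written"], 2);
        assert!(project.join("terraform/variables.tf").exists());
    }
}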