use crate::agent::ide::IdeClient;
use crate::agent::ui::confirmation::ConfirmationResult;
use crate::agent::ui::diff::{confirm_file_write, confirm_file_write_with_ide};
use rig::completion::ToolDefinition;
use rig::tool::Tool;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;
use std::sync::Mutex;

/// Arguments accepted by the `read_file` tool.
#[derive(Debug, Deserialize)]
pub struct ReadFileArgs {
    pub path: String,
    pub start_line: Option<u64>,
    pub end_line: Option<u64>,
}

#[derive(Debug, thiserror::Error)]
#[error("Read file error: {0}")]
pub struct ReadFileError(String);

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReadFileTool {
    project_path: PathBuf,
}

impl ReadFileTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, ReadFileError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| ReadFileError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let canonical_target = target.canonicalize()
            .map_err(|e| ReadFileError(format!("File not found: {}", e)))?;

        if !canonical_target.starts_with(&canonical_project) {
            return Err(ReadFileError("Access denied: path is outside project directory".to_string()));
        }

        Ok(canonical_target)
    }
}

impl Tool for ReadFileTool {
    const NAME: &'static str = "read_file";

    type Error = ReadFileError;
    type Args = ReadFileArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "Read the contents of a file in the project. Use this to examine source code, configuration files, or any text file.".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the file to read (relative to project root)"
                    },
                    "start_line": {
                        "type": "integer",
                        "description": "Optional starting line number (1-based)"
                    },
                    "end_line": {
                        "type": "integer",
                        "description": "Optional ending line number (1-based, inclusive)"
                    }
                },
                "required": ["path"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let requested_path = PathBuf::from(&args.path);
        let file_path = self.validate_path(&requested_path)?;

        let metadata = fs::metadata(&file_path)
            .map_err(|e| ReadFileError(format!("Cannot read file: {}", e)))?;

        const MAX_SIZE: u64 = 1024 * 1024;
        if metadata.len() > MAX_SIZE {
            return Ok(json!({
                "error": format!("File too large ({} bytes). Maximum size is {} bytes.", metadata.len(), MAX_SIZE)
            }).to_string());
        }

        let content = fs::read_to_string(&file_path)
            .map_err(|e| ReadFileError(format!("Failed to read file: {}", e)))?;

        let output = if let Some(start) = args.start_line {
            let lines: Vec<&str> = content.lines().collect();
            let start_idx = (start as usize).saturating_sub(1);
            let end_idx = args.end_line.map(|e| (e as usize).min(lines.len())).unwrap_or(lines.len());

            if start_idx >= lines.len() {
                return Ok(json!({
                    "error": format!("Start line {} exceeds file length ({})", start, lines.len())
                }).to_string());
            }

            let end_idx = end_idx.max(start_idx);

            let selected: Vec<String> = lines[start_idx..end_idx]
                .iter()
                .enumerate()
                .map(|(i, line)| format!("{:>4} | {}", start_idx + i + 1, line))
                .collect();

            json!({
                "file": args.path,
                "lines": format!("{}-{}", start, end_idx),
                "total_lines": lines.len(),
                "content": selected.join("\n")
            })
        } else {
            json!({
                "file": args.path,
                "total_lines": content.lines().count(),
                "content": content
            })
        };

        serde_json::to_string_pretty(&output)
            .map_err(|e| ReadFileError(format!("Failed to serialize: {}", e)))
    }
}
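
// A minimal test sketch (not part of the original file) illustrating the intent of
// `ReadFileTool::validate_path`: paths that resolve inside the project are accepted,
// paths outside are rejected. It uses only the standard library; the temp-directory
// and file names below are invented for the example.
#[cfg(test)]
mod read_file_tool_path_tests {
    use super::*;

    #[test]
    fn rejects_paths_outside_the_project() {
        // Use a directory under the OS temp dir as a stand-in project root.
        let project = std::env::temp_dir().join("read_file_tool_example_project");
        std::fs::create_dir_all(&project).unwrap();
        std::fs::write(project.join("inside.txt"), "ok").unwrap();

        let tool = ReadFileTool::new(project.clone());

        // A relative path inside the project canonicalizes and passes the check.
        assert!(tool.validate_path(&std::path::PathBuf::from("inside.txt")).is_ok());

        // An absolute path outside the project canonicalizes but fails the
        // `starts_with` check, so it is rejected.
        let outside = std::env::temp_dir().join("read_file_tool_example_outside.txt");
        std::fs::write(&outside, "nope").unwrap();
        assert!(tool.validate_path(&outside).is_err());
    }
}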

/// Arguments accepted by the `list_directory` tool.
#[derive(Debug, Deserialize)]
pub struct ListDirectoryArgs {
    pub path: Option<String>,
    pub recursive: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("List directory error: {0}")]
pub struct ListDirectoryError(String);

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListDirectoryTool {
    project_path: PathBuf,
}

impl ListDirectoryTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self { project_path }
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, ListDirectoryError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| ListDirectoryError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let canonical_target = target.canonicalize()
            .map_err(|e| ListDirectoryError(format!("Directory not found: {}", e)))?;

        if !canonical_target.starts_with(&canonical_project) {
            return Err(ListDirectoryError("Access denied: path is outside project directory".to_string()));
        }

        Ok(canonical_target)
    }

    fn list_entries(
        &self,
        base_path: &PathBuf,
        current_path: &PathBuf,
        recursive: bool,
        depth: usize,
        max_depth: usize,
        entries: &mut Vec<serde_json::Value>,
    ) -> Result<(), ListDirectoryError> {
        let skip_dirs = ["node_modules", ".git", "target", "__pycache__", ".venv", "venv", "dist", "build"];

        let dir_name = current_path.file_name().and_then(|n| n.to_str()).unwrap_or("");

        if depth > 0 && skip_dirs.contains(&dir_name) {
            return Ok(());
        }

        let read_dir = fs::read_dir(current_path)
            .map_err(|e| ListDirectoryError(format!("Cannot read directory: {}", e)))?;

        for entry in read_dir {
            let entry = entry.map_err(|e| ListDirectoryError(format!("Error reading entry: {}", e)))?;
            let path = entry.path();
            let metadata = entry.metadata().ok();

            let relative_path = path.strip_prefix(base_path).unwrap_or(&path).to_string_lossy().to_string();
            let is_dir = metadata.as_ref().map(|m| m.is_dir()).unwrap_or(false);
            let size = metadata.as_ref().map(|m| m.len()).unwrap_or(0);

            entries.push(json!({
                "name": entry.file_name().to_string_lossy(),
                "path": relative_path,
                "type": if is_dir { "directory" } else { "file" },
                "size": if is_dir { None::<u64> } else { Some(size) }
            }));

            if recursive && is_dir && depth < max_depth {
                self.list_entries(base_path, &path, recursive, depth + 1, max_depth, entries)?;
            }
        }

        Ok(())
    }
}

impl Tool for ListDirectoryTool {
    const NAME: &'static str = "list_directory";

    type Error = ListDirectoryError;
    type Args = ListDirectoryArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "List the contents of a directory in the project. Returns file and subdirectory names with their types and sizes.".to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the directory to list (relative to project root). Use '.' for root."
                    },
                    "recursive": {
                        "type": "boolean",
                        "description": "If true, list contents recursively (max depth 3). Default is false."
                    }
                }
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let path_str = args.path.as_deref().unwrap_or(".");

        let requested_path = if path_str.is_empty() || path_str == "." {
            self.project_path.clone()
        } else {
            PathBuf::from(path_str)
        };

        let dir_path = self.validate_path(&requested_path)?;
        let recursive = args.recursive.unwrap_or(false);

        let mut entries = Vec::new();
        self.list_entries(&dir_path, &dir_path, recursive, 0, 3, &mut entries)?;

        let result = json!({
            "path": path_str,
            "entries": entries,
            "total_count": entries.len()
        });

        serde_json::to_string_pretty(&result)
            .map_err(|e| ListDirectoryError(format!("Failed to serialize: {}", e)))
    }
}
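
// An illustrative sketch (not from the original file) of `list_entries` behaviour:
// well-known dependency/build directories such as `node_modules` are listed as
// entries but never descended into. Directory and file names here are invented.
#[cfg(test)]
mod list_directory_tool_tests {
    use super::*;

    #[test]
    fn recursive_listing_skips_node_modules_contents() {
        let project = std::env::temp_dir().join("list_directory_tool_example");
        std::fs::create_dir_all(project.join("src")).unwrap();
        std::fs::create_dir_all(project.join("node_modules").join("pkg")).unwrap();
        std::fs::write(project.join("src").join("main.rs"), "fn main() {}").unwrap();

        let tool = ListDirectoryTool::new(project.clone());
        let root = project.canonicalize().unwrap();

        let mut entries = Vec::new();
        tool.list_entries(&root, &root, true, 0, 3, &mut entries).unwrap();

        let paths: Vec<String> = entries
            .iter()
            .map(|e| e["path"].as_str().unwrap_or_default().to_string())
            .collect();

        // `src/main.rs` is reached through recursion; nothing under `node_modules` is.
        assert!(paths.iter().any(|p| p.ends_with("main.rs")));
        assert!(!paths.iter().any(|p| p.contains("pkg")));
    }
}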

/// Arguments accepted by the `write_file` tool.
#[derive(Debug, Deserialize)]
pub struct WriteFileArgs {
    /// Path to the file to write, relative to the project root.
    pub path: String,
    /// The complete content to write to the file.
    pub content: String,
    /// If true (the default), create parent directories as needed.
    pub create_dirs: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("Write file error: {0}")]
pub struct WriteFileError(String);

/// Filenames the user has marked as "always allow", shared across write tools so
/// that repeated writes to those filenames skip the confirmation prompt.
#[derive(Debug)]
pub struct AllowedFilePatterns {
    patterns: Mutex<HashSet<String>>,
}

impl AllowedFilePatterns {
    pub fn new() -> Self {
        Self {
            patterns: Mutex::new(HashSet::new()),
        }
    }

    pub fn is_allowed(&self, filename: &str) -> bool {
        let patterns = self.patterns.lock().unwrap();
        patterns.contains(filename)
    }

    pub fn allow(&self, pattern: String) {
        let mut patterns = self.patterns.lock().unwrap();
        patterns.insert(pattern);
    }
}

impl Default for AllowedFilePatterns {
    fn default() -> Self {
        Self::new()
    }
}
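
// A small usage sketch (illustrative only): once a filename has been allowed,
// later writes to the same filename skip the confirmation prompt. Matching is by
// exact filename; the names below are arbitrary examples.
#[cfg(test)]
mod allowed_file_patterns_tests {
    use super::*;

    #[test]
    fn allow_then_check() {
        let patterns = AllowedFilePatterns::new();
        assert!(!patterns.is_allowed("Dockerfile"));

        patterns.allow("Dockerfile".to_string());
        assert!(patterns.is_allowed("Dockerfile"));

        // No glob or extension matching, only exact names.
        assert!(!patterns.is_allowed("docker-compose.yml"));
    }
}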

#[derive(Debug, Clone)]
pub struct WriteFileTool {
    project_path: PathBuf,
    /// Whether to prompt the user before writing.
    require_confirmation: bool,
    /// Filenames the user has already approved with "always allow".
    allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    /// Optional IDE connection used when confirming writes.
    ide_client: Option<std::sync::Arc<tokio::sync::Mutex<IdeClient>>>,
}

impl WriteFileTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns: std::sync::Arc::new(AllowedFilePatterns::new()),
            ide_client: None,
        }
    }

    pub fn with_allowed_patterns(
        project_path: PathBuf,
        allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns,
            ide_client: None,
        }
    }

    pub fn with_ide_client(
        mut self,
        ide_client: std::sync::Arc<tokio::sync::Mutex<IdeClient>>,
    ) -> Self {
        self.ide_client = Some(ide_client);
        self
    }

    pub fn without_confirmation(mut self) -> Self {
        self.require_confirmation = false;
        self
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, WriteFileError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| WriteFileError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let parent = target.parent()
            .ok_or_else(|| WriteFileError("Invalid path: no parent directory".to_string()))?;

        let is_within_project = if parent.exists() {
            let canonical_parent = parent.canonicalize()
                .map_err(|e| WriteFileError(format!("Invalid parent path: {}", e)))?;
            canonical_parent.starts_with(&canonical_project)
        } else {
            // Parent does not exist yet; reject any `..` components instead of canonicalizing.
            let normalized = self.project_path.join(requested);
            !normalized.components().any(|c| c == std::path::Component::ParentDir)
        };

        if !is_within_project {
            return Err(WriteFileError("Access denied: path is outside project directory".to_string()));
        }

        Ok(target)
    }
}

impl Tool for WriteFileTool {
    const NAME: &'static str = "write_file";

    type Error = WriteFileError;
    type Args = WriteFileArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: r#"Write content to a file in the project. Creates the file if it doesn't exist, or overwrites it if it does.

**IMPORTANT**: Use this tool IMMEDIATELY when the user asks you to:
- Create ANY file (Dockerfile, .tf, .yaml, .md, .json, etc.)
- Generate configuration files
- Write documentation to a specific location
- "Put content in" or "under" a directory
- Save analysis results or findings
- Document anything in a file

**DO NOT** just describe what you would write - actually call this tool with the content.

Use cases:
- Generate Dockerfiles for applications
- Create Terraform configuration files (.tf)
- Write Helm chart templates and values
- Create docker-compose.yml files
- Generate CI/CD configuration files (.github/workflows, .gitlab-ci.yml)
- Write Kubernetes manifests
- Save analysis findings to markdown files
- Create any text file the user requests

The tool will create parent directories automatically if they don't exist."#.to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "path": {
                        "type": "string",
                        "description": "Path to the file to write (relative to project root). Example: 'Dockerfile', 'terraform/main.tf', 'helm/values.yaml'"
                    },
                    "content": {
                        "type": "string",
                        "description": "The complete content to write to the file"
                    },
                    "create_dirs": {
                        "type": "boolean",
                        "description": "If true (default), create parent directories if they don't exist"
                    }
                },
                "required": ["path", "content"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let requested_path = PathBuf::from(&args.path);
        let file_path = self.validate_path(&requested_path)?;

        // Capture the existing content (if any) so the confirmation prompt can show a diff.
        let old_content = if file_path.exists() {
            fs::read_to_string(&file_path).ok()
        } else {
            None
        };

        let filename = std::path::Path::new(&args.path)
            .file_name()
            .map(|n| n.to_string_lossy().to_string())
            .unwrap_or_else(|| args.path.clone());

        let needs_confirmation = self.require_confirmation
            && !self.allowed_patterns.is_allowed(&filename);

        if needs_confirmation {
            let ide_client_guard = if let Some(ref client) = self.ide_client {
                Some(client.lock().await)
            } else {
                None
            };
            let ide_client_ref = ide_client_guard.as_deref();

            let confirmation = confirm_file_write_with_ide(
                &args.path,
                old_content.as_deref(),
                &args.content,
                ide_client_ref,
            )
            .await;

            match confirmation {
                ConfirmationResult::Proceed => {
                    // Continue with the write.
                }
                ConfirmationResult::ProceedAlways(pattern) => {
                    // Remember the pattern so future writes to it skip confirmation.
                    self.allowed_patterns.allow(pattern);
                }
                ConfirmationResult::Modify(feedback) => {
                    let result = json!({
                        "cancelled": true,
                        "reason": "User requested changes",
                        "user_feedback": feedback,
                        "original_path": args.path
                    });
                    return serde_json::to_string_pretty(&result)
                        .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)));
                }
                ConfirmationResult::Cancel => {
                    let result = json!({
                        "cancelled": true,
                        "reason": "User cancelled the operation",
                        "original_path": args.path
                    });
                    return serde_json::to_string_pretty(&result)
                        .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)));
                }
            }
        }

        let create_dirs = args.create_dirs.unwrap_or(true);
        if create_dirs {
            if let Some(parent) = file_path.parent() {
                if !parent.exists() {
                    fs::create_dir_all(parent)
                        .map_err(|e| WriteFileError(format!("Failed to create directories: {}", e)))?;
                }
            }
        }

        let file_existed = file_path.exists();

        fs::write(&file_path, &args.content)
            .map_err(|e| WriteFileError(format!("Failed to write file: {}", e)))?;

        let action = if file_existed { "Updated" } else { "Created" };
        let lines = args.content.lines().count();

        let result = json!({
            "success": true,
            "action": action,
            "path": args.path,
            "lines_written": lines,
            "bytes_written": args.content.len()
        });

        serde_json::to_string_pretty(&result)
            .map_err(|e| WriteFileError(format!("Failed to serialize: {}", e)))
    }
}
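
// A construction sketch (illustrative, not from the original file) for wiring
// `WriteFileTool` in a non-interactive run: confirmation is disabled and an
// `AllowedFilePatterns` set is shared so "always allow" decisions carry across
// tools. The path and pattern values are arbitrary examples.
#[cfg(test)]
mod write_file_tool_construction_tests {
    use super::*;

    #[test]
    fn builds_without_confirmation_and_with_shared_patterns() {
        let shared = std::sync::Arc::new(AllowedFilePatterns::new());
        shared.allow("Dockerfile".to_string());

        let tool = WriteFileTool::with_allowed_patterns(
            std::path::PathBuf::from("."),
            shared.clone(),
        )
        .without_confirmation();

        // Confirmation is off and the shared pattern set is visible to the tool.
        assert!(!tool.require_confirmation);
        assert!(tool.allowed_patterns.is_allowed("Dockerfile"));
    }
}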

/// A single file in a `write_files` batch.
#[derive(Debug, Deserialize)]
pub struct FileToWrite {
    /// Path to the file, relative to the project root.
    pub path: String,
    /// Content to write to the file.
    pub content: String,
}

/// Arguments accepted by the `write_files` tool.
#[derive(Debug, Deserialize)]
pub struct WriteFilesArgs {
    /// The files to write.
    pub files: Vec<FileToWrite>,
    /// If true (the default), create parent directories as needed.
    pub create_dirs: Option<bool>,
}

#[derive(Debug, thiserror::Error)]
#[error("Write files error: {0}")]
pub struct WriteFilesError(String);

#[derive(Debug, Clone)]
pub struct WriteFilesTool {
    project_path: PathBuf,
    /// Whether to prompt the user before each write.
    require_confirmation: bool,
    /// Filenames the user has already approved with "always allow".
    allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    /// Optional IDE connection used when confirming writes.
    ide_client: Option<std::sync::Arc<tokio::sync::Mutex<IdeClient>>>,
}

impl WriteFilesTool {
    pub fn new(project_path: PathBuf) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns: std::sync::Arc::new(AllowedFilePatterns::new()),
            ide_client: None,
        }
    }

    pub fn with_allowed_patterns(
        project_path: PathBuf,
        allowed_patterns: std::sync::Arc<AllowedFilePatterns>,
    ) -> Self {
        Self {
            project_path,
            require_confirmation: true,
            allowed_patterns,
            ide_client: None,
        }
    }

    pub fn without_confirmation(mut self) -> Self {
        self.require_confirmation = false;
        self
    }

    pub fn with_ide_client(mut self, ide_client: std::sync::Arc<tokio::sync::Mutex<IdeClient>>) -> Self {
        self.ide_client = Some(ide_client);
        self
    }

    fn validate_path(&self, requested: &PathBuf) -> Result<PathBuf, WriteFilesError> {
        let canonical_project = self.project_path.canonicalize()
            .map_err(|e| WriteFilesError(format!("Invalid project path: {}", e)))?;

        let target = if requested.is_absolute() {
            requested.clone()
        } else {
            self.project_path.join(requested)
        };

        let parent = target.parent()
            .ok_or_else(|| WriteFilesError("Invalid path: no parent directory".to_string()))?;

        let is_within_project = if parent.exists() {
            let canonical_parent = parent.canonicalize()
                .map_err(|e| WriteFilesError(format!("Invalid parent path: {}", e)))?;
            canonical_parent.starts_with(&canonical_project)
        } else {
            let normalized = self.project_path.join(requested);
            !normalized.components().any(|c| c == std::path::Component::ParentDir)
        };

        if !is_within_project {
            return Err(WriteFilesError("Access denied: path is outside project directory".to_string()));
        }

        Ok(target)
    }
}

impl Tool for WriteFilesTool {
    const NAME: &'static str = "write_files";

    type Error = WriteFilesError;
    type Args = WriteFilesArgs;
    type Output = String;

    async fn definition(&self, _prompt: String) -> ToolDefinition {
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: r#"Write multiple files at once. Ideal for creating complete infrastructure configurations.

**IMPORTANT**: Use this tool when you need to create multiple related files together.

**USE THIS TOOL** (not just describe files) when the user asks for:
- Complete Terraform modules (main.tf, variables.tf, outputs.tf, providers.tf)
- Full Helm charts (Chart.yaml, values.yaml, templates/*.yaml)
- Kubernetes manifests (deployment.yaml, service.yaml, configmap.yaml)
- Multi-file docker-compose setups
- Multiple documentation files in a directory
- Any set of related files

**DO NOT** just describe the files - actually call this tool to create them.

Files are written one at a time. Parent directories are created automatically."#.to_string(),
            parameters: json!({
                "type": "object",
                "properties": {
                    "files": {
                        "type": "array",
                        "description": "List of files to write",
                        "items": {
                            "type": "object",
                            "properties": {
                                "path": {
                                    "type": "string",
                                    "description": "Path to the file (relative to project root)"
                                },
                                "content": {
                                    "type": "string",
                                    "description": "Content to write to the file"
                                }
                            },
                            "required": ["path", "content"]
                        }
                    },
                    "create_dirs": {
                        "type": "boolean",
                        "description": "If true (default), create parent directories if they don't exist"
                    }
                },
                "required": ["files"]
            }),
        }
    }

    async fn call(&self, args: Self::Args) -> Result<Self::Output, Self::Error> {
        let create_dirs = args.create_dirs.unwrap_or(true);
        let mut results = Vec::new();
        let mut total_bytes = 0usize;
        let mut total_lines = 0usize;
        let mut skipped_files = Vec::new();

        for file in &args.files {
            let requested_path = PathBuf::from(&file.path);
            let file_path = self.validate_path(&requested_path)?;

            // Capture the existing content (if any) so the confirmation prompt can show a diff.
            let old_content = if file_path.exists() {
                fs::read_to_string(&file_path).ok()
            } else {
                None
            };

            let filename = std::path::Path::new(&file.path)
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_else(|| file.path.clone());

            let needs_confirmation = self.require_confirmation
                && !self.allowed_patterns.is_allowed(&filename);

            if needs_confirmation {
                // Prefer the IDE diff view when an IDE is connected; otherwise fall
                // back to the terminal prompt.
                let confirmation = if let Some(ref client) = self.ide_client {
                    let guard = client.lock().await;
                    if guard.is_connected() {
                        confirm_file_write_with_ide(
                            &file.path,
                            old_content.as_deref(),
                            &file.content,
                            Some(&*guard),
                        ).await
                    } else {
                        drop(guard);
                        confirm_file_write(
                            &file.path,
                            old_content.as_deref(),
                            &file.content,
                        )
                    }
                } else {
                    confirm_file_write(
                        &file.path,
                        old_content.as_deref(),
                        &file.content,
                    )
                };

                match confirmation {
                    ConfirmationResult::Proceed => {
                        // Continue with this file.
                    }
                    ConfirmationResult::ProceedAlways(pattern) => {
                        self.allowed_patterns.allow(pattern);
                    }
                    ConfirmationResult::Modify(feedback) => {
                        skipped_files.push(json!({
                            "path": file.path,
                            "reason": "User requested changes",
                            "feedback": feedback
                        }));
                        continue;
                    }
                    ConfirmationResult::Cancel => {
                        skipped_files.push(json!({
                            "path": file.path,
                            "reason": "User cancelled"
                        }));
                        continue;
                    }
                }
            }

            if create_dirs {
                if let Some(parent) = file_path.parent() {
                    if !parent.exists() {
                        fs::create_dir_all(parent)
                            .map_err(|e| WriteFilesError(format!("Failed to create directories for {}: {}", file.path, e)))?;
                    }
                }
            }

            let file_existed = file_path.exists();

            fs::write(&file_path, &file.content)
                .map_err(|e| WriteFilesError(format!("Failed to write {}: {}", file.path, e)))?;

            let lines = file.content.lines().count();
            total_bytes += file.content.len();
            total_lines += lines;

            results.push(json!({
                "path": file.path,
                "action": if file_existed { "updated" } else { "created" },
                "lines": lines,
                "bytes": file.content.len()
            }));
        }

        let result = if skipped_files.is_empty() {
            json!({
                "success": true,
                "files_written": results.len(),
                "total_lines": total_lines,
                "total_bytes": total_bytes,
                "files": results
            })
        } else {
            json!({
                "success": !results.is_empty(),
                "files_written": results.len(),
                "files_skipped": skipped_files.len(),
                "total_lines": total_lines,
                "total_bytes": total_bytes,
                "files": results,
                "skipped": skipped_files
            })
        };

        serde_json::to_string_pretty(&result)
            .map_err(|e| WriteFilesError(format!("Failed to serialize: {}", e)))
    }
}
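
// A final illustrative sketch (not from the original file): `WriteFilesTool`'s path
// validation accepts new files under directories that do not exist yet, but rejects
// paths that try to escape the project with `..` components. Names are invented.
#[cfg(test)]
mod write_files_tool_path_tests {
    use super::*;

    #[test]
    fn accepts_new_subdir_rejects_parent_escape() {
        let project = std::env::temp_dir().join("write_files_tool_example_project");
        std::fs::create_dir_all(&project).unwrap();

        let tool = WriteFilesTool::new(project.clone());

        // The parent directory does not exist yet, but the path stays inside the project.
        let new_file = std::path::PathBuf::from("terraform/new_module/main.tf");
        assert!(tool.validate_path(&new_file).is_ok());

        // A `..` component under a missing parent is rejected by the component check.
        let escape = std::path::PathBuf::from("missing_dir/../../escape.txt");
        assert!(tool.validate_path(&escape).is_err());
    }
}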