use super::traits::{CacheableTool, FileTool, ModeTool, Tool};
use super::types::*;
use crate::config::constants::diff;
use crate::tools::grep_search::GrepSearchManager;
use crate::utils::vtcodegitignore::should_exclude_file;
use anyhow::{Context, Result, anyhow};
use async_trait::async_trait;
use serde_json::{Value, json};
use similar::TextDiff;
use std::borrow::Cow;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tracing::{info, warn};
use walkdir::WalkDir;
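
/// File discovery and read/write operations scoped to a single workspace root.
///
/// All paths are resolved relative to `workspace_root`, and entries matched by
/// `.vtcodegitignore` are filtered out of listings, searches, and reads.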
#[derive(Clone)]
pub struct FileOpsTool {
    workspace_root: PathBuf,
}

impl FileOpsTool {
    pub fn new(workspace_root: PathBuf, _grep_search: Arc<GrepSearchManager>) -> Self {
        Self { workspace_root }
    }
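
    /// Lists a single file or the immediate entries of a directory, then
    /// applies the `max_items` cap and `page`/`per_page` pagination.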
    async fn execute_basic_list(&self, input: &ListInput) -> Result<Value> {
        let base = self.workspace_root.join(&input.path);

        if self.should_exclude(&base).await {
            return Err(anyhow!(
                "Path '{}' is excluded by .vtcodegitignore",
                input.path
            ));
        }

        let mut all_items = Vec::new();
        if base.is_file() {
            let metadata = tokio::fs::metadata(&base)
                .await
                .with_context(|| format!("Failed to read metadata for file: {}", input.path))?;
            all_items.push(json!({
                "name": base.file_name().unwrap().to_string_lossy(),
                "path": input.path,
                "type": "file",
                "size": metadata.len(),
                "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
            }));
        } else if base.is_dir() {
            let mut entries = tokio::fs::read_dir(&base)
                .await
                .with_context(|| format!("Failed to read directory: {}", input.path))?;
            while let Some(entry) = entries
                .next_entry()
                .await
                .with_context(|| format!("Failed to read directory entry in: {}", input.path))?
            {
                let path = entry.path();
                let name = entry.file_name().to_string_lossy().to_string();

                if !input.include_hidden && name.starts_with('.') {
                    continue;
                }
                if self.should_exclude(&path).await {
                    continue;
                }

                let metadata = entry
                    .metadata()
                    .await
                    .with_context(|| format!("Failed to read metadata for: {}", path.display()))?;
                all_items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(&path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
                }));
            }
        } else {
            warn!(
                path = %input.path,
                exists = base.exists(),
                is_file = base.is_file(),
                is_dir = base.is_dir(),
                "Path does not exist or is neither file nor directory"
            );
            return Err(anyhow!("Path '{}' does not exist", input.path));
        }

        let capped_total = all_items.len().min(input.max_items);
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(capped_total);
        let has_more = end < capped_total;

        info!(
            path = %input.path,
            total_items = all_items.len(),
            capped_total = capped_total,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated file listing"
        );

        if page > 1 && start >= capped_total {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                total_items = capped_total,
                "Requested page exceeds available data"
            );
        }

        let mut page_items = if start < end {
            all_items[start..end].to_vec()
        } else {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no items in requested range"
            );
            vec![]
        };

        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let guidance = if has_more || capped_total < all_items.len() || all_items.len() > 20 {
            Some(format!(
                "Showing {} of {} items (page {}, per_page {}). Use 'page' and 'per_page' to page through results.",
                page_items.len(),
                capped_total,
                page,
                per_page
            ))
        } else {
            None
        };

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": capped_total,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": "list",
            "response_format": if concise { "concise" } else { "detailed" }
        });

        if let Some(msg) = guidance {
            out["message"] = json!(msg);
        }
        Ok(out)
    }
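
    /// Walks the tree under `input.path` (at most 10 levels deep) and keeps
    /// entries whose names contain `name_pattern`, optionally filtered by
    /// `file_extensions`.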
    async fn execute_recursive_search(&self, input: &ListInput) -> Result<Value> {
        let default_pattern = "*".to_string();
        let pattern = input.name_pattern.as_ref().unwrap_or(&default_pattern);
        let search_path = self.workspace_root.join(&input.path);

        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            if !input.include_hidden && name.starts_with('.') {
                continue;
            }

            let matches = if pattern == "*" {
                true
            } else if input.case_sensitive.unwrap_or(true) {
                name.contains(pattern)
            } else {
                name.to_lowercase().contains(&pattern.to_lowercase())
            };

            if matches {
                if let Some(ref extensions) = input.file_extensions {
                    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                        if !extensions.contains(&ext.to_string()) {
                            continue;
                        }
                    } else {
                        continue;
                    }
                }

                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "depth": entry.depth()
                }));
                count += 1;
            }
        }

        Ok(self.paginate_and_format(items, count, input, "recursive", Some(pattern)))
    }
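
    /// Returns the first entry whose file name equals `name_pattern`,
    /// case-insensitively when `case_sensitive` is `false`.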
    async fn execute_find_by_name(&self, input: &ListInput) -> Result<Value> {
        let file_name = input
            .name_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'name_pattern'. Example: list_files(path='.', mode='find_name', name_pattern='Cargo.toml')"))?;
        let search_path = self.workspace_root.join(&input.path);

        for entry in WalkDir::new(&search_path).max_depth(10) {
            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            let matches = if input.case_sensitive.unwrap_or(true) {
                name == file_name.as_str()
            } else {
                name.to_lowercase() == file_name.to_lowercase()
            };

            if matches {
                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                return Ok(json!({
                    "success": true,
                    "found": true,
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "mode": "find_name"
                }));
            }
        }

        Ok(json!({
            "success": true,
            "found": false,
            "mode": "find_name",
            "searched_for": file_name,
            "message": "Not found. Consider using mode='recursive' if searching in subdirectories."
        }))
    }
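
    /// Returns files under `input.path` whose contents contain
    /// `content_pattern`; files that cannot be read as UTF-8 are skipped.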
    async fn execute_find_by_content(&self, input: &ListInput) -> Result<Value> {
        let content_pattern = input
            .content_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'content_pattern'. Example: list_files(path='src', mode='find_content', content_pattern='fn main')"))?;

        let search_path = self.workspace_root.join(&input.path);
        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if !path.is_file() || self.should_exclude(path).await {
                continue;
            }

            if let Ok(content) = tokio::fs::read_to_string(path).await {
                let matches = if input.case_sensitive.unwrap_or(true) {
                    content.contains(content_pattern)
                } else {
                    content
                        .to_lowercase()
                        .contains(&content_pattern.to_lowercase())
                };

                if matches && let Ok(metadata) = tokio::fs::metadata(path).await {
                    items.push(json!({
                        "name": path.file_name().unwrap_or_default().to_string_lossy(),
                        "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                        "type": "file",
                        "size": metadata.len(),
                        "pattern_found": true
                    }));
                    count += 1;
                }
            }
        }

        Ok(self.paginate_and_format(items, count, input, "find_content", Some(content_pattern)))
    }
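
    /// Reads a file, trying several candidate locations for bare file names
    /// (see `resolve_file_path`). Files beyond the line threshold are returned
    /// as a head-and-tail chunk rather than whole.
    /// Example arguments: `{"path": "src/main.rs", "max_bytes": 20000}`.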
    pub async fn read_file(&self, args: Value) -> Result<Value> {
        let input: Input = serde_json::from_value(args)
            .context("Error: Invalid 'read_file' arguments. Required: { path: string }. Optional: { max_bytes: number }. Example: read_file({\"path\": \"src/main.rs\", \"max_bytes\": 20000})")?;

        let potential_paths = self.resolve_file_path(&input.path)?;

        for candidate_path in &potential_paths {
            if self.should_exclude(candidate_path).await {
                continue;
            }

            if candidate_path.exists() && candidate_path.is_file() {
                let should_chunk = if let Some(max_lines) = input.max_lines {
                    self.count_lines_with_tree_sitter(candidate_path).await? > max_lines
                } else if let Some(chunk_lines) = input.chunk_lines {
                    self.count_lines_with_tree_sitter(candidate_path).await? > chunk_lines
                } else {
                    self.count_lines_with_tree_sitter(candidate_path).await?
                        > crate::config::constants::chunking::MAX_LINES_THRESHOLD
                };

                let (content, truncated, total_lines) = if should_chunk {
                    self.read_file_chunked(candidate_path, &input).await?
                } else {
                    let content = if let Some(max_bytes) = input.max_bytes {
                        let mut file_content = tokio::fs::read(candidate_path).await?;
                        if file_content.len() > max_bytes {
                            file_content.truncate(max_bytes);
                        }
                        String::from_utf8_lossy(&file_content).to_string()
                    } else {
                        tokio::fs::read_to_string(candidate_path).await?
                    };
                    (content, false, None)
                };

                let mut result = json!({
                    "success": true,
                    "content": content,
                    "path": candidate_path.strip_prefix(&self.workspace_root).unwrap_or(candidate_path).to_string_lossy(),
                    "metadata": {
                        "size": content.len()
                    }
                });

                if truncated {
                    result["truncated"] = json!(true);
                    result["truncation_reason"] = json!("file_exceeds_line_threshold");
                    if let Some(total) = total_lines {
                        result["total_lines"] = json!(total);
                        // Mirror the chunk sizing used by `read_file_chunked` so the
                        // reported `shown_lines` matches what was actually returned.
                        let start_chunk = if let Some(chunk_lines) = input.chunk_lines {
                            chunk_lines / 2
                        } else {
                            crate::config::constants::chunking::CHUNK_START_LINES
                        };
                        let end_chunk = if let Some(chunk_lines) = input.chunk_lines {
                            chunk_lines / 2
                        } else {
                            crate::config::constants::chunking::CHUNK_END_LINES
                        };
                        result["shown_lines"] = json!(start_chunk + end_chunk);
                    }
                }

                self.log_chunking_operation(candidate_path, truncated, total_lines)
                    .await?;

                return Ok(result);
            }
        }

        Err(anyhow!(
            "Error: File not found: {}. Tried paths: {}. Suggestions: 1) Check the file path and case sensitivity, 2) Use 'list_files' to explore the directory structure, 3) Try case-insensitive search with just the filename. Example: read_file({{\"path\": \"src/main.rs\"}})",
            input.path,
            potential_paths
                .iter()
                .map(|p| p
                    .strip_prefix(&self.workspace_root)
                    .unwrap_or(p)
                    .to_string_lossy())
                .collect::<Vec<_>>()
                .join(", ")
        ))
    }
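
    /// Writes `content` to `path` in `overwrite`, `append`, or
    /// `skip_if_exists` mode, attaching a unified-diff preview when both the
    /// existing and final contents are small enough to diff.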
    pub async fn write_file(&self, args: Value) -> Result<Value> {
        let input: WriteInput = serde_json::from_value(args)
            .context("Error: Invalid 'write_file' arguments. Required: { path: string, content: string }. Optional: { mode: 'overwrite'|'append'|'skip_if_exists' }. Example: write_file({\"path\": \"README.md\", \"content\": \"Hello\", \"mode\": \"overwrite\"})")?;
        let file_path = self.workspace_root.join(&input.path);

        let content_size = input.content.len();
        let should_chunk =
            content_size > crate::config::constants::chunking::MAX_WRITE_CONTENT_SIZE;

        if should_chunk {
            return self.write_file_chunked(&file_path, &input).await;
        }

        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        let file_exists = tokio::fs::try_exists(&file_path).await?;

        if input.mode.as_str() == "skip_if_exists" && file_exists {
            return Ok(json!({
                "success": true,
                "skipped": true,
                "reason": "File already exists"
            }));
        }

        let mut existing_content: Option<String> = None;
        let mut diff_preview: Option<Value> = None;

        if file_exists {
            match tokio::fs::read_to_string(&file_path).await {
                Ok(content) => existing_content = Some(content),
                Err(error) => {
                    diff_preview = Some(diff_preview_error_skip(
                        "failed_to_read_existing_content",
                        Some(&format!("{:?}", error.kind())),
                    ));
                }
            }
        }

        match input.mode.as_str() {
            "overwrite" => {
                tokio::fs::write(&file_path, &input.content).await?;
            }
            "append" => {
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(&file_path)
                    .await?;
                file.write_all(input.content.as_bytes()).await?;
            }
            "skip_if_exists" => {
                tokio::fs::write(&file_path, &input.content).await?;
            }
            _ => {
                return Err(anyhow!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                ));
            }
        }

        self.log_write_operation(&file_path, content_size, false)
            .await?;

        if diff_preview.is_none() {
            let existing_snapshot = existing_content.as_deref();
            let total_len = if input.mode.as_str() == "append" {
                existing_snapshot
                    .map(|content| content.len())
                    .unwrap_or_default()
                    + input.content.len()
            } else {
                input.content.len()
            };

            if total_len > diff::MAX_PREVIEW_BYTES
                || existing_snapshot
                    .map(|content| content.len() > diff::MAX_PREVIEW_BYTES)
                    .unwrap_or(false)
            {
                diff_preview = Some(diff_preview_size_skip());
            } else {
                let final_snapshot: Cow<'_, str> = if input.mode.as_str() == "append" {
                    if let Some(existing) = existing_snapshot {
                        Cow::Owned(format!("{existing}{}", input.content))
                    } else {
                        Cow::Borrowed(input.content.as_str())
                    }
                } else {
                    Cow::Borrowed(input.content.as_str())
                };

                diff_preview = Some(build_diff_preview(
                    &input.path,
                    existing_snapshot,
                    final_snapshot.as_ref(),
                ));
            }
        }

        let mut response = json!({
            "success": true,
            "path": input.path,
            "mode": input.mode,
            "bytes_written": input.content.len()
        });

        if let Some(preview) = diff_preview {
            if let Some(object) = response.as_object_mut() {
                object.insert("diff_preview".to_string(), preview);
            }
        }

        Ok(response)
    }
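
    /// Writes oversized content in `WRITE_CHUNK_SIZE` pieces; the diff
    /// preview is always skipped for chunked writes.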
    async fn write_file_chunked(&self, file_path: &Path, input: &WriteInput) -> Result<Value> {
        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        let content_bytes = input.content.as_bytes();
        let chunk_size = crate::config::constants::chunking::WRITE_CHUNK_SIZE;
        let total_size = content_bytes.len();

        match input.mode.as_str() {
            "overwrite" => {
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .write(true)
                    .truncate(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "append" => {
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "skip_if_exists" => {
                if file_path.exists() {
                    return Ok(json!({
                        "success": true,
                        "skipped": true,
                        "reason": "File already exists"
                    }));
                }
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::File::create(file_path).await?;
                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            _ => {
                return Err(anyhow!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                ));
            }
        }

        self.log_write_operation(file_path, total_size, true)
            .await?;

        Ok(json!({
            "success": true,
            "path": file_path.strip_prefix(&self.workspace_root).unwrap_or(file_path).to_string_lossy(),
            "mode": input.mode,
            "bytes_written": total_size,
            "chunked": true,
            "chunk_size": chunk_size,
            "chunks_written": total_size.div_ceil(chunk_size),
            "diff_preview": diff_preview_size_skip()
        }))
    }
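
    /// Emits a structured JSON log entry describing a completed write.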
    async fn log_write_operation(
        &self,
        file_path: &Path,
        bytes_written: usize,
        chunked: bool,
    ) -> Result<()> {
        let log_entry = json!({
            "operation": if chunked { "write_file_chunked" } else { "write_file" },
            "file_path": file_path.to_string_lossy(),
            "bytes_written": bytes_written,
            "chunked": chunked,
            "chunk_size": if chunked { Some(crate::config::constants::chunking::WRITE_CHUNK_SIZE) } else { None },
            "timestamp": chrono::Utc::now().to_rfc3339()
        });

        info!(
            "File write operation: {}",
            serde_json::to_string(&log_entry)?
        );
        Ok(())
    }
}
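
/// Placeholder preview returned when content exceeds `diff::MAX_PREVIEW_BYTES`.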
fn diff_preview_size_skip() -> Value {
    json!({
        "skipped": true,
        "reason": "content_exceeds_preview_limit",
        "max_bytes": diff::MAX_PREVIEW_BYTES
    })
}
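
/// Placeholder preview returned when the existing file content could not be read.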
fn diff_preview_error_skip(reason: &str, detail: Option<&str>) -> Value {
    match detail {
        Some(value) => json!({
            "skipped": true,
            "reason": reason,
            "detail": value
        }),
        None => json!({
            "skipped": true,
            "reason": reason
        }),
    }
}
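
/// Builds a unified diff between `before` and `after`, condensing output
/// longer than `diff::MAX_PREVIEW_LINES` to a head and tail separated by an
/// "N lines omitted" marker.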
fn build_diff_preview(path: &str, before: Option<&str>, after: &str) -> Value {
    let previous = before.unwrap_or("");
    let mut diff_output = TextDiff::from_lines(previous, after)
        .unified_diff()
        .context_radius(diff::CONTEXT_RADIUS)
        .header(&format!("a/{path}"), &format!("b/{path}"))
        .to_string();

    if diff_output.trim().is_empty() {
        return json!({
            "content": "",
            "truncated": false,
            "omitted_line_count": 0,
            "skipped": false,
            "is_empty": true
        });
    }

    let mut lines: Vec<String> = diff_output.lines().map(|line| line.to_string()).collect();
    let mut truncated = false;
    let mut omitted = 0usize;

    if lines.len() > diff::MAX_PREVIEW_LINES {
        truncated = true;
        let head_count = diff::HEAD_LINE_COUNT.min(lines.len());
        let tail_count = diff::TAIL_LINE_COUNT.min(lines.len().saturating_sub(head_count));
        let mut condensed = Vec::with_capacity(head_count + tail_count + 1);
        condensed.extend(lines.iter().take(head_count).cloned());
        omitted = lines.len().saturating_sub(head_count + tail_count);
        if omitted > 0 {
            condensed.push(format!("... {omitted} lines omitted ..."));
        }
        if tail_count > 0 {
            let tail_start = lines.len().saturating_sub(tail_count);
            condensed.extend(lines.iter().skip(tail_start).cloned());
        }
        lines = condensed;
    }

    diff_output = lines.join("\n");

    json!({
        "content": diff_output,
        "truncated": truncated,
        "omitted_line_count": omitted,
        "skipped": false
    })
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn diff_preview_reports_truncation_and_omission() {
        let after = (0..(diff::MAX_PREVIEW_LINES + 40))
            .map(|idx| format!("line {idx}\n"))
            .collect::<String>();

        let preview = build_diff_preview("sample.txt", None, &after);

        assert_eq!(preview["skipped"], Value::Bool(false));
        assert_eq!(preview["truncated"], Value::Bool(true));
        assert!(preview["omitted_line_count"].as_u64().unwrap() > 0);

        let content = preview["content"].as_str().unwrap();
        assert!(content.contains("lines omitted"));
        assert!(content.lines().count() <= diff::HEAD_LINE_COUNT + diff::TAIL_LINE_COUNT + 1);
    }

    #[test]
    fn diff_preview_skip_handles_error_detail() {
        let preview = diff_preview_error_skip("failed", Some("InvalidData"));
        assert_eq!(preview["reason"], Value::String("failed".to_string()));
        assert_eq!(preview["detail"], Value::String("InvalidData".to_string()));
        assert_eq!(preview["skipped"], Value::Bool(true));
    }
}

#[async_trait]
impl Tool for FileOpsTool {
    async fn execute(&self, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args).context(
            "Error: Invalid 'list_files' arguments. Required: { path: string }. Optional: { mode, max_items, page, per_page, include_hidden, response_format }. Example: list_files({\"path\": \"src\", \"page\": 1, \"per_page\": 50, \"response_format\": \"concise\"})",
        )?;

        let mode_clone = input.mode.clone();
        let mode = mode_clone.as_deref().unwrap_or("list");
        self.execute_mode(mode, serde_json::to_value(input)?).await
    }

    fn name(&self) -> &'static str {
        "list_files"
    }

    fn description(&self) -> &'static str {
        "Enhanced file discovery tool with multiple modes: list (default), recursive, find_name, find_content"
    }
}

#[async_trait]
impl FileTool for FileOpsTool {
    fn workspace_root(&self) -> &PathBuf {
        &self.workspace_root
    }

    async fn should_exclude(&self, path: &Path) -> bool {
        should_exclude_file(path).await
    }
}

#[async_trait]
impl ModeTool for FileOpsTool {
    fn supported_modes(&self) -> Vec<&'static str> {
        vec!["list", "recursive", "find_name", "find_content"]
    }

    async fn execute_mode(&self, mode: &str, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args)?;

        match mode {
            "list" => self.execute_basic_list(&input).await,
            "recursive" => self.execute_recursive_search(&input).await,
            "find_name" => self.execute_find_by_name(&input).await,
            "find_content" => self.execute_find_by_content(&input).await,
            _ => Err(anyhow!("Unsupported file operation mode: {}", mode)),
        }
    }
}
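
/// Only `list` and `recursive` results are cached (60-second TTL). Note that
/// the cache key covers just `path` and `mode`, so other arguments such as
/// pagination do not vary the key.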
#[async_trait]
impl CacheableTool for FileOpsTool {
    fn cache_key(&self, args: &Value) -> String {
        format!(
            "files:{}:{}",
            args.get("path").and_then(|p| p.as_str()).unwrap_or(""),
            args.get("mode").and_then(|m| m.as_str()).unwrap_or("list")
        )
    }

    fn should_cache(&self, args: &Value) -> bool {
        let mode = args.get("mode").and_then(|m| m.as_str()).unwrap_or("list");
        matches!(mode, "list" | "recursive")
    }

    fn cache_ttl(&self) -> u64 {
        60
    }
}

impl FileOpsTool {
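    /// Shared pagination and response shaping for the search modes: caps the
    /// result set at `max_items`, slices out the requested page, and removes
    /// `modified` fields when the response format is concise.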
    fn paginate_and_format(
        &self,
        items: Vec<Value>,
        total_count: usize,
        input: &ListInput,
        mode: &str,
        pattern: Option<&String>,
    ) -> Value {
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let total_capped = total_count.min(input.max_items);
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(total_capped);
        let has_more = end < total_capped;

        info!(
            mode = %mode,
            pattern = ?pattern,
            total_items = total_count,
            capped_total = total_capped,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated search results"
        );

        if page > 1 && start >= total_capped {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                total_items = total_capped,
                "Requested page exceeds available search results"
            );
        }

        let mut page_items = if start < end {
            items[start..end].to_vec()
        } else {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no search results in requested range"
            );
            vec![]
        };

        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": total_capped,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": mode,
            "response_format": if concise { "concise" } else { "detailed" }
        });
        if let Some(p) = pattern {
            out["pattern"] = json!(p);
        }
        if has_more || total_capped > 20 {
            out["message"] = json!(format!(
                "Showing {} of {} results. Use 'page' to continue.",
                out["count"].as_u64().unwrap_or(0),
                total_capped
            ));
        }
        out
    }
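
    /// Counts lines in a file. Despite the name, no tree-sitter parsing is
    /// involved; this simply reads the file and counts `str::lines`.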
    async fn count_lines_with_tree_sitter(&self, file_path: &Path) -> Result<usize> {
        let content = tokio::fs::read_to_string(file_path).await?;
        Ok(content.lines().count())
    }
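
    /// Returns the head and tail of a large file with a truncation marker in
    /// between, plus the total line count.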
    async fn read_file_chunked(
        &self,
        file_path: &Path,
        input: &Input,
    ) -> Result<(String, bool, Option<usize>)> {
        let content = tokio::fs::read_to_string(file_path).await?;
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();

        let start_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_START_LINES
        };
        let end_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_END_LINES
        };

        if total_lines <= start_chunk + end_chunk {
            return Ok((content, false, Some(total_lines)));
        }

        let mut chunked_content = String::new();

        for (i, line) in lines.iter().enumerate().take(start_chunk) {
            if i > 0 {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        chunked_content.push_str(&format!(
            "\n\n... [{} lines truncated - showing first {} and last {} lines] ...\n\n",
            total_lines - start_chunk - end_chunk,
            start_chunk,
            end_chunk
        ));

        let start_idx = total_lines.saturating_sub(end_chunk);
        for (i, line) in lines.iter().enumerate().skip(start_idx) {
            if i > start_idx {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        Ok((chunked_content, true, Some(total_lines)))
    }
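
    /// Logs a structured entry whenever a read was truncated by chunking.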
    async fn log_chunking_operation(
        &self,
        file_path: &Path,
        truncated: bool,
        total_lines: Option<usize>,
    ) -> Result<()> {
        if truncated {
            let log_entry = json!({
                "operation": "read_file_chunked",
                "file_path": file_path.to_string_lossy(),
                "truncated": true,
                "total_lines": total_lines,
                "timestamp": chrono::Utc::now().to_rfc3339()
            });

            info!(
                "File chunking operation: {}",
                serde_json::to_string(&log_entry)?
            );
        }
        Ok(())
    }
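
    /// Builds candidate paths for `path`: the workspace-relative path itself,
    /// common source directories for bare file names, and case-insensitive
    /// matches among top-level workspace entries.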
    fn resolve_file_path(&self, path: &str) -> Result<Vec<PathBuf>> {
        let mut paths = Vec::new();

        paths.push(self.workspace_root.join(path));

        if !path.contains('/') && !path.contains('\\') {
            paths.push(self.workspace_root.join("src").join(path));
            paths.push(self.workspace_root.join("lib").join(path));
            paths.push(self.workspace_root.join("bin").join(path));
            paths.push(self.workspace_root.join("app").join(path));
            paths.push(self.workspace_root.join("source").join(path));
            paths.push(self.workspace_root.join("sources").join(path));
            paths.push(self.workspace_root.join("include").join(path));
            paths.push(self.workspace_root.join("docs").join(path));
            paths.push(self.workspace_root.join("doc").join(path));
            paths.push(self.workspace_root.join("examples").join(path));
            paths.push(self.workspace_root.join("example").join(path));
            paths.push(self.workspace_root.join("tests").join(path));
            paths.push(self.workspace_root.join("test").join(path));
        }

        if !path.contains('/')
            && !path.contains('\\')
            && let Ok(entries) = std::fs::read_dir(&self.workspace_root)
        {
            for entry in entries.flatten() {
                if let Ok(name) = entry.file_name().into_string()
                    && name.to_lowercase() == path.to_lowercase()
                {
                    paths.push(entry.path());
                }
            }
        }

        Ok(paths)
    }
}