vtcode_core/tools/file_ops.rs

//! File operation tools with composable functionality

use super::traits::{CacheableTool, FileTool, ModeTool, Tool};
use super::types::*;
use crate::tools::grep_search::GrepSearchManager;
use crate::utils::vtcodegitignore::should_exclude_file;
use anyhow::{Context, Result, anyhow};
use async_trait::async_trait;
use serde_json::{Value, json};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tracing::{info, warn};
use walkdir::WalkDir;

/// File operations tool with multiple modes
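///
/// A minimal construction-and-invocation sketch (illustrative only: it assumes
/// `GrepSearchManager::new(PathBuf)` is the constructor, which may differ in the
/// real crate, and it must run inside an async context):
///
/// ```ignore
/// use std::path::PathBuf;
/// use std::sync::Arc;
///
/// let root = PathBuf::from("/path/to/workspace");
/// let grep = Arc::new(GrepSearchManager::new(root.clone())); // assumed constructor
/// let tool = FileOpsTool::new(root, grep);
///
/// // `Tool::execute` parses `ListInput` from JSON and dispatches on `mode`
/// // ("list" when omitted).
/// let out = tool.execute(serde_json::json!({ "path": "src" })).await?;
/// assert_eq!(out["mode"], "list");
/// ```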
#[derive(Clone)]
pub struct FileOpsTool {
    workspace_root: PathBuf,
}

impl FileOpsTool {
    pub fn new(workspace_root: PathBuf, _grep_search: Arc<GrepSearchManager>) -> Self {
        // grep_search was unused; keep param to avoid broad call-site churn
        Self { workspace_root }
    }

    /// Execute basic directory listing
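    ///
    /// Argument sketch for this mode (field names follow the `ListInput` fields
    /// referenced below; the values are illustrative, not configured defaults):
    ///
    /// ```json
    /// {
    ///   "path": "src",
    ///   "include_hidden": false,
    ///   "max_items": 500,
    ///   "page": 1,
    ///   "per_page": 50,
    ///   "response_format": "concise"
    /// }
    /// ```
    ///
    /// The "concise" format drops the `modified` timestamp from each item, and
    /// paging is applied after the `max_items` cap.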
    async fn execute_basic_list(&self, input: &ListInput) -> Result<Value> {
        let base = self.workspace_root.join(&input.path);

        if self.should_exclude(&base).await {
            return Err(anyhow!(
                "Path '{}' is excluded by .vtcodegitignore",
                input.path
            ));
        }

        let mut all_items = Vec::new();
        if base.is_file() {
            let metadata = tokio::fs::metadata(&base)
                .await
                .with_context(|| format!("Failed to read metadata for file: {}", input.path))?;
            all_items.push(json!({
                "name": base.file_name().unwrap().to_string_lossy(),
                "path": input.path,
                "type": "file",
                "size": metadata.len(),
                "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
            }));
        } else if base.is_dir() {
            let mut entries = tokio::fs::read_dir(&base)
                .await
                .with_context(|| format!("Failed to read directory: {}", input.path))?;
            while let Some(entry) = entries
                .next_entry()
                .await
                .with_context(|| format!("Failed to read directory entry in: {}", input.path))?
            {
                let path = entry.path();
                let name = entry.file_name().to_string_lossy().to_string();

                if !input.include_hidden && name.starts_with('.') {
                    continue;
                }
                if self.should_exclude(&path).await {
                    continue;
                }

                let metadata = entry
                    .metadata()
                    .await
                    .with_context(|| format!("Failed to read metadata for: {}", path.display()))?;
                all_items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(&path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
                }));
            }
        } else {
            warn!(
                path = %input.path,
                exists = base.exists(),
                is_file = base.is_file(),
                is_dir = base.is_dir(),
                "Path does not exist or is neither file nor directory"
            );
            return Err(anyhow!("Path '{}' does not exist", input.path));
        }

        // Apply max_items cap first for token efficiency
        let capped_total = all_items.len().min(input.max_items);
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(capped_total);
        let has_more = end < capped_total;

        // Log paging operation details
        info!(
            path = %input.path,
            total_items = all_items.len(),
            capped_total = capped_total,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated file listing"
        );

        // Validate paging parameters
        if page > 1 && start >= capped_total {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                total_items = capped_total,
                "Requested page exceeds available data"
            );
        }

        let mut page_items = if start < end {
            all_items[start..end].to_vec()
        } else {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no items in requested range"
            );
            vec![]
        };

        // Respect response_format
        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let guidance = if has_more || capped_total < all_items.len() || all_items.len() > 20 {
            Some(format!(
                "Showing {} of {} items (page {}, per_page {}). Use 'page' and 'per_page' to page through results.",
                page_items.len(),
                capped_total,
                page,
                per_page
            ))
        } else {
            None
        };

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": capped_total,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": "list",
            "response_format": if concise { "concise" } else { "detailed" }
        });

        if let Some(msg) = guidance {
            out["message"] = json!(msg);
        }
        Ok(out)
    }

    /// Execute recursive file search
    async fn execute_recursive_search(&self, input: &ListInput) -> Result<Value> {
        // Allow recursive listing without pattern by defaulting to "*" (match all)
        let default_pattern = "*".to_string();
        let pattern = input.name_pattern.as_ref().unwrap_or(&default_pattern);
        let search_path = self.workspace_root.join(&input.path);

        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            if !input.include_hidden && name.starts_with('.') {
                continue;
            }

            // Pattern matching - handle "*" as wildcard for all files
            let matches = if pattern == "*" {
                true // Match all files when pattern is "*"
            } else if input.case_sensitive.unwrap_or(true) {
                name.contains(pattern)
            } else {
                name.to_lowercase().contains(&pattern.to_lowercase())
            };

            if matches {
                // Extension filtering
                if let Some(ref extensions) = input.file_extensions {
                    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                        if !extensions.contains(&ext.to_string()) {
                            continue;
                        }
                    } else {
                        continue;
                    }
                }

                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "depth": entry.depth()
                }));
                count += 1;
            }
        }

        Ok(self.paginate_and_format(items, count, input, "recursive", Some(pattern)))
    }

    /// Execute find by exact name
    async fn execute_find_by_name(&self, input: &ListInput) -> Result<Value> {
        let file_name = input
            .name_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'name_pattern'. Example: list_files(path='.', mode='find_name', name_pattern='Cargo.toml')"))?;
        let search_path = self.workspace_root.join(&input.path);

        for entry in WalkDir::new(&search_path).max_depth(10) {
            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            let matches = if input.case_sensitive.unwrap_or(true) {
                name == file_name.as_str()
            } else {
                name.to_lowercase() == file_name.to_lowercase()
            };

            if matches {
                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                return Ok(json!({
                    "success": true,
                    "found": true,
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "mode": "find_name"
                }));
            }
        }

        Ok(json!({
            "success": true,
            "found": false,
            "mode": "find_name",
            "searched_for": file_name,
            "message": "Not found. Consider using mode='recursive' if searching in subdirectories."
        }))
    }

    /// Execute find by content pattern
    async fn execute_find_by_content(&self, input: &ListInput) -> Result<Value> {
        let content_pattern = input
            .content_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'content_pattern'. Example: list_files(path='src', mode='find_content', content_pattern='fn main')"))?;

        // Simple content search implementation
        let search_path = self.workspace_root.join(&input.path);
        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if !path.is_file() || self.should_exclude(path).await {
                continue;
            }

            // Read file content and search for pattern
            if let Ok(content) = tokio::fs::read_to_string(path).await {
                let matches = if input.case_sensitive.unwrap_or(true) {
                    content.contains(content_pattern)
                } else {
                    content
                        .to_lowercase()
                        .contains(&content_pattern.to_lowercase())
                };

                if matches && let Ok(metadata) = tokio::fs::metadata(path).await {
                    items.push(json!({
                        "name": path.file_name().unwrap_or_default().to_string_lossy(),
                        "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                        "type": "file",
                        "size": metadata.len(),
                        "pattern_found": true
                    }));
                    count += 1;
                }
            }
        }

        Ok(self.paginate_and_format(items, count, input, "find_content", Some(content_pattern)))
    }

    /// Read file with intelligent path resolution
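    ///
    /// Argument sketch (fields follow the `Input` struct as used below; the actual
    /// thresholds live in `crate::config::constants::chunking` and are not shown here):
    ///
    /// ```json
    /// { "path": "src/main.rs", "max_bytes": 20000, "max_lines": 400 }
    /// ```
    ///
    /// If the file exceeds the line threshold, the returned `content` is a
    /// head-plus-tail excerpt and the response carries `truncated: true` along
    /// with `total_lines` and `shown_lines` metadata.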
    pub async fn read_file(&self, args: Value) -> Result<Value> {
        let input: Input = serde_json::from_value(args)
            .context("Error: Invalid 'read_file' arguments. Required: { path: string }. Optional: { max_bytes: number }. Example: read_file({\"path\": \"src/main.rs\", \"max_bytes\": 20000})")?;

        // Try to resolve the file path
        let potential_paths = self.resolve_file_path(&input.path)?;

        for candidate_path in &potential_paths {
            if self.should_exclude(candidate_path).await {
                continue;
            }

            if candidate_path.exists() && candidate_path.is_file() {
                // Check if chunking is needed
                let should_chunk = if let Some(max_lines) = input.max_lines {
                    // User specified max_lines threshold
                    self.count_lines_with_tree_sitter(candidate_path).await? > max_lines
                } else if let Some(chunk_lines) = input.chunk_lines {
                    // User specified chunk_lines (legacy parameter)
                    self.count_lines_with_tree_sitter(candidate_path).await? > chunk_lines
                } else {
                    // Use default threshold
                    self.count_lines_with_tree_sitter(candidate_path).await?
                        > crate::config::constants::chunking::MAX_LINES_THRESHOLD
                };

                let (content, truncated, total_lines) = if should_chunk {
                    // Chunking is logged once below via log_chunking_operation
                    self.read_file_chunked(candidate_path, &input).await?
                } else {
                    let content = if let Some(max_bytes) = input.max_bytes {
                        let mut file_content = tokio::fs::read(candidate_path).await?;
                        if file_content.len() > max_bytes {
                            file_content.truncate(max_bytes);
                        }
                        String::from_utf8_lossy(&file_content).to_string()
                    } else {
                        tokio::fs::read_to_string(candidate_path).await?
                    };
                    (content, false, None)
                };

                let mut result = json!({
                    "success": true,
                    "content": content,
                    "path": candidate_path.strip_prefix(&self.workspace_root).unwrap_or(candidate_path).to_string_lossy(),
                    "metadata": {
                        "size": content.len()
                    }
                });

                if truncated {
                    result["truncated"] = json!(true);
                    result["truncation_reason"] = json!("file_exceeds_line_threshold");
                    if let Some(total) = total_lines {
                        result["total_lines"] = json!(total);
                        let start_chunk = if let Some(max_lines) = input.max_lines {
                            max_lines / 2
                        } else if let Some(chunk_lines) = input.chunk_lines {
                            chunk_lines / 2
                        } else {
                            crate::config::constants::chunking::CHUNK_START_LINES
                        };
                        let end_chunk = start_chunk;
                        result["shown_lines"] = json!(start_chunk + end_chunk);
                    }
                }

                // Log chunking operation
                self.log_chunking_operation(candidate_path, truncated, total_lines)
                    .await?;

                return Ok(result);
            }
        }

        Err(anyhow!(
            "Error: File not found: {}. Tried paths: {}. Suggestions: 1) Check the file path and case sensitivity, 2) Use 'list_files' to explore the directory structure, 3) Try case-insensitive search with just the filename. Example: read_file({{\"path\": \"src/main.rs\"}})",
            input.path,
            potential_paths
                .iter()
                .map(|p| p
                    .strip_prefix(&self.workspace_root)
                    .unwrap_or(p)
                    .to_string_lossy())
                .collect::<Vec<_>>()
                .join(", ")
        ))
    }

    /// Write file with various modes and chunking support for large content
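    ///
    /// Argument sketch (mirrors the `WriteInput` fields used below):
    ///
    /// ```json
    /// { "path": "README.md", "content": "Hello", "mode": "overwrite" }
    /// ```
    ///
    /// Supported modes are `overwrite`, `append`, and `skip_if_exists`; content
    /// larger than `MAX_WRITE_CONTENT_SIZE` is delegated to the chunked writer.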
    pub async fn write_file(&self, args: Value) -> Result<Value> {
        let input: WriteInput = serde_json::from_value(args)
            .context("Error: Invalid 'write_file' arguments. Required: { path: string, content: string }. Optional: { mode: 'overwrite'|'append'|'skip_if_exists' }. Example: write_file({\"path\": \"README.md\", \"content\": \"Hello\", \"mode\": \"overwrite\"})")?;
        let file_path = self.workspace_root.join(&input.path);

        // Check if content needs chunking
        let content_size = input.content.len();
        let should_chunk =
            content_size > crate::config::constants::chunking::MAX_WRITE_CONTENT_SIZE;

        if should_chunk {
            return self.write_file_chunked(&file_path, &input).await;
        }

        // Create parent directories if needed
        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        match input.mode.as_str() {
            "overwrite" => {
                tokio::fs::write(&file_path, &input.content).await?;
            }
            "append" => {
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(&file_path)
                    .await?;
                file.write_all(input.content.as_bytes()).await?;
            }
            "skip_if_exists" => {
                if file_path.exists() {
                    return Ok(json!({
                        "success": true,
                        "skipped": true,
                        "reason": "File already exists"
                    }));
                }
                tokio::fs::write(&file_path, &input.content).await?;
            }
            _ => {
                return Err(anyhow!(format!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                )));
            }
        }

        // Log write operation
        self.log_write_operation(&file_path, content_size, false)
            .await?;

        Ok(json!({
            "success": true,
            "path": input.path,
            "mode": input.mode,
            "bytes_written": input.content.len()
        }))
    }

    /// Write large file content in chunks to bound memory usage
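    ///
    /// Worked example of the reported chunk math (the real `WRITE_CHUNK_SIZE` is
    /// defined in `crate::config::constants::chunking`; 64 KiB is only an assumed
    /// value for illustration): writing 150 000 bytes with a 65 536-byte chunk
    /// size issues ceil(150000 / 65536) = 3 `write_all` calls, and the response
    /// reports `"chunks_written": 3`.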
    async fn write_file_chunked(&self, file_path: &Path, input: &WriteInput) -> Result<Value> {
        // Create parent directories if needed
        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        let content_bytes = input.content.as_bytes();
        let chunk_size = crate::config::constants::chunking::WRITE_CHUNK_SIZE;
        let total_size = content_bytes.len();

        match input.mode.as_str() {
            "overwrite" => {
                // Write in chunks for large files
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .write(true)
                    .truncate(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "append" => {
                // Append in chunks
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "skip_if_exists" => {
                if file_path.exists() {
                    return Ok(json!({
                        "success": true,
                        "skipped": true,
                        "reason": "File already exists"
                    }));
                }
                // Write in chunks for new file
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::File::create(file_path).await?;
                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            _ => {
                return Err(anyhow!(format!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                )));
            }
        }

        // Log chunked write operation
        self.log_write_operation(file_path, total_size, true)
            .await?;

        Ok(json!({
            "success": true,
            "path": file_path.strip_prefix(&self.workspace_root).unwrap_or(file_path).to_string_lossy(),
            "mode": input.mode,
            "bytes_written": total_size,
            "chunked": true,
            "chunk_size": chunk_size,
            "chunks_written": total_size.div_ceil(chunk_size)
        }))
    }

    /// Log write operations for debugging
    async fn log_write_operation(
        &self,
        file_path: &Path,
        bytes_written: usize,
        chunked: bool,
    ) -> Result<()> {
        let log_entry = json!({
            "operation": if chunked { "write_file_chunked" } else { "write_file" },
            "file_path": file_path.to_string_lossy(),
            "bytes_written": bytes_written,
            "chunked": chunked,
            "chunk_size": if chunked { Some(crate::config::constants::chunking::WRITE_CHUNK_SIZE) } else { None },
            "timestamp": chrono::Utc::now().to_rfc3339()
        });

        info!(
            "File write operation: {}",
            serde_json::to_string(&log_entry)?
        );
        Ok(())
    }
}

#[async_trait]
impl Tool for FileOpsTool {
    async fn execute(&self, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args).context(
            "Error: Invalid 'list_files' arguments. Required: { path: string }. Optional: { mode, max_items, page, per_page, include_hidden, response_format }. Example: list_files({\"path\": \"src\", \"page\": 1, \"per_page\": 50, \"response_format\": \"concise\"})",
        )?;

        let mode_clone = input.mode.clone();
        let mode = mode_clone.as_deref().unwrap_or("list");
        self.execute_mode(mode, serde_json::to_value(input)?).await
    }

    fn name(&self) -> &'static str {
        "list_files"
    }

    fn description(&self) -> &'static str {
        "Enhanced file discovery tool with multiple modes: list (default), recursive, find_name, find_content"
    }
}

#[async_trait]
impl FileTool for FileOpsTool {
    fn workspace_root(&self) -> &PathBuf {
        &self.workspace_root
    }

    async fn should_exclude(&self, path: &Path) -> bool {
        should_exclude_file(path).await
    }
}

#[async_trait]
impl ModeTool for FileOpsTool {
    fn supported_modes(&self) -> Vec<&'static str> {
        vec!["list", "recursive", "find_name", "find_content"]
    }

    async fn execute_mode(&self, mode: &str, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args)?;

        match mode {
            "list" => self.execute_basic_list(&input).await,
            "recursive" => self.execute_recursive_search(&input).await,
            "find_name" => self.execute_find_by_name(&input).await,
            "find_content" => self.execute_find_by_content(&input).await,
            _ => Err(anyhow!("Unsupported file operation mode: {}", mode)),
        }
    }
}

#[async_trait]
impl CacheableTool for FileOpsTool {
    fn cache_key(&self, args: &Value) -> String {
        format!(
            "files:{}:{}",
            args.get("path").and_then(|p| p.as_str()).unwrap_or(""),
            args.get("mode").and_then(|m| m.as_str()).unwrap_or("list")
        )
    }

    fn should_cache(&self, args: &Value) -> bool {
        // Cache list and recursive modes, but not content-based searches
        let mode = args.get("mode").and_then(|m| m.as_str()).unwrap_or("list");
        matches!(mode, "list" | "recursive")
    }

    fn cache_ttl(&self) -> u64 {
        60 // 1 minute for file listings
    }
}

impl FileOpsTool {
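    /// Paginate pre-collected search results and shape the JSON response.
    ///
    /// Worked example of the index math below: with `page = 2`, `per_page = 50`,
    /// and 120 capped results, `start = 50`, `end = 100`, and `has_more = true`
    /// (items 100..119 remain for page 3).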
    fn paginate_and_format(
        &self,
        items: Vec<Value>,
        total_count: usize,
        input: &ListInput,
        mode: &str,
        pattern: Option<&String>,
    ) -> Value {
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let total_capped = total_count.min(input.max_items);
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(total_capped);
        let has_more = end < total_capped;

        // Log pagination operation details
        info!(
            mode = %mode,
            pattern = ?pattern,
            total_items = total_count,
            capped_total = total_capped,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated search results"
        );

        // Validate pagination parameters
        if page > 1 && start >= total_capped {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                total_items = total_capped,
                "Requested page exceeds available search results"
            );
        }

        let mut page_items = if start < end {
            items[start..end].to_vec()
        } else {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no search results in requested range"
            );
            vec![]
        };

        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": total_capped,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": mode,
            "response_format": if concise { "concise" } else { "detailed" }
        });
        if let Some(p) = pattern {
            out["pattern"] = json!(p);
        }
        if has_more || total_capped > 20 {
            out["message"] = json!(format!(
                "Showing {} of {} results. Use 'page' to continue.",
                out["count"].as_u64().unwrap_or(0),
                total_capped
            ));
        }
        out
    }

    /// Count the number of lines in a file (a plain line count; tree-sitter parsing is not used here)
    async fn count_lines_with_tree_sitter(&self, file_path: &Path) -> Result<usize> {
        let content = tokio::fs::read_to_string(file_path).await?;
        Ok(content.lines().count())
    }

    /// Read file with chunking (first N + last N lines)
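    ///
    /// Shape of the truncated output (assuming, purely for illustration, that
    /// `CHUNK_START_LINES` and `CHUNK_END_LINES` are both 100): a 1 000-line file
    /// comes back as lines 1-100, then the marker
    /// `... [800 lines truncated - showing first 100 and last 100 lines] ...`,
    /// then lines 901-1000, with `truncated = true` and `total_lines = Some(1000)`.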
    async fn read_file_chunked(
        &self,
        file_path: &Path,
        input: &Input,
    ) -> Result<(String, bool, Option<usize>)> {
        let content = tokio::fs::read_to_string(file_path).await?;
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();

        // Use custom chunk sizes if provided, otherwise use defaults
        let start_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_START_LINES
        };
        let end_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_END_LINES
        };

        if total_lines <= start_chunk + end_chunk {
            // File is small enough, return all content
            return Ok((content, false, Some(total_lines)));
        }

        // Create chunked content
        let mut chunked_content = String::new();

        // Add first N lines
        for (i, line) in lines.iter().enumerate().take(start_chunk) {
            if i > 0 {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        // Add truncation indicator
        chunked_content.push_str(&format!(
            "\n\n... [{} lines truncated - showing first {} and last {} lines] ...\n\n",
            total_lines - start_chunk - end_chunk,
            start_chunk,
            end_chunk
        ));

        // Add last N lines
        let start_idx = total_lines.saturating_sub(end_chunk);
        for (i, line) in lines.iter().enumerate().skip(start_idx) {
            if i > start_idx {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        Ok((chunked_content, true, Some(total_lines)))
    }

    /// Log chunking operations for debugging
    async fn log_chunking_operation(
        &self,
        file_path: &Path,
        truncated: bool,
        total_lines: Option<usize>,
    ) -> Result<()> {
        if truncated {
            let log_entry = json!({
                "operation": "read_file_chunked",
                "file_path": file_path.to_string_lossy(),
                "truncated": true,
                "total_lines": total_lines,
                "timestamp": chrono::Utc::now().to_rfc3339()
            });

            info!(
                "File chunking operation: {}",
                serde_json::to_string(&log_entry)?
            );
        }
        Ok(())
    }

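    /// Build the ordered list of candidate paths for a requested file.
    ///
    /// For example, a bare filename such as `main.rs` expands (in order) to the
    /// exact join with the workspace root, then `src/main.rs`, `lib/main.rs`,
    /// `bin/main.rs`, and the other common directories listed below, plus any
    /// case-insensitive match found directly in the workspace root.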
    fn resolve_file_path(&self, path: &str) -> Result<Vec<PathBuf>> {
        let mut paths = Vec::new();

        // Try exact path first
        paths.push(self.workspace_root.join(path));

        // If it's just a filename, try common directories that exist in most projects
        if !path.contains('/') && !path.contains('\\') {
            // Generic source directories found in most projects
            paths.push(self.workspace_root.join("src").join(path));
            paths.push(self.workspace_root.join("lib").join(path));
            paths.push(self.workspace_root.join("bin").join(path));
            paths.push(self.workspace_root.join("app").join(path));
            paths.push(self.workspace_root.join("source").join(path));
            paths.push(self.workspace_root.join("sources").join(path));
            paths.push(self.workspace_root.join("include").join(path));
            paths.push(self.workspace_root.join("docs").join(path));
            paths.push(self.workspace_root.join("doc").join(path));
            paths.push(self.workspace_root.join("examples").join(path));
            paths.push(self.workspace_root.join("example").join(path));
            paths.push(self.workspace_root.join("tests").join(path));
            paths.push(self.workspace_root.join("test").join(path));
        }

        // Try case-insensitive variants for filenames
        if !path.contains('/')
            && !path.contains('\\')
            && let Ok(entries) = std::fs::read_dir(&self.workspace_root)
        {
            for entry in entries.flatten() {
                if let Ok(name) = entry.file_name().into_string()
                    && name.to_lowercase() == path.to_lowercase()
                {
                    paths.push(entry.path());
                }
            }
        }

        Ok(paths)
    }
}