vtcode_core/tools/file_ops.rs

//! File operation tools with composable functionality

use super::traits::{CacheableTool, FileTool, ModeTool, Tool};
use super::types::*;
use crate::tools::grep_search::GrepSearchManager;
use crate::utils::vtcodegitignore::should_exclude_file;
use anyhow::{Context, Result, anyhow};
use async_trait::async_trait;
use serde_json::{Value, json};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tracing::{info, warn};
use walkdir::WalkDir;

/// File operations tool with multiple modes
#[derive(Clone)]
pub struct FileOpsTool {
    workspace_root: PathBuf,
}

impl FileOpsTool {
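    /// Create the tool rooted at `workspace_root`.
    ///
    /// A minimal construction sketch (the workspace path and the way the
    /// `GrepSearchManager` is obtained are illustrative assumptions, not part of
    /// this module):
    ///
    /// ```ignore
    /// use std::path::PathBuf;
    /// use std::sync::Arc;
    ///
    /// let grep = Arc::new(grep_search_manager); // built elsewhere in the crate
    /// let tool = FileOpsTool::new(PathBuf::from("/path/to/workspace"), grep);
    /// ```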
    pub fn new(workspace_root: PathBuf, _grep_search: Arc<GrepSearchManager>) -> Self {
        // grep_search was unused; keep param to avoid broad call-site churn
        Self { workspace_root }
    }

    /// Execute basic directory listing
    async fn execute_basic_list(&self, input: &ListInput) -> Result<Value> {
        let base = self.workspace_root.join(&input.path);

        if self.should_exclude(&base).await {
            return Err(anyhow!(
                "Path '{}' is excluded by .vtcodegitignore",
                input.path
            ));
        }

        let mut all_items = Vec::new();
        if base.is_file() {
            let metadata = tokio::fs::metadata(&base)
                .await
                .with_context(|| format!("Failed to read metadata for file: {}", input.path))?;
            all_items.push(json!({
                "name": base.file_name().unwrap().to_string_lossy(),
                "path": input.path,
                "type": "file",
                "size": metadata.len(),
                "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
            }));
        } else if base.is_dir() {
            let mut entries = tokio::fs::read_dir(&base)
                .await
                .with_context(|| format!("Failed to read directory: {}", input.path))?;
            while let Some(entry) = entries
                .next_entry()
                .await
                .with_context(|| format!("Failed to read directory entry in: {}", input.path))?
            {
                let path = entry.path();
                let name = entry.file_name().to_string_lossy().to_string();

                if !input.include_hidden && name.starts_with('.') {
                    continue;
                }
                if self.should_exclude(&path).await {
                    continue;
                }

                let metadata = entry
                    .metadata()
                    .await
                    .with_context(|| format!("Failed to read metadata for: {}", path.display()))?;
                all_items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(&path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "modified": metadata.modified().ok().and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()).map(|d| d.as_secs())
                }));
            }
        } else {
            warn!(
                path = %input.path,
                exists = base.exists(),
                is_file = base.is_file(),
                is_dir = base.is_dir(),
                "Path does not exist or is neither file nor directory"
            );
            return Err(anyhow!("Path '{}' does not exist", input.path));
        }

        // Apply max_items cap first for token efficiency
        let capped_total = all_items.len().min(input.max_items);
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(capped_total);
        let has_more = end < capped_total;

        // Log paging operation details
        info!(
            path = %input.path,
            total_items = all_items.len(),
            capped_total = capped_total,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated file listing"
        );

        // Validate paging parameters
        if page > 1 && start >= capped_total {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                total_items = capped_total,
                "Requested page exceeds available data"
            );
        }

        let mut page_items = if start < end {
            all_items[start..end].to_vec()
        } else {
            warn!(
                path = %input.path,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no items in requested range"
            );
            vec![]
        };

        // Respect response_format
        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let guidance = if has_more || capped_total < all_items.len() || all_items.len() > 20 {
            Some(format!(
                "Showing {} of {} items (page {}, per_page {}). Use 'page' and 'per_page' to page through results.",
                page_items.len(),
                capped_total,
                page,
                per_page
            ))
        } else {
            None
        };

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": capped_total,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": "list",
            "response_format": if concise { "concise" } else { "detailed" }
        });

        if let Some(msg) = guidance {
            out["message"] = json!(msg);
        }
        Ok(out)
    }
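
    // Illustrative shape of a successful `list` response from the method above,
    // assuming one visible entry and the default concise format (the "modified"
    // field is stripped in that case):
    //
    // {
    //   "success": true,
    //   "items": [{"name": "main.rs", "path": "src/main.rs", "type": "file", "size": 1234}],
    //   "count": 1, "total": 1, "page": 1, "per_page": 50,
    //   "has_more": false, "mode": "list", "response_format": "concise"
    // }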

    /// Execute recursive file search
    async fn execute_recursive_search(&self, input: &ListInput) -> Result<Value> {
        // Allow recursive listing without pattern by defaulting to "*" (match all)
        let default_pattern = "*".to_string();
        let pattern = input.name_pattern.as_ref().unwrap_or(&default_pattern);
        let search_path = self.workspace_root.join(&input.path);

        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            if !input.include_hidden && name.starts_with('.') {
                continue;
            }

            // Pattern matching - handle "*" as wildcard for all files
            let matches = if pattern == "*" {
                true // Match all files when pattern is "*"
            } else if input.case_sensitive.unwrap_or(true) {
                name.contains(pattern)
            } else {
                name.to_lowercase().contains(&pattern.to_lowercase())
            };

            if matches {
                // Extension filtering
                if let Some(ref extensions) = input.file_extensions {
                    if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                        if !extensions.contains(&ext.to_string()) {
                            continue;
                        }
                    } else {
                        continue;
                    }
                }

                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                items.push(json!({
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "depth": entry.depth()
                }));
                count += 1;
            }
        }

        Ok(self.paginate_and_format(items, count, input, "recursive", Some(pattern)))
    }
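
    // Example arguments for this mode (names mirror `ListInput` as used above;
    // values are illustrative). Note that `name_pattern` is a plain substring
    // match here, with "*" meaning "match everything":
    //
    // {"path": "src", "mode": "recursive", "name_pattern": "config",
    //  "file_extensions": ["rs"], "max_items": 100}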

    /// Execute find by exact name
    async fn execute_find_by_name(&self, input: &ListInput) -> Result<Value> {
        let file_name = input
            .name_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'name_pattern'. Example: list_files(path='.', mode='find_name', name_pattern='Cargo.toml')"))?;
        let search_path = self.workspace_root.join(&input.path);

        for entry in WalkDir::new(&search_path).max_depth(10) {
            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if self.should_exclude(path).await {
                continue;
            }

            let name = path.file_name().unwrap_or_default().to_string_lossy();
            let matches = if input.case_sensitive.unwrap_or(true) {
                name == file_name.as_str()
            } else {
                name.to_lowercase() == file_name.to_lowercase()
            };

            if matches {
                let metadata = entry
                    .metadata()
                    .map_err(|e| anyhow!("Metadata error: {}", e))?;
                return Ok(json!({
                    "success": true,
                    "found": true,
                    "name": name,
                    "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                    "type": if metadata.is_dir() { "directory" } else { "file" },
                    "size": metadata.len(),
                    "mode": "find_name"
                }));
            }
        }

        Ok(json!({
            "success": true,
            "found": false,
            "mode": "find_name",
            "searched_for": file_name,
            "message": "Not found. Consider using mode='recursive' if searching in subdirectories."
        }))
    }
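
    // Example arguments, following the hint in the error message above:
    // {"path": ".", "mode": "find_name", "name_pattern": "Cargo.toml"}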

    /// Execute find by content pattern
    async fn execute_find_by_content(&self, input: &ListInput) -> Result<Value> {
        let content_pattern = input
            .content_pattern
            .as_ref()
            .ok_or_else(|| anyhow!("Error: Missing 'content_pattern'. Example: list_files(path='src', mode='find_content', content_pattern='fn main')"))?;

        // Simple content search implementation
        let search_path = self.workspace_root.join(&input.path);
        let mut items = Vec::new();
        let mut count = 0;

        for entry in WalkDir::new(&search_path).max_depth(10) {
            if count >= input.max_items {
                break;
            }

            let entry = entry.map_err(|e| anyhow!("Walk error: {}", e))?;
            let path = entry.path();

            if !path.is_file() || self.should_exclude(path).await {
                continue;
            }

            // Read file content and search for pattern
            if let Ok(content) = tokio::fs::read_to_string(path).await {
                let matches = if input.case_sensitive.unwrap_or(true) {
                    content.contains(content_pattern)
                } else {
                    content
                        .to_lowercase()
                        .contains(&content_pattern.to_lowercase())
                };

                if matches {
                    if let Ok(metadata) = tokio::fs::metadata(path).await {
                        items.push(json!({
                            "name": path.file_name().unwrap_or_default().to_string_lossy(),
                            "path": path.strip_prefix(&self.workspace_root).unwrap_or(path).to_string_lossy(),
                            "type": "file",
                            "size": metadata.len(),
                            "pattern_found": true
                        }));
                        count += 1;
                    }
                }
            }
        }

        Ok(self.paginate_and_format(items, count, input, "find_content", Some(content_pattern)))
    }
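
    // Example arguments, following the hint in the error message above:
    // {"path": "src", "mode": "find_content", "content_pattern": "fn main"}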

    /// Read file with intelligent path resolution
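    ///
    /// A hedged usage sketch (the path and byte budget are illustrative; `tool`
    /// is a previously constructed `FileOpsTool`):
    ///
    /// ```ignore
    /// let result = tool
    ///     .read_file(serde_json::json!({"path": "src/main.rs", "max_bytes": 20000}))
    ///     .await?;
    /// assert_eq!(result["success"], serde_json::json!(true));
    /// ```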
    pub async fn read_file(&self, args: Value) -> Result<Value> {
        let input: Input = serde_json::from_value(args)
            .context("Error: Invalid 'read_file' arguments. Required: {{ path: string }}. Optional: {{ max_bytes: number }}. Example: read_file({{\"path\": \"src/main.rs\", \"max_bytes\": 20000}})")?;

        // Try to resolve the file path
        let potential_paths = self.resolve_file_path(&input.path)?;

        for candidate_path in &potential_paths {
            if self.should_exclude(candidate_path).await {
                continue;
            }

            if candidate_path.exists() && candidate_path.is_file() {
                // Check if chunking is needed
                let should_chunk = if let Some(max_lines) = input.max_lines {
                    // User specified max_lines threshold
                    self.count_lines_with_tree_sitter(candidate_path).await? > max_lines
                } else if let Some(chunk_lines) = input.chunk_lines {
                    // User specified chunk_lines (legacy parameter)
                    self.count_lines_with_tree_sitter(candidate_path).await? > chunk_lines
                } else {
                    // Use default threshold
                    self.count_lines_with_tree_sitter(candidate_path).await?
                        > crate::config::constants::chunking::MAX_LINES_THRESHOLD
                };

                let (content, truncated, total_lines) = if should_chunk {
                    self.read_file_chunked(candidate_path, &input).await?
                } else {
                    let content = if let Some(max_bytes) = input.max_bytes {
                        let mut file_content = tokio::fs::read(candidate_path).await?;
                        if file_content.len() > max_bytes {
                            file_content.truncate(max_bytes);
                        }
                        String::from_utf8_lossy(&file_content).to_string()
                    } else {
                        tokio::fs::read_to_string(candidate_path).await?
                    };
                    (content, false, None)
                };

                let mut result = json!({
                    "success": true,
                    "content": content,
                    "path": candidate_path.strip_prefix(&self.workspace_root).unwrap_or(candidate_path).to_string_lossy(),
                    "metadata": {
                        "size": content.len()
                    }
                });

                if truncated {
                    result["truncated"] = json!(true);
                    result["truncation_reason"] = json!("file_exceeds_line_threshold");
                    if let Some(total) = total_lines {
                        result["total_lines"] = json!(total);
                        let start_chunk = if let Some(max_lines) = input.max_lines {
                            max_lines / 2
                        } else if let Some(chunk_lines) = input.chunk_lines {
                            chunk_lines / 2
                        } else {
                            crate::config::constants::chunking::CHUNK_START_LINES
                        };
                        let end_chunk = start_chunk;
                        result["shown_lines"] = json!(start_chunk + end_chunk);
                    }
                }

                // Log chunking operation
                self.log_chunking_operation(candidate_path, truncated, total_lines)
                    .await?;

                return Ok(result);
            }
        }

        Err(anyhow!(
            "Error: File not found: {}. Tried paths: {}. Suggestions: 1) Check the file path and case sensitivity, 2) Use 'list_files' to explore the directory structure, 3) Try case-insensitive search with just the filename. Example: read_file({{\"path\": \"src/main.rs\"}})",
            input.path,
            potential_paths
                .iter()
                .map(|p| p
                    .strip_prefix(&self.workspace_root)
                    .unwrap_or(p)
                    .to_string_lossy())
                .collect::<Vec<_>>()
                .join(", ")
        ))
    }

    /// Write file with various modes and chunking support for large content
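    ///
    /// A hedged usage sketch (path, content, and mode are illustrative; `tool` is
    /// a previously constructed `FileOpsTool`):
    ///
    /// ```ignore
    /// let result = tool
    ///     .write_file(serde_json::json!({
    ///         "path": "README.md",
    ///         "content": "Hello",
    ///         "mode": "overwrite"
    ///     }))
    ///     .await?;
    /// assert_eq!(result["bytes_written"], serde_json::json!(5));
    /// ```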
    pub async fn write_file(&self, args: Value) -> Result<Value> {
        let input: WriteInput = serde_json::from_value(args)
            .context("Error: Invalid 'write_file' arguments. Required: {{ path: string, content: string }}. Optional: {{ mode: 'overwrite'|'append'|'skip_if_exists' }}. Example: write_file({{\"path\": \"README.md\", \"content\": \"Hello\", \"mode\": \"overwrite\"}})")?;
        let file_path = self.workspace_root.join(&input.path);

        // Check if content needs chunking
        let content_size = input.content.len();
        let should_chunk =
            content_size > crate::config::constants::chunking::MAX_WRITE_CONTENT_SIZE;

        if should_chunk {
            return self.write_file_chunked(&file_path, &input).await;
        }

        // Create parent directories if needed
        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        match input.mode.as_str() {
            "overwrite" => {
                tokio::fs::write(&file_path, &input.content).await?;
            }
            "append" => {
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(&file_path)
                    .await?;
                file.write_all(input.content.as_bytes()).await?;
            }
            "skip_if_exists" => {
                if file_path.exists() {
                    return Ok(json!({
                        "success": true,
                        "skipped": true,
                        "reason": "File already exists"
                    }));
                }
                tokio::fs::write(&file_path, &input.content).await?;
            }
            _ => {
                return Err(anyhow!(format!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                )));
            }
        }

        // Log write operation
        self.log_write_operation(&file_path, content_size, false)
            .await?;

        Ok(json!({
            "success": true,
            "path": input.path,
            "mode": input.mode,
            "bytes_written": input.content.len()
        }))
    }

    /// Write large file content in chunks to keep memory usage bounded
    async fn write_file_chunked(&self, file_path: &Path, input: &WriteInput) -> Result<Value> {
        // Create parent directories if needed
        if let Some(parent) = file_path.parent() {
            tokio::fs::create_dir_all(parent).await?;
        }

        let content_bytes = input.content.as_bytes();
        let chunk_size = crate::config::constants::chunking::WRITE_CHUNK_SIZE;
        let total_size = content_bytes.len();

        match input.mode.as_str() {
            "overwrite" => {
                // Write in chunks for large files
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .write(true)
                    .truncate(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "append" => {
                // Append in chunks
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(file_path)
                    .await?;

                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            "skip_if_exists" => {
                if file_path.exists() {
                    return Ok(json!({
                        "success": true,
                        "skipped": true,
                        "reason": "File already exists"
                    }));
                }
                // Write in chunks for new file
                use tokio::io::AsyncWriteExt;
                let mut file = tokio::fs::File::create(file_path).await?;
                for chunk in content_bytes.chunks(chunk_size) {
                    file.write_all(chunk).await?;
                }
                file.flush().await?;
            }
            _ => {
                return Err(anyhow!(format!(
                    "Error: Unsupported write mode '{}'. Allowed: overwrite, append, skip_if_exists.",
                    input.mode
                )));
            }
        }

        // Log chunked write operation
        self.log_write_operation(file_path, total_size, true)
            .await?;

        Ok(json!({
            "success": true,
            "path": file_path.strip_prefix(&self.workspace_root).unwrap_or(file_path).to_string_lossy(),
            "mode": input.mode,
            "bytes_written": total_size,
            "chunked": true,
            "chunk_size": chunk_size,
            "chunks_written": (total_size + chunk_size - 1) / chunk_size
        }))
    }
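
    // Worked example for the chunk accounting above. WRITE_CHUNK_SIZE comes from
    // crate::config::constants::chunking; the 64 KiB value here is only an assumed
    // illustration: with chunk_size = 65_536 and total_size = 153_600,
    // chunks_written = (153_600 + 65_536 - 1) / 65_536 = 3.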

    /// Log write operations for debugging
    async fn log_write_operation(
        &self,
        file_path: &Path,
        bytes_written: usize,
        chunked: bool,
    ) -> Result<()> {
        let log_entry = json!({
            "operation": if chunked { "write_file_chunked" } else { "write_file" },
            "file_path": file_path.to_string_lossy(),
            "bytes_written": bytes_written,
            "chunked": chunked,
            "chunk_size": if chunked { Some(crate::config::constants::chunking::WRITE_CHUNK_SIZE) } else { None },
            "timestamp": chrono::Utc::now().to_rfc3339()
        });

        info!(
            "File write operation: {}",
            serde_json::to_string(&log_entry)?
        );
        Ok(())
    }
}

#[async_trait]
impl Tool for FileOpsTool {
    async fn execute(&self, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args).context(
            "Error: Invalid 'list_files' arguments. Required: {{ path: string }}. Optional: {{ mode, max_items, page, per_page, include_hidden, response_format }}. Example: list_files({{\"path\": \"src\", \"page\": 1, \"per_page\": 50, \"response_format\": \"concise\"}})",
        )?;

        let mode_clone = input.mode.clone();
        let mode = mode_clone.as_deref().unwrap_or("list");
        self.execute_mode(mode, serde_json::to_value(input)?).await
    }
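
    // Example arguments accepted here, mirroring the error-message hint above
    // (values illustrative): {"path": "src", "page": 1, "per_page": 50,
    // "response_format": "concise"}. When "mode" is omitted it defaults to "list".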

    fn name(&self) -> &'static str {
        "list_files"
    }

    fn description(&self) -> &'static str {
        "Enhanced file discovery tool with multiple modes: list (default), recursive, find_name, find_content"
    }
}

#[async_trait]
impl FileTool for FileOpsTool {
    fn workspace_root(&self) -> &PathBuf {
        &self.workspace_root
    }

    async fn should_exclude(&self, path: &Path) -> bool {
        should_exclude_file(path).await
    }
}

#[async_trait]
impl ModeTool for FileOpsTool {
    fn supported_modes(&self) -> Vec<&'static str> {
        vec!["list", "recursive", "find_name", "find_content"]
    }

    async fn execute_mode(&self, mode: &str, args: Value) -> Result<Value> {
        let input: ListInput = serde_json::from_value(args)?;

        match mode {
            "list" => self.execute_basic_list(&input).await,
            "recursive" => self.execute_recursive_search(&input).await,
            "find_name" => self.execute_find_by_name(&input).await,
            "find_content" => self.execute_find_by_content(&input).await,
            _ => Err(anyhow!("Unsupported file operation mode: {}", mode)),
        }
    }
}

#[async_trait]
impl CacheableTool for FileOpsTool {
    fn cache_key(&self, args: &Value) -> String {
        format!(
            "files:{}:{}",
            args.get("path").and_then(|p| p.as_str()).unwrap_or(""),
            args.get("mode").and_then(|m| m.as_str()).unwrap_or("list")
        )
    }

    fn should_cache(&self, args: &Value) -> bool {
        // Cache list and recursive modes, but not content-based searches
        let mode = args.get("mode").and_then(|m| m.as_str()).unwrap_or("list");
        matches!(mode, "list" | "recursive")
    }

    fn cache_ttl(&self) -> u64 {
        60 // 1 minute for file listings
    }
}
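
// Example cache keys produced by `cache_key` above (arguments illustrative):
// {"path": "src", "mode": "recursive"} -> "files:src:recursive"
// {"path": "src"}                      -> "files:src:list" (mode defaults to "list")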

impl FileOpsTool {
    fn paginate_and_format(
        &self,
        items: Vec<Value>,
        total_count: usize,
        input: &ListInput,
        mode: &str,
        pattern: Option<&String>,
    ) -> Value {
        let (page, per_page) = (
            input.page.unwrap_or(1).max(1),
            input.per_page.unwrap_or(50).max(1),
        );
        let total_capped = total_count.min(input.max_items);
        let start = (page - 1).saturating_mul(per_page);
        let end = (start + per_page).min(total_capped);
        let has_more = end < total_capped;

        // Log pagination operation details
        info!(
            mode = %mode,
            pattern = ?pattern,
            total_items = total_count,
            capped_total = total_capped,
            page = page,
            per_page = per_page,
            start_index = start,
            end_index = end,
            has_more = has_more,
            "Executing paginated search results"
        );

        // Validate pagination parameters
        if page > 1 && start >= total_capped {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                total_items = total_capped,
                "Requested page exceeds available search results"
            );
        }

        let mut page_items = if start < end {
            items[start..end].to_vec()
        } else {
            warn!(
                mode = %mode,
                page = page,
                per_page = per_page,
                start_index = start,
                end_index = end,
                "Empty page result - no search results in requested range"
            );
            vec![]
        };

        let concise = input
            .response_format
            .as_deref()
            .map(|s| s.eq_ignore_ascii_case("concise"))
            .unwrap_or(true);
        if concise {
            for obj in page_items.iter_mut() {
                if let Some(map) = obj.as_object_mut() {
                    map.remove("modified");
                }
            }
        }

        let mut out = json!({
            "success": true,
            "items": page_items,
            "count": page_items.len(),
            "total": total_capped,
            "page": page,
            "per_page": per_page,
            "has_more": has_more,
            "mode": mode,
            "response_format": if concise { "concise" } else { "detailed" }
        });
        if let Some(p) = pattern {
            out["pattern"] = json!(p);
        }
        if has_more || total_capped > 20 {
            out["message"] = json!(format!(
                "Showing {} of {} results. Use 'page' to continue.",
                out["count"].as_u64().unwrap_or(0),
                total_capped
            ));
        }
        out
    }
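
    // Worked example of the paging arithmetic above (numbers illustrative):
    // 120 collected items with max_items = 100, page = 2, per_page = 50 gives
    // total_capped = 100, start = 50, end = 100, has_more = false; page = 3 would
    // produce an empty page (start = 100 >= total_capped) and log a warning.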

    /// Count lines in a file (a simple line count; despite the name, no tree-sitter parsing is involved)
    async fn count_lines_with_tree_sitter(&self, file_path: &Path) -> Result<usize> {
        let content = tokio::fs::read_to_string(file_path).await?;
        Ok(content.lines().count())
    }

    /// Read file with chunking (first N + last N lines)
    async fn read_file_chunked(
        &self,
        file_path: &Path,
        input: &Input,
    ) -> Result<(String, bool, Option<usize>)> {
        let content = tokio::fs::read_to_string(file_path).await?;
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();

        // Use custom chunk sizes if provided, otherwise use defaults
        let start_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_START_LINES
        };
        let end_chunk = if let Some(chunk_lines) = input.chunk_lines {
            chunk_lines / 2
        } else {
            crate::config::constants::chunking::CHUNK_END_LINES
        };

        if total_lines <= start_chunk + end_chunk {
            // File is small enough, return all content
            return Ok((content, false, Some(total_lines)));
        }

        // Create chunked content
        let mut chunked_content = String::new();

        // Add first N lines
        for (i, line) in lines.iter().enumerate().take(start_chunk) {
            if i > 0 {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        // Add truncation indicator
        chunked_content.push_str(&format!(
            "\n\n... [{} lines truncated - showing first {} and last {} lines] ...\n\n",
            total_lines - start_chunk - end_chunk,
            start_chunk,
            end_chunk
        ));

        // Add last N lines
        let start_idx = total_lines.saturating_sub(end_chunk);
        for (i, line) in lines.iter().enumerate().skip(start_idx) {
            if i > start_idx {
                chunked_content.push('\n');
            }
            chunked_content.push_str(line);
        }

        Ok((chunked_content, true, Some(total_lines)))
    }
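
    // Worked example (numbers illustrative): a 1_000-line file read with
    // chunk_lines = 200 gives start_chunk = end_chunk = 100, so the result is the
    // first 100 lines, a "... [800 lines truncated - showing first 100 and last
    // 100 lines] ..." marker, then the last 100 lines, with truncated = true.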

    /// Log chunking operations for debugging
    async fn log_chunking_operation(
        &self,
        file_path: &Path,
        truncated: bool,
        total_lines: Option<usize>,
    ) -> Result<()> {
        if truncated {
            let log_entry = json!({
                "operation": "read_file_chunked",
                "file_path": file_path.to_string_lossy(),
                "truncated": true,
                "total_lines": total_lines,
                "timestamp": chrono::Utc::now().to_rfc3339()
            });

            info!(
                "File chunking operation: {}",
                serde_json::to_string(&log_entry)?
            );
        }
        Ok(())
    }

    fn resolve_file_path(&self, path: &str) -> Result<Vec<PathBuf>> {
        let mut paths = Vec::new();

        // Try exact path first
        paths.push(self.workspace_root.join(path));

        // If it's just a filename, try common directories that exist in most projects
        if !path.contains('/') && !path.contains('\\') {
            // Generic source directories found in most projects
            paths.push(self.workspace_root.join("src").join(path));
            paths.push(self.workspace_root.join("lib").join(path));
            paths.push(self.workspace_root.join("bin").join(path));
            paths.push(self.workspace_root.join("app").join(path));
            paths.push(self.workspace_root.join("source").join(path));
            paths.push(self.workspace_root.join("sources").join(path));
            paths.push(self.workspace_root.join("include").join(path));
            paths.push(self.workspace_root.join("docs").join(path));
            paths.push(self.workspace_root.join("doc").join(path));
            paths.push(self.workspace_root.join("examples").join(path));
            paths.push(self.workspace_root.join("example").join(path));
            paths.push(self.workspace_root.join("tests").join(path));
            paths.push(self.workspace_root.join("test").join(path));
        }

        // Try case-insensitive variants for filenames
        if !path.contains('/') && !path.contains('\\') {
            if let Ok(entries) = std::fs::read_dir(&self.workspace_root) {
                for entry in entries.flatten() {
                    if let Ok(name) = entry.file_name().into_string() {
                        if name.to_lowercase() == path.to_lowercase() {
                            paths.push(entry.path());
                        }
                    }
                }
            }
        }

        Ok(paths)
    }
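
    // Illustrative resolution order for a bare filename such as "main.rs"
    // (WORKSPACE stands for the configured workspace root): WORKSPACE/main.rs,
    // WORKSPACE/src/main.rs, WORKSPACE/lib/main.rs, ..., plus any case-insensitive
    // match among the workspace root's direct entries.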
}