steer_workspace/local/workspace.rs

use async_trait::async_trait;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use thiserror::Error;
use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::sync::{Mutex, RwLock};
use tokio::task;
use tokio_util::sync::CancellationToken;
use tracing::info;

use crate::error::{Result as WorkspaceResult, WorkspaceError};
use crate::ops::{
    ApplyEditsRequest, AstGrepRequest, GlobRequest, GrepRequest, ListDirectoryRequest,
    ReadFileRequest, WorkspaceOpContext, WriteFileRequest,
};
use crate::result::{
    EditResult, FileContentResult, FileEntry, FileListResult, GlobResult, SearchMatch, SearchResult,
};
use crate::{CachedEnvironment, EnvironmentInfo, Workspace, WorkspaceMetadata, WorkspaceType};

use ast_grep_core::tree_sitter::StrDoc;
use ast_grep_core::{AstGrep, Pattern};
use ast_grep_language::{LanguageExt, SupportLang};
use grep_regex::RegexMatcherBuilder;
use grep_searcher::sinks::UTF8;
use grep_searcher::{BinaryDetection, SearcherBuilder};
use ignore::WalkBuilder;

/// Local filesystem workspace
pub struct LocalWorkspace {
    path: PathBuf,
    environment_cache: Arc<RwLock<Option<CachedEnvironment>>>,
    metadata: WorkspaceMetadata,
}

const MAX_READ_BYTES: usize = 50 * 1024;
const MAX_LINE_LENGTH: usize = 2000;

/// Per-path async locks used to serialize concurrent edits and writes to the same file.
static FILE_LOCKS: std::sync::LazyLock<Mutex<HashMap<String, Arc<Mutex<()>>>>> =
    std::sync::LazyLock::new(|| Mutex::new(HashMap::new()));

/// Returns the lock associated with `file_path`, creating it on first use.
async fn get_file_lock(file_path: &str) -> Arc<Mutex<()>> {
    let mut locks_map_guard = FILE_LOCKS.lock().await;
    locks_map_guard
        .entry(file_path.to_string())
        .or_insert_with(|| Arc::new(Mutex::new(())))
        .clone()
}

/// Resolves `path` against `base`, leaving absolute paths untouched.
fn resolve_path(base: &Path, path: &str) -> PathBuf {
    if Path::new(path).is_absolute() {
        PathBuf::from(path)
    } else {
        base.join(path)
    }
}

#[derive(Error, Debug)]
enum ViewError {
    #[error("Failed to open file '{path}': {source}")]
    FileOpen {
        path: String,
        #[source]
        source: std::io::Error,
    },
    #[error("Failed to get file metadata for '{path}': {source}")]
    Metadata {
        path: String,
        #[source]
        source: std::io::Error,
    },
    #[error("File read cancelled")]
    Cancelled,
    #[error("Error reading file line by line: {source}")]
    ReadLine {
        #[source]
        source: std::io::Error,
    },
    #[error("Error reading file: {source}")]
    Read {
        #[source]
        source: std::io::Error,
    },
}

#[derive(Error, Debug)]
enum LsError {
    #[error("Path is not a directory: {path}")]
    NotADirectory { path: String },
    #[error("Operation was cancelled")]
    Cancelled,
    #[error("Task join error: {source}")]
    TaskJoinError {
        #[from]
        #[source]
        source: tokio::task::JoinError,
    },
}

/// Reads a file and returns its contents with line numbers. When an offset or
/// limit is given it streams line by line from `offset`; otherwise it reads at
/// most `MAX_READ_BYTES` bytes from the start of the file.
async fn view_file_internal(
    file_path: &Path,
    offset: Option<u64>,
    limit: Option<u64>,
    cancellation_token: &CancellationToken,
) -> std::result::Result<FileContentResult, ViewError> {
    let mut file = tokio::fs::File::open(file_path)
        .await
        .map_err(|e| ViewError::FileOpen {
            path: file_path.display().to_string(),
            source: e,
        })?;

    let file_size = file
        .metadata()
        .await
        .map_err(|e| ViewError::Metadata {
            path: file_path.display().to_string(),
            source: e,
        })?
        .len();

    let start_line = offset.unwrap_or(1).max(1) as usize;
    let line_limit = limit.map(|v| v.max(1) as usize);

    let (content, total_lines, truncated) = if start_line > 1 || line_limit.is_some() {
        let mut reader = BufReader::new(file);
        let mut current_line_num = 1usize;
        let mut lines_read = 0usize;
        let mut lines = Vec::new();

        loop {
            if cancellation_token.is_cancelled() {
                return Err(ViewError::Cancelled);
            }

            let mut line = String::new();
            match reader.read_line(&mut line).await {
                Ok(0) => break,
                Ok(_) => {
                    if current_line_num >= start_line {
                        if line.len() > MAX_LINE_LENGTH {
                            line.truncate(MAX_LINE_LENGTH);
                            line.push_str("... [line truncated]");
                        }
                        lines.push(line.trim_end().to_string());
                        lines_read += 1;
                        if line_limit.is_some_and(|l| lines_read >= l) {
                            break;
                        }
                    }
                    current_line_num += 1;
                }
                Err(e) => return Err(ViewError::ReadLine { source: e }),
            }
        }

        let total_lines = lines.len();
        let truncated = line_limit.is_some_and(|l| lines_read >= l);
        let numbered_lines: Vec<String> = lines
            .into_iter()
            .enumerate()
            .map(|(i, line)| format!("{:5}\t{}", start_line + i, line))
            .collect();

        (numbered_lines.join("\n"), total_lines, truncated)
    } else {
        let read_size = std::cmp::min(file_size as usize, MAX_READ_BYTES);
        let mut buffer = vec![0u8; read_size];
        let mut bytes_read = 0usize;

        while bytes_read < read_size {
            if cancellation_token.is_cancelled() {
                return Err(ViewError::Cancelled);
            }
            let n = file
                .read(&mut buffer[bytes_read..])
                .await
                .map_err(|e| ViewError::Read { source: e })?;
            if n == 0 {
                break;
            }
            bytes_read += n;
        }

        buffer.truncate(bytes_read);
        let content = String::from_utf8_lossy(&buffer);
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();
        let truncated = file_size as usize > MAX_READ_BYTES;
        let numbered_lines: Vec<String> = lines
            .into_iter()
            .enumerate()
            .map(|(i, line)| format!("{:5}\t{}", i + 1, line))
            .collect();

        (numbered_lines.join("\n"), total_lines, truncated)
    };

    Ok(FileContentResult {
        content,
        file_path: file_path.display().to_string(),
        line_count: total_lines,
        truncated,
    })
}

/// Lists the immediate children of `path_str`, honoring ignore rules, with
/// directories sorted before files and names sorted alphabetically within
/// each group.
fn list_directory_internal(
    path_str: &str,
    ignore_patterns: &[String],
    cancellation_token: &CancellationToken,
) -> std::result::Result<FileListResult, LsError> {
    let path = Path::new(path_str);
    if !path.is_dir() {
        return Err(LsError::NotADirectory {
            path: path_str.to_string(),
        });
    }

    if cancellation_token.is_cancelled() {
        return Err(LsError::Cancelled);
    }

    let mut walk_builder = WalkBuilder::new(path);
    walk_builder.max_depth(Some(1));
    walk_builder.git_ignore(true);
    walk_builder.ignore(true);
    walk_builder.hidden(false);

    for pattern in ignore_patterns {
        walk_builder.add_ignore(pattern);
    }

    let walker = walk_builder.build();
    let mut entries = Vec::new();

    for result in walker.skip(1) {
        if cancellation_token.is_cancelled() {
            return Err(LsError::Cancelled);
        }

        match result {
            Ok(entry) => {
                let file_path = entry.path();
                let file_name = file_path.file_name().unwrap_or_default().to_string_lossy();
                let metadata = file_path.metadata().ok();
                let size = if file_path.is_dir() {
                    None
                } else {
                    metadata.as_ref().map(|m| m.len())
                };

                entries.push(FileEntry {
                    path: file_name.to_string(),
                    is_directory: file_path.is_dir(),
                    size,
                    permissions: None,
                });
            }
            Err(e) => {
                tracing::warn!("Error accessing entry: {e}");
            }
        }
    }

    entries.sort_by(|a, b| match (a.is_directory, b.is_directory) {
        (true, false) => std::cmp::Ordering::Less,
        (false, true) => std::cmp::Ordering::Greater,
        _ => a.path.cmp(&b.path),
    });

    Ok(FileListResult {
        entries,
        base_path: path_str.to_string(),
    })
}

/// Recursively greps `base_path` for `pattern`, falling back to a literal
/// (escaped) match when the pattern is not a valid regex. Matches are grouped
/// by file, with the most recently modified files first.
fn grep_search_internal(
    pattern: &str,
    include: Option<&str>,
    base_path: &Path,
    cancellation_token: &CancellationToken,
) -> std::result::Result<SearchResult, String> {
    if !base_path.exists() {
        return Err(format!("Path does not exist: {}", base_path.display()));
    }

    let matcher = if let Ok(m) = RegexMatcherBuilder::new()
        .line_terminator(Some(b'\n'))
        .build(pattern)
    {
        m
    } else {
        let escaped = regex::escape(pattern);
        RegexMatcherBuilder::new()
            .line_terminator(Some(b'\n'))
            .build(&escaped)
            .map_err(|e| format!("Failed to create matcher: {e}"))?
    };

    let mut searcher = SearcherBuilder::new()
        .binary_detection(BinaryDetection::quit(b'\x00'))
        .line_number(true)
        .build();

    let mut walker = WalkBuilder::new(base_path);
    walker.hidden(false);
    walker.git_ignore(true);
    walker.git_global(true);
    walker.git_exclude(true);

    let include_pattern = include
        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid glob pattern: {e}")))
        .transpose()?;

    let mut all_matches = Vec::new();
    let mut files_searched = 0usize;

    for result in walker.build() {
        if cancellation_token.is_cancelled() {
            return Ok(SearchResult {
                matches: all_matches,
                total_files_searched: files_searched,
                search_completed: false,
            });
        }

        let entry = match result {
            Ok(e) => e,
            Err(_) => continue,
        };

        let path = entry.path();
        if !path.is_file() {
            continue;
        }

        if let Some(ref pattern) = include_pattern
            && !path_matches_glob(path, pattern, base_path)
        {
            continue;
        }

        files_searched += 1;

        let mut matches_in_file = Vec::new();
        let search_result = searcher.search_path(
            &matcher,
            path,
            UTF8(|line_num, line| {
                let display_path = match path.canonicalize() {
                    Ok(canonical) => canonical.display().to_string(),
                    Err(_) => path.display().to_string(),
                };
                matches_in_file.push(SearchMatch {
                    file_path: display_path,
                    line_number: line_num as usize,
                    line_content: line.trim_end().to_string(),
                    column_range: None,
                });
                Ok(true)
            }),
        );

        if let Err(e) = search_result
            && e.kind() == std::io::ErrorKind::InvalidData
        {
            continue;
        }

        all_matches.extend(matches_in_file);
    }

    if !all_matches.is_empty() {
        let mut file_groups: HashMap<String, Vec<SearchMatch>> = HashMap::new();
        for match_item in all_matches {
            file_groups
                .entry(match_item.file_path.clone())
                .or_default()
                .push(match_item);
        }

        let mut sorted_files: Vec<(String, std::time::SystemTime)> = Vec::new();
        for file_path in file_groups.keys() {
            if cancellation_token.is_cancelled() {
                return Ok(SearchResult {
                    matches: Vec::new(),
                    total_files_searched: files_searched,
                    search_completed: false,
                });
            }

            let mtime = Path::new(file_path)
                .metadata()
                .and_then(|m| m.modified())
                .unwrap_or(std::time::SystemTime::UNIX_EPOCH);
            sorted_files.push((file_path.clone(), mtime));
        }
        sorted_files.sort_by(|a, b| b.1.cmp(&a.1));

        let mut sorted_matches = Vec::new();
        for (file_path, _) in sorted_files {
            if let Some(file_matches) = file_groups.remove(&file_path) {
                sorted_matches.extend(file_matches);
            }
        }
        all_matches = sorted_matches;
    }

    Ok(SearchResult {
        matches: all_matches,
        total_files_searched: files_searched,
        search_completed: true,
    })
}

/// Structural search over `base_path` using an ast-grep pattern. The language
/// is taken from `lang` when provided, otherwise inferred from each file's
/// extension; results are sorted by file path, then line number.
fn astgrep_search_internal(
    pattern: &str,
    lang: Option<&str>,
    include: Option<&str>,
    exclude: Option<&str>,
    base_path: &Path,
    cancellation_token: &CancellationToken,
) -> std::result::Result<SearchResult, String> {
    if !base_path.exists() {
        return Err(format!("Path does not exist: {}", base_path.display()));
    }

    let mut walker = WalkBuilder::new(base_path);
    walker.hidden(false);
    walker.git_ignore(true);
    walker.git_global(true);
    walker.git_exclude(true);

    let include_pattern = include
        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid include glob pattern: {e}")))
        .transpose()?;

    let exclude_pattern = exclude
        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid exclude glob pattern: {e}")))
        .transpose()?;

    let mut all_matches = Vec::new();
    let mut files_searched = 0usize;

    for result in walker.build() {
        if cancellation_token.is_cancelled() {
            return Ok(SearchResult {
                matches: all_matches,
                total_files_searched: files_searched,
                search_completed: false,
            });
        }

        let entry = match result {
            Ok(e) => e,
            Err(_) => continue,
        };

        let path = entry.path();
        if !path.is_file() {
            continue;
        }

        if let Some(ref pattern) = include_pattern
            && !path_matches_glob(path, pattern, base_path)
        {
            continue;
        }

        if let Some(ref pattern) = exclude_pattern
            && path_matches_glob(path, pattern, base_path)
        {
            continue;
        }

        let detected_lang = if let Some(l) = lang {
            match SupportLang::from_str(l) {
                Ok(lang) => Some(lang),
                Err(_) => continue,
            }
        } else {
            SupportLang::from_extension(path).or_else(|| {
                path.extension()
                    .and_then(|ext| ext.to_str())
                    .and_then(|ext| match ext {
                        "jsx" => Some(SupportLang::JavaScript),
                        "mjs" => Some(SupportLang::JavaScript),
                        _ => None,
                    })
            })
        };

        let Some(language) = detected_lang else {
            continue;
        };

        files_searched += 1;
        let content = match std::fs::read_to_string(path) {
            Ok(c) => c,
            Err(_) => continue,
        };

        let ast_grep = language.ast_grep(&content);
        let pattern_matcher = match Pattern::try_new(pattern, language) {
            Ok(p) => p,
            Err(e) => return Err(format!("Invalid pattern: {e}")),
        };

        let relative_path = path.strip_prefix(base_path).unwrap_or(path);
        let file_matches = find_matches(&ast_grep, &pattern_matcher, relative_path, &content);

        for m in file_matches {
            all_matches.push(SearchMatch {
                file_path: m.file,
                line_number: m.line,
                line_content: m.context.trim().to_string(),
                column_range: Some((m.column, m.column + m.matched_code.len())),
            });
        }
    }

    all_matches.sort_by(|a, b| {
        a.file_path
            .cmp(&b.file_path)
            .then(a.line_number.cmp(&b.line_number))
    });

    Ok(SearchResult {
        matches: all_matches,
        total_files_searched: files_searched,
        search_completed: true,
    })
}

#[derive(Debug)]
struct AstGrepMatch {
    file: String,
    line: usize,
    column: usize,
    matched_code: String,
    context: String,
}

/// Runs `pattern` against a parsed file and returns each match with its
/// 1-based line/column and the full source line(s) it spans as context.
fn find_matches(
    ast_grep: &AstGrep<StrDoc<SupportLang>>,
    pattern: &Pattern,
    path: &Path,
    content: &str,
) -> Vec<AstGrepMatch> {
    let root = ast_grep.root();
    let matches = root.find_all(pattern);

    let mut results = Vec::new();
    for node_match in matches {
        let node = node_match.get_node();
        let range = node.range();
        let start_pos = node.start_pos();
        let matched_code = node.text();

        let line_start = content[..range.start].rfind('\n').map_or(0, |i| i + 1);
        let line_end = content[range.end..]
            .find('\n')
            .map_or(content.len(), |i| range.end + i);
        let context = &content[line_start..line_end];

        results.push(AstGrepMatch {
            file: path.display().to_string(),
            line: start_pos.line() + 1,
            column: start_pos.column(node) + 1,
            matched_code: matched_code.to_string(),
            context: context.to_string(),
        });
    }

    results
}

/// Returns true if the glob matches the full path, the path relative to
/// `base_path`, or the bare file name.
fn path_matches_glob(path: &Path, pattern: &glob::Pattern, base_path: &Path) -> bool {
    if pattern.matches_path(path) {
        return true;
    }

    if let Ok(relative_path) = path.strip_prefix(base_path)
        && pattern.matches_path(relative_path)
    {
        return true;
    }

    if let Some(filename) = path.file_name()
        && pattern.matches(&filename.to_string_lossy())
    {
        return true;
    }

    false
}

trait LanguageHelpers {
    fn from_extension(path: &Path) -> Option<SupportLang>;
}

impl LanguageHelpers for SupportLang {
    fn from_extension(path: &Path) -> Option<SupportLang> {
        ast_grep_language::Language::from_path(path)
    }
}

/// Applies a sequence of find/replace edits to `file_path` in memory and
/// returns the resulting content, the number of edits applied, and whether
/// the file was created (or wholly overwritten). An empty `old_string` is
/// only valid for creating a new file or for a single whole-file overwrite.
async fn perform_edit_operations(
    file_path: &Path,
    operations: &[crate::ops::EditOperation],
    token: Option<&CancellationToken>,
) -> WorkspaceResult<(String, usize, bool)> {
    if token.is_some_and(|t| t.is_cancelled()) {
        return Err(WorkspaceError::ToolExecution(
            "Operation cancelled".to_string(),
        ));
    }

    let mut current_content: String;
    let mut file_created_this_op = false;

    match tokio::fs::read_to_string(file_path).await {
        Ok(content_from_file) => {
            current_content = content_from_file;
        }
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            if operations.is_empty() {
                return Err(WorkspaceError::ToolExecution(format!(
                    "File {} does not exist and no operations provided to create it.",
                    file_path.display()
                )));
            }
            let first_op = &operations[0];
            if first_op.old_string.is_empty() {
                if let Some(parent) = file_path.parent()
                    && !tokio::fs::metadata(parent)
                        .await
                        .map(|m| m.is_dir())
                        .unwrap_or(false)
                {
                    if token.is_some_and(|t| t.is_cancelled()) {
                        return Err(WorkspaceError::ToolExecution(
                            "Operation cancelled".to_string(),
                        ));
                    }
                    tokio::fs::create_dir_all(parent).await.map_err(|e| {
                        WorkspaceError::Io(format!(
                            "Failed to create directory {}: {}",
                            parent.display(),
                            e
                        ))
                    })?;
                }
                current_content = first_op.new_string.clone();
                file_created_this_op = true;
            } else {
                return Err(WorkspaceError::Io(format!(
                    "File {} not found, and the first/only operation's old_string is not empty (required for creation).",
                    file_path.display()
                )));
            }
        }
        Err(e) => {
            return Err(WorkspaceError::Io(format!(
                "Failed to read file {}: {e}",
                file_path.display()
            )));
        }
    }

    if operations.is_empty() {
        return Ok((current_content, 0, false));
    }

    let mut edits_applied_count = 0usize;
    for (index, edit_op) in operations.iter().enumerate() {
        if token.is_some_and(|t| t.is_cancelled()) {
            return Err(WorkspaceError::ToolExecution(
                "Operation cancelled".to_string(),
            ));
        }

        if edit_op.old_string.is_empty() {
            if index == 0 && file_created_this_op {
                // creation step
            } else if index == 0 && operations.len() == 1 {
                current_content = edit_op.new_string.clone();
                if !file_created_this_op {
                    file_created_this_op = true;
                }
            } else {
                return Err(WorkspaceError::ToolExecution(format!(
                    "Edit #{} for file {} has an empty old_string. This is only allowed for the first operation if the file is being created or for a single operation to overwrite the file.",
                    index + 1,
                    file_path.display()
                )));
            }
        } else {
            let occurrences = current_content.matches(&edit_op.old_string).count();
            if occurrences == 0 {
                return Err(WorkspaceError::ToolExecution(format!(
                    "For edit #{}, string not found in file {} (after {} previous successful edits). String to find (first 50 chars): '{}'",
                    index + 1,
                    file_path.display(),
                    edits_applied_count,
                    edit_op.old_string.chars().take(50).collect::<String>()
                )));
            }
            if occurrences > 1 {
                return Err(WorkspaceError::ToolExecution(format!(
                    "For edit #{}, found {} occurrences of string in file {} (after {} previous successful edits). String to find (first 50 chars): '{}'. Please provide more context.",
                    index + 1,
                    occurrences,
                    file_path.display(),
                    edits_applied_count,
                    edit_op.old_string.chars().take(50).collect::<String>()
                )));
            }
            current_content = current_content.replace(&edit_op.old_string, &edit_op.new_string);
        }
        edits_applied_count += 1;
    }

    Ok((current_content, edits_applied_count, file_created_this_op))
}

impl LocalWorkspace {
    pub async fn with_path(path: PathBuf) -> WorkspaceResult<Self> {
        let metadata = WorkspaceMetadata {
            id: format!("local:{}", path.display()),
            workspace_type: WorkspaceType::Local,
            location: path.display().to_string(),
        };

        Ok(Self {
            path,
            environment_cache: Arc::new(RwLock::new(None)),
            metadata,
        })
    }

    /// Collect environment information for the local workspace
    async fn collect_environment(&self) -> WorkspaceResult<EnvironmentInfo> {
        EnvironmentInfo::collect_for_path(&self.path)
    }
}

impl std::fmt::Debug for LocalWorkspace {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LocalWorkspace")
            .field("path", &self.path)
            .field("metadata", &self.metadata)
            .finish_non_exhaustive()
    }
}

#[async_trait]
impl Workspace for LocalWorkspace {
    async fn environment(&self) -> WorkspaceResult<EnvironmentInfo> {
        let mut cache = self.environment_cache.write().await;

        // Check if we have valid cached data
        if let Some(cached) = cache.as_ref()
            && !cached.is_expired()
        {
            return Ok(cached.info.clone());
        }

        // Collect fresh environment info
        let env_info = self.collect_environment().await?;

        // Cache it with 5 minute TTL
        *cache = Some(CachedEnvironment::new(
            env_info.clone(),
            Duration::from_secs(300), // 5 minutes
        ));

        Ok(env_info)
    }

    fn metadata(&self) -> WorkspaceMetadata {
        self.metadata.clone()
    }

    async fn invalidate_environment_cache(&self) {
        let mut cache = self.environment_cache.write().await;
        *cache = None;
    }

    async fn list_files(
        &self,
        query: Option<&str>,
        max_results: Option<usize>,
    ) -> WorkspaceResult<Vec<String>> {
        use crate::utils::FileListingUtils;

        info!(target: "workspace.list_files", "Listing files in workspace: {:?}", self.path);

        FileListingUtils::list_files(&self.path, query, max_results).map_err(WorkspaceError::from)
    }

    fn working_directory(&self) -> &std::path::Path {
        &self.path
    }

    async fn read_file(
        &self,
        request: ReadFileRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<FileContentResult> {
        let abs_path = resolve_path(&self.path, &request.file_path);
        view_file_internal(
            &abs_path,
            request.offset,
            request.limit,
            &ctx.cancellation_token,
        )
        .await
        .map_err(|e| WorkspaceError::Io(e.to_string()))
    }

    async fn list_directory(
        &self,
        request: ListDirectoryRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<FileListResult> {
        let target_path = resolve_path(&self.path, &request.path);
        let target_path_str = target_path.to_string_lossy().to_string();
        let ignore_patterns = request.ignore.unwrap_or_default();
        let cancellation_token = ctx.cancellation_token.clone();

        let result = task::spawn_blocking(move || {
            list_directory_internal(&target_path_str, &ignore_patterns, &cancellation_token)
        })
        .await;

        match result {
            Ok(listing_result) => listing_result.map_err(|e| WorkspaceError::Io(e.to_string())),
            Err(join_error) => Err(WorkspaceError::Io(format!("Task join error: {join_error}"))),
        }
    }

    async fn glob(
        &self,
        request: GlobRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<GlobResult> {
        if ctx.cancellation_token.is_cancelled() {
            return Err(WorkspaceError::ToolExecution(
                "Operation cancelled".to_string(),
            ));
        }

        let search_path = request.path.as_deref().unwrap_or(".");
        let base_path = resolve_path(&self.path, search_path);

        let glob_pattern = format!("{}/{}", base_path.display(), request.pattern);

        let mut results = Vec::new();
        match glob::glob(&glob_pattern) {
            Ok(paths) => {
                for entry in paths {
                    if ctx.cancellation_token.is_cancelled() {
                        return Err(WorkspaceError::ToolExecution(
                            "Operation cancelled".to_string(),
                        ));
                    }

                    match entry {
                        Ok(path) => results.push(path.display().to_string()),
                        Err(e) => {
                            return Err(WorkspaceError::ToolExecution(format!(
                                "Error matching glob pattern '{glob_pattern}': {e}"
                            )));
                        }
                    }
                }
            }
            Err(e) => {
                return Err(WorkspaceError::ToolExecution(format!(
                    "Invalid glob pattern '{glob_pattern}': {e}"
                )));
            }
        }

        results.sort();
        Ok(GlobResult {
            matches: results,
            pattern: request.pattern,
        })
    }

    async fn grep(
        &self,
        request: GrepRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<SearchResult> {
        let search_path = request.path.as_deref().unwrap_or(".");
        let base_path = resolve_path(&self.path, search_path);

        let pattern = request.pattern.clone();
        let include = request.include.clone();
        let cancellation_token = ctx.cancellation_token.clone();

        let result = task::spawn_blocking(move || {
            grep_search_internal(
                &pattern,
                include.as_deref(),
                &base_path,
                &cancellation_token,
            )
        })
        .await;

        match result {
            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
            Err(e) => Err(WorkspaceError::ToolExecution(format!(
                "Task join error: {e}"
            ))),
        }
    }

    async fn astgrep(
        &self,
        request: AstGrepRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<SearchResult> {
        let search_path = request.path.as_deref().unwrap_or(".");
        let base_path = resolve_path(&self.path, search_path);

        let pattern = request.pattern.clone();
        let lang = request.lang.clone();
        let include = request.include.clone();
        let exclude = request.exclude.clone();
        let cancellation_token = ctx.cancellation_token.clone();

        let result = task::spawn_blocking(move || {
            astgrep_search_internal(
                &pattern,
                lang.as_deref(),
                include.as_deref(),
                exclude.as_deref(),
                &base_path,
                &cancellation_token,
            )
        })
        .await;

        match result {
            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
            Err(e) => Err(WorkspaceError::ToolExecution(format!(
                "Task join error: {e}"
            ))),
        }
    }

    async fn apply_edits(
        &self,
        request: ApplyEditsRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<EditResult> {
        let abs_path = resolve_path(&self.path, &request.file_path);
        let abs_path_str = abs_path.display().to_string();
        let file_lock = get_file_lock(&abs_path_str).await;
        let _lock_guard = file_lock.lock().await;

        let (final_content, num_ops, created_or_overwritten) =
            perform_edit_operations(&abs_path, &request.edits, Some(&ctx.cancellation_token))
                .await?;

        if created_or_overwritten || num_ops > 0 {
            if ctx.cancellation_token.is_cancelled() {
                return Err(WorkspaceError::ToolExecution(
                    "Operation cancelled".to_string(),
                ));
            }
            tokio::fs::write(&abs_path, &final_content)
                .await
                .map_err(|e| {
                    WorkspaceError::Io(format!(
                        "Failed to write file {}: {}",
                        abs_path.display(),
                        e
                    ))
                })?;

            Ok(EditResult {
                file_path: abs_path_str,
                changes_made: num_ops,
                file_created: created_or_overwritten,
                old_content: None,
                new_content: Some(final_content),
            })
        } else {
            Ok(EditResult {
                file_path: abs_path_str,
                changes_made: 0,
                file_created: false,
                old_content: None,
                new_content: None,
            })
        }
    }

    async fn write_file(
        &self,
        request: WriteFileRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<EditResult> {
        let abs_path = resolve_path(&self.path, &request.file_path);
        let abs_path_str = abs_path.display().to_string();
        let file_lock = get_file_lock(&abs_path_str).await;
        let _lock_guard = file_lock.lock().await;

        if ctx.cancellation_token.is_cancelled() {
            return Err(WorkspaceError::ToolExecution(
                "Operation cancelled".to_string(),
            ));
        }

        if let Some(parent) = abs_path.parent()
            && !parent.exists()
        {
            tokio::fs::create_dir_all(parent).await.map_err(|e| {
                WorkspaceError::Io(format!(
                    "Failed to create parent directory {}: {e}",
                    parent.display()
                ))
            })?;
        }

        let file_existed = abs_path.exists();
        tokio::fs::write(&abs_path, &request.content)
            .await
            .map_err(|e| {
                WorkspaceError::Io(format!("Failed to write file {}: {e}", abs_path.display()))
            })?;

        Ok(EditResult {
            file_path: abs_path_str,
            changes_made: 1,
            file_created: !file_existed,
            old_content: None,
            new_content: Some(request.content),
        })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[tokio::test]
    async fn test_local_workspace_creation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
    }

    #[tokio::test]
    async fn test_local_workspace_with_path() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
        assert_eq!(
            workspace.metadata().location,
            temp_dir.path().display().to_string()
        );
    }

    #[tokio::test]
    async fn test_environment_caching() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // First call should collect fresh data
        let env1 = workspace.environment().await.unwrap();

        // Second call should return cached data
        let env2 = workspace.environment().await.unwrap();

        // Should be identical
        assert_eq!(env1.working_directory, env2.working_directory);
        assert_eq!(env1.vcs.is_some(), env2.vcs.is_some());
        assert_eq!(env1.platform, env2.platform);
    }

    #[tokio::test]
    async fn test_cache_invalidation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Get initial environment
        let _ = workspace.environment().await.unwrap();

        // Invalidate cache
        workspace.invalidate_environment_cache().await;

        // Should work fine and fetch fresh data
        let env = workspace.environment().await.unwrap();
        assert!(!env.working_directory.as_os_str().is_empty());
    }
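
    // Sketch, not part of the original test suite: spot-checks `path_matches_glob`,
    // which accepts a path if the full path, the base-relative path, or the bare
    // file name matches the glob.
    #[test]
    fn test_path_matches_glob_sketch() {
        let base = Path::new("/workspace");
        let rs = glob::Pattern::new("*.rs").unwrap();
        let toml = glob::Pattern::new("*.toml").unwrap();
        let path = Path::new("/workspace/src/lib.rs");

        // Matches via the file-name fallback even if the full path does not.
        assert!(path_matches_glob(path, &rs, base));
        assert!(!path_matches_glob(path, &toml, base));
    }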

    #[tokio::test]
    async fn test_environment_collection() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let env = workspace.environment().await.unwrap();

        // Verify basic environment info
        let expected_path = temp_dir
            .path()
            .canonicalize()
            .unwrap_or_else(|_| temp_dir.path().to_path_buf());

        // Canonicalize both paths for comparison on macOS
        let actual_canonical = env
            .working_directory
            .canonicalize()
            .unwrap_or(env.working_directory.clone());
        let expected_canonical = expected_path
            .canonicalize()
            .unwrap_or(expected_path.clone());

        assert_eq!(actual_canonical, expected_canonical);
    }
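
    // Sketch, not part of the original test suite: runs `grep_search_internal`
    // over a throwaway directory and checks that a plain-text pattern is found
    // and the search is reported as completed.
    #[test]
    fn test_grep_search_internal_sketch() {
        let temp_dir = tempdir().unwrap();
        std::fs::write(temp_dir.path().join("hello.txt"), "hello world\n").unwrap();
        std::fs::write(temp_dir.path().join("other.txt"), "nothing here\n").unwrap();

        let token = CancellationToken::new();
        let result = grep_search_internal("hello", None, temp_dir.path(), &token).unwrap();

        assert!(result.search_completed);
        assert_eq!(result.matches.len(), 1);
        assert_eq!(result.matches[0].line_number, 1);
        assert!(result.matches[0].line_content.contains("hello world"));
    }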

    #[tokio::test]
    async fn test_list_files() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create some test files
        std::fs::write(temp_dir.path().join("test.rs"), "test").unwrap();
        std::fs::write(temp_dir.path().join("main.rs"), "main").unwrap();
        std::fs::create_dir(temp_dir.path().join("src")).unwrap();
        std::fs::write(temp_dir.path().join("src/lib.rs"), "lib").unwrap();

        // List all files
        let files = workspace.list_files(None, None).await.unwrap();
        assert_eq!(files.len(), 4); // 3 files + 1 directory
        assert!(files.contains(&"test.rs".to_string()));
        assert!(files.contains(&"main.rs".to_string()));
        assert!(files.contains(&"src/".to_string())); // Directory with trailing slash
        assert!(files.contains(&"src/lib.rs".to_string()));

        // Test with query
        let files = workspace.list_files(Some("test"), None).await.unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(files[0], "test.rs");

        // Test with max_results
        let files = workspace.list_files(None, Some(2)).await.unwrap();
        assert_eq!(files.len(), 2);
    }
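
    // Sketch, not part of the original test suite: drives `view_file_internal`
    // through its offset/limit branch on a small temp file and checks that the
    // returned slice is numbered from the requested line and flagged as
    // truncated once the limit is hit.
    #[tokio::test]
    async fn test_view_file_internal_offset_limit_sketch() {
        let temp_dir = tempdir().unwrap();
        let file_path = temp_dir.path().join("lines.txt");
        let body: String = (1..=10).map(|i| format!("line {i}\n")).collect();
        std::fs::write(&file_path, body).unwrap();

        let token = CancellationToken::new();
        let result = view_file_internal(&file_path, Some(3), Some(4), &token)
            .await
            .unwrap();

        assert_eq!(result.line_count, 4);
        assert!(result.truncated);
        assert!(result.content.starts_with("    3\tline 3"));
    }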

    #[tokio::test]
    async fn test_list_files_includes_dotfiles() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create a dotfile
        std::fs::write(temp_dir.path().join(".gitignore"), "target/").unwrap();

        let files = workspace.list_files(None, None).await.unwrap();
        assert!(files.contains(&".gitignore".to_string()));
    }
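
    // Sketch, not part of the original test suite: exercises the private
    // `resolve_path` helper directly, checking that absolute paths pass through
    // untouched while relative paths are joined onto the base. Restricted to
    // Unix because absolute-path semantics differ on Windows.
    #[cfg(unix)]
    #[test]
    fn test_resolve_path_sketch() {
        let base = Path::new("/workspace");
        assert_eq!(
            resolve_path(base, "/etc/hosts"),
            PathBuf::from("/etc/hosts")
        );
        assert_eq!(
            resolve_path(base, "src/lib.rs"),
            PathBuf::from("/workspace/src/lib.rs")
        );
    }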

    #[tokio::test]
    async fn test_working_directory() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(workspace.working_directory(), temp_dir.path());
    }
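
    // Sketch, not part of the original test suite: checks `list_directory_internal`'s
    // ordering contract (directories before files, then alphabetical) and that
    // directory entries carry no size.
    #[test]
    fn test_list_directory_internal_ordering_sketch() {
        let temp_dir = tempdir().unwrap();
        std::fs::write(temp_dir.path().join("a.txt"), "a").unwrap();
        std::fs::write(temp_dir.path().join("b.txt"), "b").unwrap();
        std::fs::create_dir(temp_dir.path().join("zdir")).unwrap();

        let token = CancellationToken::new();
        let listing = list_directory_internal(
            &temp_dir.path().display().to_string(),
            &[],
            &token,
        )
        .unwrap();

        let names: Vec<&str> = listing.entries.iter().map(|e| e.path.as_str()).collect();
        assert_eq!(names, vec!["zdir", "a.txt", "b.txt"]);
        assert!(listing.entries[0].is_directory);
        assert!(listing.entries[0].size.is_none());
    }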
}