Skip to main content

steer_workspace/local/
workspace.rs

1use async_trait::async_trait;
2use std::collections::{BTreeMap, HashMap};
3use std::path::{Path, PathBuf};
4use std::str::FromStr;
5use std::sync::Arc;
6use std::time::Duration;
7use thiserror::Error;
8use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
9use tokio::sync::{Mutex, RwLock};
10use tokio::task;
11use tokio_util::sync::CancellationToken;
12use tracing::info;
13
14use crate::error::{Result as WorkspaceResult, WorkspaceError};
15use crate::ops::{
16    ApplyEditsRequest, AstGrepRequest, GlobRequest, GrepRequest, ListDirectoryRequest,
17    ReadFileRequest, WorkspaceOpContext, WriteFileRequest,
18};
19use crate::result::{
20    EditResult, FileContentResult, FileEntry, FileListResult, GlobResult, SearchMatch, SearchResult,
21};
22use crate::{CachedEnvironment, EnvironmentInfo, Workspace, WorkspaceMetadata, WorkspaceType};
23
24use ast_grep_core::tree_sitter::StrDoc;
25use ast_grep_core::{AstGrep, Pattern};
26use ast_grep_language::{LanguageExt, SupportLang};
27use grep_regex::RegexMatcherBuilder;
28use grep_searcher::sinks::UTF8;
29use grep_searcher::{BinaryDetection, SearcherBuilder, SinkError};
30use ignore::WalkBuilder;
31
/// Local filesystem workspace
///
/// Operates directly on the host filesystem rooted at `path`; relative paths
/// in requests are resolved against that root (see `resolve_path`).
pub struct LocalWorkspace {
    /// Workspace root directory; relative request paths resolve against this.
    path: PathBuf,
    /// Lazily-populated environment info with a TTL; `None` until first
    /// collected or after `invalidate_environment_cache`.
    environment_cache: Arc<RwLock<Option<CachedEnvironment>>>,
    /// Stable identity (id, type, location) returned by `metadata()`.
    metadata: WorkspaceMetadata,
}
38
/// Byte cap for a whole-file read in `view_file_internal` (50 KiB).
const MAX_READ_BYTES: usize = 50 * 1024;
/// Lines longer than this are truncated when reading with an offset/limit window.
const MAX_LINE_LENGTH: usize = 2000;

/// Registry of per-path async locks, presumably used to serialize concurrent
/// mutations of the same file (callers not visible here — confirm).
/// NOTE(review): entries are never removed, so this map grows with the set of
/// distinct paths ever locked.
static FILE_LOCKS: std::sync::LazyLock<Mutex<HashMap<String, Arc<Mutex<()>>>>> =
    std::sync::LazyLock::new(|| Mutex::new(HashMap::new()));
44
45async fn get_file_lock(file_path: &str) -> Arc<Mutex<()>> {
46    let mut locks_map_guard = FILE_LOCKS.lock().await;
47    locks_map_guard
48        .entry(file_path.to_string())
49        .or_insert_with(|| Arc::new(Mutex::new(())))
50        .clone()
51}
52
/// Resolve `path` against `base`: absolute paths pass through unchanged,
/// relative paths are joined onto the base directory.
fn resolve_path(base: &Path, path: &str) -> PathBuf {
    let candidate = Path::new(path);
    if candidate.is_absolute() {
        candidate.to_path_buf()
    } else {
        base.join(candidate)
    }
}
60
/// Failure modes of `view_file_internal`; rendered for callers via the
/// `thiserror` display strings below.
#[derive(Error, Debug)]
enum ViewError {
    /// The file could not be opened (missing, permission denied, ...).
    #[error("Failed to open file '{path}': {source}")]
    FileOpen {
        path: String,
        #[source]
        source: std::io::Error,
    },
    /// `metadata()` failed after the file was opened.
    #[error("Failed to get file metadata for '{path}': {source}")]
    Metadata {
        path: String,
        #[source]
        source: std::io::Error,
    },
    /// The caller's cancellation token fired mid-read.
    #[error("File read cancelled")]
    Cancelled,
    /// I/O error in the line-by-line (offset/limit) read path.
    #[error("Error reading file line by line: {source}")]
    ReadLine {
        #[source]
        source: std::io::Error,
    },
    /// I/O error in the whole-file (byte-capped) read path.
    #[error("Error reading file: {source}")]
    Read {
        #[source]
        source: std::io::Error,
    },
}
88
/// Failure modes of `list_directory_internal`.
#[derive(Error, Debug)]
enum LsError {
    /// The requested path exists but is not (or is not visible as) a directory.
    #[error("Path is not a directory: {path}")]
    NotADirectory { path: String },
    /// The caller's cancellation token fired during the walk.
    #[error("Operation was cancelled")]
    Cancelled,
    /// The blocking task running the listing panicked or was aborted.
    #[error("Task join error: {source}")]
    TaskJoinError {
        #[from]
        #[source]
        source: tokio::task::JoinError,
    },
}
102
/// Read a file and return its content with `cat -n`-style numbered lines.
///
/// Two read strategies:
/// * With an `offset` and/or `limit`, the file is read line by line and only
///   the requested window is kept; individual lines longer than
///   `MAX_LINE_LENGTH` are truncated with a marker.
/// * Otherwise, at most `MAX_READ_BYTES` bytes are read in one pass and
///   decoded lossily as UTF-8 (a multi-byte char split at the cap decodes to
///   replacement characters).
///
/// `offset` is a 1-based starting line; 0 is clamped to 1. Cancellation is
/// checked between reads and surfaces as `ViewError::Cancelled`. The returned
/// `line_count` is the number of lines included in the output, not the total
/// line count of the file.
async fn view_file_internal(
    file_path: &Path,
    offset: Option<u64>,
    limit: Option<u64>,
    cancellation_token: &CancellationToken,
) -> std::result::Result<FileContentResult, ViewError> {
    let mut file = tokio::fs::File::open(file_path)
        .await
        .map_err(|e| ViewError::FileOpen {
            path: file_path.display().to_string(),
            source: e,
        })?;

    let file_size = file
        .metadata()
        .await
        .map_err(|e| ViewError::Metadata {
            path: file_path.display().to_string(),
            source: e,
        })?
        .len();

    // Clamp offset/limit to sane minimums (1-based line numbering).
    let start_line = offset.unwrap_or(1).max(1) as usize;
    let line_limit = limit.map(|v| v.max(1) as usize);

    let (content, total_lines, truncated) = if start_line > 1 || line_limit.is_some() {
        // Windowed read: scan line by line, keeping lines within the window.
        let mut reader = BufReader::new(file);
        let mut current_line_num = 1usize;
        let mut lines_read = 0usize;
        let mut lines = Vec::new();

        loop {
            if cancellation_token.is_cancelled() {
                return Err(ViewError::Cancelled);
            }

            let mut line = String::new();
            match reader.read_line(&mut line).await {
                Ok(0) => break, // EOF
                Ok(_) => {
                    if current_line_num >= start_line {
                        // Cap pathological line lengths before returning them.
                        if line.len() > MAX_LINE_LENGTH {
                            line.truncate(MAX_LINE_LENGTH);
                            line.push_str("... [line truncated]");
                        }
                        lines.push(line.trim_end().to_string());
                        lines_read += 1;
                        if line_limit.is_some_and(|l| lines_read >= l) {
                            break;
                        }
                    }
                    current_line_num += 1;
                }
                Err(e) => return Err(ViewError::ReadLine { source: e }),
            }
        }

        let total_lines = lines.len();
        // NOTE(review): this reports truncated=true whenever exactly `limit`
        // lines were read, even if the file happened to end there — confirm
        // whether that over-report matters to callers.
        let truncated = line_limit.is_some_and(|l| lines_read >= l);
        // Prefix each line with its 1-based number, offset by the window start.
        let numbered_lines: Vec<String> = lines
            .into_iter()
            .enumerate()
            .map(|(i, line)| format!("{:5}\t{}", start_line + i, line))
            .collect();

        (numbered_lines.join("\n"), total_lines, truncated)
    } else {
        // Whole-file read, capped at MAX_READ_BYTES.
        let read_size = std::cmp::min(file_size as usize, MAX_READ_BYTES);
        let mut buffer = vec![0u8; read_size];
        let mut bytes_read = 0usize;

        // Loop because read() may return fewer bytes than requested.
        while bytes_read < read_size {
            if cancellation_token.is_cancelled() {
                return Err(ViewError::Cancelled);
            }
            let n = file
                .read(&mut buffer[bytes_read..])
                .await
                .map_err(|e| ViewError::Read { source: e })?;
            if n == 0 {
                break;
            }
            bytes_read += n;
        }

        buffer.truncate(bytes_read);
        let content = String::from_utf8_lossy(&buffer);
        let lines: Vec<&str> = content.lines().collect();
        let total_lines = lines.len();
        // Truncated iff the file was larger than the byte cap.
        let truncated = file_size as usize > MAX_READ_BYTES;
        let numbered_lines: Vec<String> = lines
            .into_iter()
            .enumerate()
            .map(|(i, line)| format!("{:5}\t{}", i + 1, line))
            .collect();

        (numbered_lines.join("\n"), total_lines, truncated)
    };

    Ok(FileContentResult {
        content,
        file_path: file_path.display().to_string(),
        line_count: total_lines,
        truncated,
    })
}
209
210fn list_directory_internal(
211    path_str: &str,
212    ignore_patterns: &[String],
213    cancellation_token: &CancellationToken,
214) -> std::result::Result<FileListResult, LsError> {
215    let path = Path::new(path_str);
216    if !path.is_dir() {
217        return Err(LsError::NotADirectory {
218            path: path_str.to_string(),
219        });
220    }
221
222    if cancellation_token.is_cancelled() {
223        return Err(LsError::Cancelled);
224    }
225
226    let mut walk_builder = WalkBuilder::new(path);
227    walk_builder.max_depth(Some(1));
228    walk_builder.git_ignore(true);
229    walk_builder.ignore(true);
230    walk_builder.hidden(false);
231
232    for pattern in ignore_patterns {
233        walk_builder.add_ignore(pattern);
234    }
235
236    let walker = walk_builder.build();
237    let mut entries = Vec::new();
238
239    for result in walker.skip(1) {
240        if cancellation_token.is_cancelled() {
241            return Err(LsError::Cancelled);
242        }
243
244        match result {
245            Ok(entry) => {
246                let file_path = entry.path();
247                let file_name = file_path.file_name().unwrap_or_default().to_string_lossy();
248                let metadata = file_path.metadata().ok();
249                let size = if file_path.is_dir() {
250                    None
251                } else {
252                    metadata.as_ref().map(|m| m.len())
253                };
254
255                entries.push(FileEntry {
256                    path: file_name.to_string(),
257                    is_directory: file_path.is_dir(),
258                    size,
259                    permissions: None,
260                });
261            }
262            Err(e) => {
263                tracing::warn!("Error accessing entry: {e}");
264            }
265        }
266    }
267
268    entries.sort_by(|a, b| match (a.is_directory, b.is_directory) {
269        (true, false) => std::cmp::Ordering::Less,
270        (false, true) => std::cmp::Ordering::Greater,
271        _ => a.path.cmp(&b.path),
272    });
273
274    Ok(FileListResult {
275        entries,
276        base_path: path_str.to_string(),
277    })
278}
279
280fn grep_search_internal(
281    pattern: &str,
282    include: Option<&str>,
283    base_path: &Path,
284    cancellation_token: &CancellationToken,
285) -> std::result::Result<SearchResult, String> {
286    struct FileMatchBucket {
287        mtime: std::time::SystemTime,
288        matches: Vec<(usize, String)>,
289    }
290
291    if !base_path.exists() {
292        return Err(format!("Path does not exist: {}", base_path.display()));
293    }
294
295    let matcher_pattern = if RegexMatcherBuilder::new()
296        .line_terminator(Some(b'\n'))
297        .build(pattern)
298        .is_ok()
299    {
300        pattern.to_string()
301    } else {
302        let escaped = regex::escape(pattern);
303        RegexMatcherBuilder::new()
304            .line_terminator(Some(b'\n'))
305            .build(&escaped)
306            .map_err(|e| format!("Failed to create matcher: {e}"))?;
307        escaped
308    };
309
310    let include_glob = include.map(ToOwned::to_owned);
311    if let Some(include_pattern) = include_glob.as_deref() {
312        glob::Pattern::new(include_pattern).map_err(|e| format!("Invalid glob pattern: {e}"))?;
313    }
314
315    let mut walker = WalkBuilder::new(base_path);
316    walker.hidden(false);
317    walker.git_ignore(true);
318    walker.git_global(true);
319    walker.git_exclude(true);
320
321    let include_pattern = include
322        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid glob pattern: {e}")))
323        .transpose()?;
324
325    let mut file_buckets: BTreeMap<String, FileMatchBucket> = BTreeMap::new();
326    let mut files_searched = 0usize;
327
328    for result in walker.build() {
329        if cancellation_token.is_cancelled() {
330            break;
331        }
332
333        let entry = match result {
334            Ok(e) => e,
335            Err(_) => continue,
336        };
337
338        let path = entry.path();
339        if !path.is_file() {
340            continue;
341        }
342
343        if let Some(ref pattern) = include_pattern
344            && !path_matches_glob(path, pattern, base_path)
345        {
346            continue;
347        }
348
349        files_searched += 1;
350
351        let display_path = match path.canonicalize() {
352            Ok(canonical) => canonical.display().to_string(),
353            Err(_) => path.display().to_string(),
354        };
355        let file_mtime = path
356            .metadata()
357            .and_then(|m| m.modified())
358            .unwrap_or(std::time::SystemTime::UNIX_EPOCH);
359
360        let mut lines_in_file = Vec::new();
361        let matcher = RegexMatcherBuilder::new()
362            .line_terminator(Some(b'\n'))
363            .build(&matcher_pattern)
364            .map_err(|e| format!("Failed to create matcher: {e}"))?;
365        let mut searcher = SearcherBuilder::new()
366            .binary_detection(BinaryDetection::quit(b'\x00'))
367            .line_number(true)
368            .build();
369
370        let search_result = searcher.search_path(
371            &matcher,
372            path,
373            UTF8(|line_num, line| {
374                if cancellation_token.is_cancelled() {
375                    return Err(SinkError::error_message("Operation cancelled".to_string()));
376                }
377
378                lines_in_file.push((line_num as usize, line.trim_end().to_string()));
379                Ok(true)
380            }),
381        );
382
383        let append_file_matches =
384            |buckets: &mut BTreeMap<String, FileMatchBucket>,
385             file_matches: Vec<(usize, String)>| {
386                if file_matches.is_empty() {
387                    return;
388                }
389
390                let bucket =
391                    buckets
392                        .entry(display_path.clone())
393                        .or_insert_with(|| FileMatchBucket {
394                            mtime: file_mtime,
395                            matches: Vec::new(),
396                        });
397                if file_mtime > bucket.mtime {
398                    bucket.mtime = file_mtime;
399                }
400                bucket.matches.extend(file_matches);
401            };
402
403        match search_result {
404            Err(err)
405                if cancellation_token.is_cancelled()
406                    && err.to_string().contains("Operation cancelled") =>
407            {
408                append_file_matches(&mut file_buckets, lines_in_file);
409                break;
410            }
411            Err(err) if err.kind() == std::io::ErrorKind::InvalidData => {}
412            Err(_) | Ok(()) => {
413                append_file_matches(&mut file_buckets, lines_in_file);
414            }
415        }
416    }
417
418    let search_completed = !cancellation_token.is_cancelled();
419    if file_buckets.is_empty() {
420        return Ok(SearchResult {
421            matches: Vec::new(),
422            total_files_searched: files_searched,
423            search_completed,
424        });
425    }
426
427    let mut sorted_files: Vec<(String, FileMatchBucket)> = file_buckets.into_iter().collect();
428    if sorted_files.len() > 1 {
429        sorted_files.sort_by(|a, b| b.1.mtime.cmp(&a.1.mtime).then_with(|| a.0.cmp(&b.0)));
430    }
431
432    let total_matches = sorted_files
433        .iter()
434        .map(|(_, bucket)| bucket.matches.len())
435        .sum();
436    let mut matches = Vec::with_capacity(total_matches);
437    for (file_path, mut bucket) in sorted_files {
438        for (line_number, line_content) in bucket.matches.drain(..) {
439            matches.push(SearchMatch {
440                file_path: file_path.clone(),
441                line_number,
442                line_content,
443                column_range: None,
444            });
445        }
446    }
447
448    Ok(SearchResult {
449        matches,
450        total_files_searched: files_searched,
451        search_completed,
452    })
453}
454
455fn astgrep_search_internal(
456    pattern: &str,
457    lang: Option<&str>,
458    include: Option<&str>,
459    exclude: Option<&str>,
460    base_path: &Path,
461    cancellation_token: &CancellationToken,
462) -> std::result::Result<SearchResult, String> {
463    if !base_path.exists() {
464        return Err(format!("Path does not exist: {}", base_path.display()));
465    }
466
467    let mut walker = WalkBuilder::new(base_path);
468    walker.hidden(false);
469    walker.git_ignore(true);
470    walker.git_global(true);
471    walker.git_exclude(true);
472
473    let include_pattern = include
474        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid include glob pattern: {e}")))
475        .transpose()?;
476
477    let exclude_pattern = exclude
478        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid exclude glob pattern: {e}")))
479        .transpose()?;
480
481    let mut all_matches = Vec::new();
482    let mut files_searched = 0usize;
483
484    for result in walker.build() {
485        if cancellation_token.is_cancelled() {
486            return Ok(SearchResult {
487                matches: all_matches,
488                total_files_searched: files_searched,
489                search_completed: false,
490            });
491        }
492
493        let entry = match result {
494            Ok(e) => e,
495            Err(_) => continue,
496        };
497
498        let path = entry.path();
499        if !path.is_file() {
500            continue;
501        }
502
503        if let Some(ref pattern) = include_pattern
504            && !path_matches_glob(path, pattern, base_path)
505        {
506            continue;
507        }
508
509        if let Some(ref pattern) = exclude_pattern
510            && path_matches_glob(path, pattern, base_path)
511        {
512            continue;
513        }
514
515        let detected_lang = if let Some(l) = lang {
516            match SupportLang::from_str(l) {
517                Ok(lang) => Some(lang),
518                Err(_) => continue,
519            }
520        } else {
521            SupportLang::from_extension(path).or_else(|| {
522                path.extension()
523                    .and_then(|ext| ext.to_str())
524                    .and_then(|ext| match ext {
525                        "jsx" => Some(SupportLang::JavaScript),
526                        "mjs" => Some(SupportLang::JavaScript),
527                        _ => None,
528                    })
529            })
530        };
531
532        let Some(language) = detected_lang else {
533            continue;
534        };
535
536        files_searched += 1;
537        let content = match std::fs::read_to_string(path) {
538            Ok(c) => c,
539            Err(_) => continue,
540        };
541
542        let ast_grep = language.ast_grep(&content);
543        let pattern_matcher = match Pattern::try_new(pattern, language) {
544            Ok(p) => p,
545            Err(e) => return Err(format!("Invalid pattern: {e}")),
546        };
547
548        let relative_path = path.strip_prefix(base_path).unwrap_or(path);
549        let file_matches = find_matches(&ast_grep, &pattern_matcher, relative_path, &content);
550
551        for m in file_matches {
552            all_matches.push(SearchMatch {
553                file_path: m.file,
554                line_number: m.line,
555                line_content: m.context.trim().to_string(),
556                column_range: Some((m.column, m.column + m.matched_code.len())),
557            });
558        }
559    }
560
561    all_matches.sort_by(|a, b| {
562        a.file_path
563            .cmp(&b.file_path)
564            .then(a.line_number.cmp(&b.line_number))
565    });
566
567    Ok(SearchResult {
568        matches: all_matches,
569        total_files_searched: files_searched,
570        search_completed: true,
571    })
572}
573
/// A single ast-grep hit within one file.
#[derive(Debug)]
struct AstGrepMatch {
    /// Path as rendered for display (the caller passes it relative to the
    /// search root when `strip_prefix` succeeds).
    file: String,
    /// 1-based line of the match start.
    line: usize,
    /// 1-based column of the match start.
    column: usize,
    /// Exact source text matched by the pattern.
    matched_code: String,
    /// Full source line(s) spanning the match.
    context: String,
}
582
/// Run `pattern` against a parsed file and collect one record per AST hit.
///
/// Positions are converted from ast-grep's 0-based line/column to 1-based.
/// `context` holds the complete text of the source line(s) containing the
/// match, expanded from the node's byte range.
fn find_matches(
    ast_grep: &AstGrep<StrDoc<SupportLang>>,
    pattern: &Pattern,
    path: &Path,
    content: &str,
) -> Vec<AstGrepMatch> {
    let root = ast_grep.root();
    let matches = root.find_all(pattern);

    let mut results = Vec::new();
    for node_match in matches {
        let node = node_match.get_node();
        let range = node.range();
        let start_pos = node.start_pos();
        let matched_code = node.text();

        // Expand the node's byte range outward to whole lines so the context
        // snippet is readable on its own.
        let line_start = content[..range.start].rfind('\n').map_or(0, |i| i + 1);
        let line_end = content[range.end..]
            .find('\n')
            .map_or(content.len(), |i| range.end + i);
        let context = &content[line_start..line_end];

        results.push(AstGrepMatch {
            file: path.display().to_string(),
            line: start_pos.line() + 1,
            column: start_pos.column(node) + 1,
            matched_code: matched_code.to_string(),
            context: context.to_string(),
        });
    }

    results
}
616
617fn path_matches_glob(path: &Path, pattern: &glob::Pattern, base_path: &Path) -> bool {
618    if pattern.matches_path(path) {
619        return true;
620    }
621
622    if let Ok(relative_path) = path.strip_prefix(base_path)
623        && pattern.matches_path(relative_path)
624    {
625        return true;
626    }
627
628    if let Some(filename) = path.file_name()
629        && pattern.matches(&filename.to_string_lossy())
630    {
631        return true;
632    }
633
634    false
635}
636
/// Local extension helper for [`SupportLang`] lookups.
trait LanguageHelpers {
    /// Detect the ast-grep language from a file's path/extension, if known.
    fn from_extension(path: &Path) -> Option<SupportLang>;
}

impl LanguageHelpers for SupportLang {
    fn from_extension(path: &Path) -> Option<SupportLang> {
        // Delegates to ast-grep's own path-based detection.
        ast_grep_language::Language::from_path(path)
    }
}
646
/// Apply a sequence of string-replacement edits to `file_path`, returning
/// `(final_content, edits_applied, file_created)`. Nothing is written to
/// disk here — persisting the result is the caller's responsibility.
///
/// Creation semantics: if the file does not exist and the first operation has
/// an empty `old_string`, the file content starts as that operation's
/// `new_string`. A single operation with an empty `old_string` against an
/// existing file replaces the whole content — and also reports
/// `file_created = true` (NOTE(review): the flag conflates overwrite with
/// creation; confirm callers intend that).
///
/// Every non-empty `old_string` must occur exactly once in the then-current
/// content; zero or multiple occurrences fail the whole batch with a
/// descriptive error. Cancellation is checked before each operation.
async fn perform_edit_operations(
    file_path: &Path,
    operations: &[crate::ops::EditOperation],
    token: Option<&CancellationToken>,
) -> WorkspaceResult<(String, usize, bool)> {
    if token.is_some_and(|t| t.is_cancelled()) {
        return Err(WorkspaceError::ToolExecution(
            "Operation cancelled".to_string(),
        ));
    }

    let mut current_content: String;
    let mut file_created_this_op = false;

    match tokio::fs::read_to_string(file_path).await {
        Ok(content_from_file) => {
            current_content = content_from_file;
        }
        // Missing file: only valid when the first op is a creation
        // (empty old_string).
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            if operations.is_empty() {
                return Err(WorkspaceError::ToolExecution(format!(
                    "File {} does not exist and no operations provided to create it.",
                    file_path.display()
                )));
            }
            let first_op = &operations[0];
            if first_op.old_string.is_empty() {
                // Create missing parent directories before treating the file
                // as created.
                if let Some(parent) = file_path.parent()
                    && !tokio::fs::metadata(parent)
                        .await
                        .map(|m| m.is_dir())
                        .unwrap_or(false)
                {
                    if token.is_some_and(|t| t.is_cancelled()) {
                        return Err(WorkspaceError::ToolExecution(
                            "Operation cancelled".to_string(),
                        ));
                    }
                    tokio::fs::create_dir_all(parent).await.map_err(|e| {
                        WorkspaceError::Io(format!(
                            "Failed to create directory {}: {}",
                            parent.display(),
                            e
                        ))
                    })?;
                }
                current_content = first_op.new_string.clone();
                file_created_this_op = true;
            } else {
                return Err(WorkspaceError::Io(format!(
                    "File {} not found, and the first/only operation's old_string is not empty (required for creation).",
                    file_path.display()
                )));
            }
        }
        Err(e) => {
            return Err(WorkspaceError::Io(format!(
                "Failed to read file {}: {e}",
                file_path.display()
            )));
        }
    }

    if operations.is_empty() {
        return Ok((current_content, 0, false));
    }

    let mut edits_applied_count = 0usize;
    for (index, edit_op) in operations.iter().enumerate() {
        if token.is_some_and(|t| t.is_cancelled()) {
            return Err(WorkspaceError::ToolExecution(
                "Operation cancelled".to_string(),
            ));
        }

        if edit_op.old_string.is_empty() {
            if index == 0 && file_created_this_op {
                // Creation step: content was already seeded above; nothing to do.
            } else if index == 0 && operations.len() == 1 {
                // Single whole-file overwrite of an existing file.
                current_content = edit_op.new_string.clone();
                if !file_created_this_op {
                    file_created_this_op = true;
                }
            } else {
                return Err(WorkspaceError::ToolExecution(format!(
                    "Edit #{} for file {} has an empty old_string. This is only allowed for the first operation if the file is being created or for a single operation to overwrite the file.",
                    index + 1,
                    file_path.display()
                )));
            }
        } else {
            // Require a unique anchor so the replacement is unambiguous.
            let occurrences = current_content.matches(&edit_op.old_string).count();
            if occurrences == 0 {
                return Err(WorkspaceError::ToolExecution(format!(
                    "For edit #{}, string not found in file {} (after {} previous successful edits). String to find (first 50 chars): '{}'",
                    index + 1,
                    file_path.display(),
                    edits_applied_count,
                    edit_op.old_string.chars().take(50).collect::<String>()
                )));
            }
            if occurrences > 1 {
                return Err(WorkspaceError::ToolExecution(format!(
                    "For edit #{}, found {} occurrences of string in file {} (after {} previous successful edits). String to find (first 50 chars): '{}'. Please provide more context.",
                    index + 1,
                    occurrences,
                    file_path.display(),
                    edits_applied_count,
                    edit_op.old_string.chars().take(50).collect::<String>()
                )));
            }
            // Exactly one occurrence, so replace() touches only that spot.
            current_content = current_content.replace(&edit_op.old_string, &edit_op.new_string);
        }
        edits_applied_count += 1;
    }

    Ok((current_content, edits_applied_count, file_created_this_op))
}
765
766impl LocalWorkspace {
767    pub async fn with_path(path: PathBuf) -> WorkspaceResult<Self> {
768        let metadata = WorkspaceMetadata {
769            id: format!("local:{}", path.display()),
770            workspace_type: WorkspaceType::Local,
771            location: path.display().to_string(),
772        };
773
774        Ok(Self {
775            path,
776            environment_cache: Arc::new(RwLock::new(None)),
777            metadata,
778        })
779    }
780
781    /// Collect environment information for the local workspace
782    async fn collect_environment(&self) -> WorkspaceResult<EnvironmentInfo> {
783        EnvironmentInfo::collect_for_path(&self.path)
784    }
785}
786
787impl std::fmt::Debug for LocalWorkspace {
788    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
789        f.debug_struct("LocalWorkspace")
790            .field("path", &self.path)
791            .field("metadata", &self.metadata)
792            .finish_non_exhaustive()
793    }
794}
795
796#[async_trait]
797impl Workspace for LocalWorkspace {
    /// Return environment info, serving from the cache while it is fresh.
    ///
    /// The write lock is taken up front so only one task refreshes at a time.
    /// NOTE(review): this also serializes pure cache hits; a read-then-upgrade
    /// scheme could reduce contention if this path becomes hot — confirm.
    async fn environment(&self) -> WorkspaceResult<EnvironmentInfo> {
        let mut cache = self.environment_cache.write().await;

        // Check if we have valid cached data
        if let Some(cached) = cache.as_ref()
            && !cached.is_expired()
        {
            return Ok(cached.info.clone());
        }

        // Collect fresh environment info
        let env_info = self.collect_environment().await?;

        // Cache it with 5 minute TTL
        *cache = Some(CachedEnvironment::new(
            env_info.clone(),
            Duration::from_secs(300), // 5 minutes
        ));

        Ok(env_info)
    }
819
    /// Identifying metadata (id, type, location) for this workspace.
    fn metadata(&self) -> WorkspaceMetadata {
        self.metadata.clone()
    }
823
824    async fn invalidate_environment_cache(&self) {
825        let mut cache = self.environment_cache.write().await;
826        *cache = None;
827    }
828
    /// List file paths in the workspace, optionally filtered by `query` and
    /// capped at `max_results`. Delegates to the shared listing helper.
    async fn list_files(
        &self,
        query: Option<&str>,
        max_results: Option<usize>,
    ) -> WorkspaceResult<Vec<String>> {
        use crate::utils::FileListingUtils;

        info!(target: "workspace.list_files", "Listing files in workspace: {:?}", self.path);

        FileListingUtils::list_files(&self.path, query, max_results).map_err(WorkspaceError::from)
    }
840
    /// Root directory that all relative request paths resolve against.
    fn working_directory(&self) -> &std::path::Path {
        &self.path
    }
844
    /// Read a file (absolute, or relative to the workspace root), honoring the
    /// optional line `offset`/`limit` window from the request. All view errors
    /// are flattened into `WorkspaceError::Io`.
    async fn read_file(
        &self,
        request: ReadFileRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<FileContentResult> {
        let abs_path = resolve_path(&self.path, &request.file_path);
        view_file_internal(
            &abs_path,
            request.offset,
            request.limit,
            &ctx.cancellation_token,
        )
        .await
        .map_err(|e| WorkspaceError::Io(e.to_string()))
    }
860
861    async fn list_directory(
862        &self,
863        request: ListDirectoryRequest,
864        ctx: &WorkspaceOpContext,
865    ) -> WorkspaceResult<FileListResult> {
866        let target_path = resolve_path(&self.path, &request.path);
867        let target_path_str = target_path.to_string_lossy().to_string();
868        let ignore_patterns = request.ignore.unwrap_or_default();
869        let cancellation_token = ctx.cancellation_token.clone();
870
871        let result = task::spawn_blocking(move || {
872            list_directory_internal(&target_path_str, &ignore_patterns, &cancellation_token)
873        })
874        .await;
875
876        match result {
877            Ok(listing_result) => listing_result.map_err(|e| WorkspaceError::Io(e.to_string())),
878            Err(join_error) => Err(WorkspaceError::Io(format!("Task join error: {join_error}"))),
879        }
880    }
881
882    async fn glob(
883        &self,
884        request: GlobRequest,
885        ctx: &WorkspaceOpContext,
886    ) -> WorkspaceResult<GlobResult> {
887        if ctx.cancellation_token.is_cancelled() {
888            return Err(WorkspaceError::ToolExecution(
889                "Operation cancelled".to_string(),
890            ));
891        }
892
893        let search_path = request.path.as_deref().unwrap_or(".");
894        let base_path = resolve_path(&self.path, search_path);
895
896        let glob_pattern = format!("{}/{}", base_path.display(), request.pattern);
897
898        let mut results = Vec::new();
899        match glob::glob(&glob_pattern) {
900            Ok(paths) => {
901                for entry in paths {
902                    if ctx.cancellation_token.is_cancelled() {
903                        return Err(WorkspaceError::ToolExecution(
904                            "Operation cancelled".to_string(),
905                        ));
906                    }
907
908                    match entry {
909                        Ok(path) => results.push(path.display().to_string()),
910                        Err(e) => {
911                            return Err(WorkspaceError::ToolExecution(format!(
912                                "Error matching glob pattern '{glob_pattern}': {e}"
913                            )));
914                        }
915                    }
916                }
917            }
918            Err(e) => {
919                return Err(WorkspaceError::ToolExecution(format!(
920                    "Invalid glob pattern '{glob_pattern}': {e}"
921                )));
922            }
923        }
924
925        results.sort();
926        Ok(GlobResult {
927            matches: results,
928            pattern: request.pattern,
929        })
930    }
931
932    async fn grep(
933        &self,
934        request: GrepRequest,
935        ctx: &WorkspaceOpContext,
936    ) -> WorkspaceResult<SearchResult> {
937        let search_path = request.path.as_deref().unwrap_or(".");
938        let base_path = resolve_path(&self.path, search_path);
939
940        let pattern = request.pattern.clone();
941        let include = request.include.clone();
942        let cancellation_token = ctx.cancellation_token.clone();
943
944        let result = task::spawn_blocking(move || {
945            grep_search_internal(
946                &pattern,
947                include.as_deref(),
948                &base_path,
949                &cancellation_token,
950            )
951        })
952        .await;
953
954        match result {
955            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
956            Err(e) => Err(WorkspaceError::ToolExecution(format!(
957                "Task join error: {e}"
958            ))),
959        }
960    }
961
962    async fn astgrep(
963        &self,
964        request: AstGrepRequest,
965        ctx: &WorkspaceOpContext,
966    ) -> WorkspaceResult<SearchResult> {
967        let search_path = request.path.as_deref().unwrap_or(".");
968        let base_path = resolve_path(&self.path, search_path);
969
970        let pattern = request.pattern.clone();
971        let lang = request.lang.clone();
972        let include = request.include.clone();
973        let exclude = request.exclude.clone();
974        let cancellation_token = ctx.cancellation_token.clone();
975
976        let result = task::spawn_blocking(move || {
977            astgrep_search_internal(
978                &pattern,
979                lang.as_deref(),
980                include.as_deref(),
981                exclude.as_deref(),
982                &base_path,
983                &cancellation_token,
984            )
985        })
986        .await;
987
988        match result {
989            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
990            Err(e) => Err(WorkspaceError::ToolExecution(format!(
991                "Task join error: {e}"
992            ))),
993        }
994    }
995
996    async fn apply_edits(
997        &self,
998        request: ApplyEditsRequest,
999        ctx: &WorkspaceOpContext,
1000    ) -> WorkspaceResult<EditResult> {
1001        let abs_path = resolve_path(&self.path, &request.file_path);
1002        let abs_path_str = abs_path.display().to_string();
1003        let file_lock = get_file_lock(&abs_path_str).await;
1004        let _lock_guard = file_lock.lock().await;
1005
1006        let (final_content, num_ops, created_or_overwritten) =
1007            perform_edit_operations(&abs_path, &request.edits, Some(&ctx.cancellation_token))
1008                .await?;
1009
1010        if created_or_overwritten || num_ops > 0 {
1011            if ctx.cancellation_token.is_cancelled() {
1012                return Err(WorkspaceError::ToolExecution(
1013                    "Operation cancelled".to_string(),
1014                ));
1015            }
1016            tokio::fs::write(&abs_path, &final_content)
1017                .await
1018                .map_err(|e| {
1019                    WorkspaceError::Io(format!(
1020                        "Failed to write file {}: {}",
1021                        abs_path.display(),
1022                        e
1023                    ))
1024                })?;
1025
1026            Ok(EditResult {
1027                file_path: abs_path_str,
1028                changes_made: num_ops,
1029                file_created: created_or_overwritten,
1030                old_content: None,
1031                new_content: Some(final_content),
1032            })
1033        } else {
1034            Ok(EditResult {
1035                file_path: abs_path_str,
1036                changes_made: 0,
1037                file_created: false,
1038                old_content: None,
1039                new_content: None,
1040            })
1041        }
1042    }
1043
1044    async fn write_file(
1045        &self,
1046        request: WriteFileRequest,
1047        ctx: &WorkspaceOpContext,
1048    ) -> WorkspaceResult<EditResult> {
1049        let abs_path = resolve_path(&self.path, &request.file_path);
1050        let abs_path_str = abs_path.display().to_string();
1051        let file_lock = get_file_lock(&abs_path_str).await;
1052        let _lock_guard = file_lock.lock().await;
1053
1054        if ctx.cancellation_token.is_cancelled() {
1055            return Err(WorkspaceError::ToolExecution(
1056                "Operation cancelled".to_string(),
1057            ));
1058        }
1059
1060        if let Some(parent) = abs_path.parent()
1061            && !parent.exists()
1062        {
1063            tokio::fs::create_dir_all(parent).await.map_err(|e| {
1064                WorkspaceError::Io(format!(
1065                    "Failed to create parent directory {}: {e}",
1066                    parent.display()
1067                ))
1068            })?;
1069        }
1070
1071        let file_existed = abs_path.exists();
1072        tokio::fs::write(&abs_path, &request.content)
1073            .await
1074            .map_err(|e| {
1075                WorkspaceError::Io(format!("Failed to write file {}: {e}", abs_path.display()))
1076            })?;
1077
1078        Ok(EditResult {
1079            file_path: abs_path_str,
1080            changes_made: 1,
1081            file_created: !file_existed,
1082            old_content: None,
1083            new_content: Some(request.content),
1084        })
1085    }
1086}
1087
// Unit tests for `LocalWorkspace`. Each test builds its own workspace over a
// fresh `tempdir`, so nothing outside the temporary directory is touched.
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;
    use tokio_util::sync::CancellationToken;

    // Constructing a workspace from a path yields the `Local` workspace type.
    #[tokio::test]
    async fn test_local_workspace_creation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
    }

    // Metadata records both the workspace type and the originating path.
    #[tokio::test]
    async fn test_local_workspace_with_path() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
        assert_eq!(
            workspace.metadata().location,
            temp_dir.path().display().to_string()
        );
    }

    // Two consecutive `environment()` calls must agree — the second is
    // expected to be served from the cache.
    #[tokio::test]
    async fn test_environment_caching() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // First call should collect fresh data
        let env1 = workspace.environment().await.unwrap();

        // Second call should return cached data
        let env2 = workspace.environment().await.unwrap();

        // Should be identical
        assert_eq!(env1.working_directory, env2.working_directory);
        assert_eq!(env1.vcs.is_some(), env2.vcs.is_some());
        assert_eq!(env1.platform, env2.platform);
    }

    // Invalidating the cache must not break subsequent collection.
    #[tokio::test]
    async fn test_cache_invalidation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Get initial environment
        let _ = workspace.environment().await.unwrap();

        // Invalidate cache
        workspace.invalidate_environment_cache().await;

        // Should work fine and fetch fresh data
        let env = workspace.environment().await.unwrap();
        assert!(!env.working_directory.as_os_str().is_empty());
    }

    // The reported working directory matches the workspace root. Paths are
    // canonicalized on both sides because the temp dir may sit behind a
    // symlink (e.g. /tmp -> /private/tmp on macOS).
    #[tokio::test]
    async fn test_environment_collection() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let env = workspace.environment().await.unwrap();

        // Verify basic environment info
        let expected_path = temp_dir
            .path()
            .canonicalize()
            .unwrap_or_else(|_| temp_dir.path().to_path_buf());

        // Canonicalize both paths for comparison on macOS
        let actual_canonical = env
            .working_directory
            .canonicalize()
            .unwrap_or(env.working_directory.clone());
        let expected_canonical = expected_path
            .canonicalize()
            .unwrap_or(expected_path.clone());

        assert_eq!(actual_canonical, expected_canonical);
    }

    // Listing covers files and directories (directories get a trailing
    // slash), supports a substring query, and honors `max_results`.
    #[tokio::test]
    async fn test_list_files() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create some test files
        std::fs::write(temp_dir.path().join("test.rs"), "test").unwrap();
        std::fs::write(temp_dir.path().join("main.rs"), "main").unwrap();
        std::fs::create_dir(temp_dir.path().join("src")).unwrap();
        std::fs::write(temp_dir.path().join("src/lib.rs"), "lib").unwrap();

        // List all files
        let files = workspace.list_files(None, None).await.unwrap();
        assert_eq!(files.len(), 4); // 3 files + 1 directory
        assert!(files.contains(&"test.rs".to_string()));
        assert!(files.contains(&"main.rs".to_string()));
        assert!(files.contains(&"src/".to_string())); // Directory with trailing slash
        assert!(files.contains(&"src/lib.rs".to_string()));

        // Test with query
        let files = workspace.list_files(Some("test"), None).await.unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(files[0], "test.rs");

        // Test with max_results
        let files = workspace.list_files(None, Some(2)).await.unwrap();
        assert_eq!(files.len(), 2);
    }

    // Hidden files (dotfiles) are not filtered out of listings.
    #[tokio::test]
    async fn test_list_files_includes_dotfiles() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create a dotfile
        std::fs::write(temp_dir.path().join(".gitignore"), "target/").unwrap();

        let files = workspace.list_files(None, None).await.unwrap();
        assert!(files.contains(&".gitignore".to_string()));
    }

    // `working_directory()` returns exactly the path the workspace was
    // created with (no canonicalization).
    #[tokio::test]
    async fn test_working_directory() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(workspace.working_directory(), temp_dir.path());
    }

    // grep results are ordered by file mtime (newest first), so re-touching
    // `b.rs` after `a.rs` must move it to the front. Sleeps widen the mtime
    // gap on filesystems with coarse timestamp resolution.
    #[tokio::test]
    async fn test_grep_orders_matches_by_mtime_then_path() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        let b_file = root.join("b.rs");
        let a_file = root.join("a.rs");

        std::fs::write(&b_file, "needle from b\n").unwrap();
        std::thread::sleep(std::time::Duration::from_millis(20));
        std::fs::write(&a_file, "needle from a\n").unwrap();
        std::thread::sleep(std::time::Duration::from_millis(20));

        // Refresh b so it has the newest mtime and should appear first.
        std::fs::write(&b_file, "needle from b updated\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();

        let context = WorkspaceOpContext::new("test-grep-order", CancellationToken::new());
        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(result.search_completed);
        assert_eq!(result.total_files_searched, 2);
        assert_eq!(result.matches.len(), 2);

        let first = std::path::Path::new(&result.matches[0].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();
        let second = std::path::Path::new(&result.matches[1].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();

        assert_eq!(first, "b.rs");
        assert_eq!(second, "a.rs");
    }

    // The `include` glob restricts both which files are searched and which
    // matches are returned.
    #[tokio::test]
    async fn test_grep_include_filters_files() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        std::fs::create_dir_all(root.join("src")).unwrap();
        std::fs::create_dir_all(root.join("docs")).unwrap();

        std::fs::write(root.join("src/lib.rs"), "needle in rust\n").unwrap();
        std::fs::write(root.join("src/readme.txt"), "needle in text\n").unwrap();
        std::fs::write(root.join("docs/guide.md"), "needle in markdown\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();
        let context = WorkspaceOpContext::new("test-grep-include", CancellationToken::new());
        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(result.search_completed);
        assert_eq!(result.total_files_searched, 1);
        assert_eq!(result.matches.len(), 1);

        let file_name = std::path::Path::new(&result.matches[0].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();
        assert_eq!(file_name, "lib.rs");
    }

    // A token cancelled before the call yields Ok with an empty, incomplete
    // result (search_completed == false) rather than an error.
    #[tokio::test]
    async fn test_grep_pre_cancelled_returns_incomplete_result() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        std::fs::write(root.join("a.rs"), "needle\n").unwrap();
        std::fs::write(root.join("b.rs"), "needle\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();
        let cancellation_token = CancellationToken::new();
        cancellation_token.cancel();
        let context = WorkspaceOpContext::new("test-grep-cancelled", cancellation_token);

        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(!result.search_completed);
        assert_eq!(result.total_files_searched, 0);
        assert!(result.matches.is_empty());
    }
}