//! Local filesystem `Workspace` implementation (steer_workspace/local/workspace.rs).
1use async_trait::async_trait;
2use std::collections::{BTreeMap, HashMap};
3use std::path::{Path, PathBuf};
4use std::str::FromStr;
5use std::sync::Arc;
6use std::time::Duration;
7use thiserror::Error;
8use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
9use tokio::sync::{Mutex, RwLock};
10use tokio::task;
11use tokio_util::sync::CancellationToken;
12use tracing::info;
13
14use crate::error::{EditFailure, Result as WorkspaceResult, WorkspaceError};
15use crate::ops::{
16    ApplyEditsRequest, AstGrepRequest, GlobRequest, GrepRequest, ListDirectoryRequest,
17    ReadFileRequest, WorkspaceOpContext, WriteFileRequest,
18};
19use crate::result::{
20    EditResult, FileContentResult, FileEntry, FileListResult, GlobResult, SearchMatch, SearchResult,
21};
22use crate::{CachedEnvironment, EnvironmentInfo, Workspace, WorkspaceMetadata, WorkspaceType};
23
24use ast_grep_core::tree_sitter::StrDoc;
25use ast_grep_core::{AstGrep, Pattern};
26use ast_grep_language::{LanguageExt, SupportLang};
27use grep_regex::RegexMatcherBuilder;
28use grep_searcher::sinks::UTF8;
29use grep_searcher::{BinaryDetection, SearcherBuilder, SinkError};
30use ignore::WalkBuilder;
31
/// Local filesystem workspace.
///
/// Operates directly on the host filesystem rooted at `path`; relative
/// request paths are resolved against that root (see `resolve_path`).
pub struct LocalWorkspace {
    // Workspace root directory.
    path: PathBuf,
    // Lazily populated environment info with a TTL (set in `environment`).
    environment_cache: Arc<RwLock<Option<CachedEnvironment>>>,
    // Identity/type/location computed once in `with_path`.
    metadata: WorkspaceMetadata,
}
38
/// Byte cap for the default (non-raw, no offset/limit) read path in
/// `view_file_internal`.
const MAX_READ_BYTES: usize = 50 * 1024;
/// Per-line byte cap for non-raw reads; longer lines are truncated with a
/// "[line truncated]" suffix.
const MAX_LINE_LENGTH: usize = 2000;
41
/// Per-file async mutexes used to serialize concurrent `apply_edits` calls
/// on the same path.
/// NOTE(review): entries are never removed, so this map grows with the number
/// of distinct files edited over the process lifetime — confirm acceptable.
static FILE_LOCKS: std::sync::LazyLock<Mutex<HashMap<String, Arc<Mutex<()>>>>> =
    std::sync::LazyLock::new(|| Mutex::new(HashMap::new()));
44
45async fn get_file_lock(file_path: &str) -> Arc<Mutex<()>> {
46    let mut locks_map_guard = FILE_LOCKS.lock().await;
47    locks_map_guard
48        .entry(file_path.to_string())
49        .or_insert_with(|| Arc::new(Mutex::new(())))
50        .clone()
51}
52
/// Resolves `path` against `base`: an absolute path passes through untouched,
/// a relative one is joined onto `base`.
fn resolve_path(base: &Path, path: &str) -> PathBuf {
    let candidate = Path::new(path);
    match candidate.is_absolute() {
        true => candidate.to_path_buf(),
        false => base.join(candidate),
    }
}
60
/// Failure modes of `view_file_internal`; display strings via `thiserror`.
#[derive(Error, Debug)]
enum ViewError {
    // Opening the file failed (missing, permissions, ...).
    #[error("Failed to open file '{path}': {source}")]
    FileOpen {
        path: String,
        #[source]
        source: std::io::Error,
    },
    // Metadata lookup failed after a successful open.
    #[error("Failed to get file metadata for '{path}': {source}")]
    Metadata {
        path: String,
        #[source]
        source: std::io::Error,
    },
    // The caller's cancellation token fired mid-read.
    #[error("File read cancelled")]
    Cancelled,
    // I/O error in the line-by-line (offset/limit) read path.
    #[error("Error reading file line by line: {source}")]
    ReadLine {
        #[source]
        source: std::io::Error,
    },
    // I/O error in the chunked/whole-file read paths.
    #[error("Error reading file: {source}")]
    Read {
        #[source]
        source: std::io::Error,
    },
}
88
/// Failure modes of `list_directory_internal`; display strings via `thiserror`.
#[derive(Error, Debug)]
enum LsError {
    // The requested path exists but is not a directory (or does not exist).
    #[error("Path is not a directory: {path}")]
    NotADirectory { path: String },
    // The caller's cancellation token fired mid-walk.
    #[error("Operation was cancelled")]
    Cancelled,
    // The spawn_blocking task panicked or was aborted.
    #[error("Task join error: {source}")]
    TaskJoinError {
        #[from]
        #[source]
        source: tokio::task::JoinError,
    },
}
102
103async fn view_file_internal(
104    file_path: &Path,
105    offset: Option<u64>,
106    limit: Option<u64>,
107    raw: Option<bool>,
108    cancellation_token: &CancellationToken,
109) -> std::result::Result<FileContentResult, ViewError> {
110    let mut file = tokio::fs::File::open(file_path)
111        .await
112        .map_err(|e| ViewError::FileOpen {
113            path: file_path.display().to_string(),
114            source: e,
115        })?;
116
117    let file_size = file
118        .metadata()
119        .await
120        .map_err(|e| ViewError::Metadata {
121            path: file_path.display().to_string(),
122            source: e,
123        })?
124        .len();
125
126    let start_line = offset.unwrap_or(1).max(1) as usize;
127    let line_limit = limit.map(|v| v.max(1) as usize);
128    let is_raw = raw.unwrap_or(false);
129
130    let (content, total_lines, truncated) = if start_line > 1 || line_limit.is_some() {
131        let mut reader = BufReader::new(file);
132        let mut current_line_num = 1usize;
133        let mut lines_read = 0usize;
134        let mut lines = Vec::new();
135
136        loop {
137            if cancellation_token.is_cancelled() {
138                return Err(ViewError::Cancelled);
139            }
140
141            let mut line = String::new();
142            match reader.read_line(&mut line).await {
143                Ok(0) => break,
144                Ok(_) => {
145                    if current_line_num >= start_line {
146                        if is_raw {
147                            lines.push(line);
148                        } else {
149                            if line.len() > MAX_LINE_LENGTH {
150                                line.truncate(MAX_LINE_LENGTH);
151                                line.push_str("... [line truncated]");
152                            }
153                            lines.push(line.trim_end().to_string());
154                        }
155                        lines_read += 1;
156                        if line_limit.is_some_and(|l| lines_read >= l) {
157                            break;
158                        }
159                    }
160                    current_line_num += 1;
161                }
162                Err(e) => return Err(ViewError::ReadLine { source: e }),
163            }
164        }
165
166        let total_lines = lines.len();
167        let truncated = line_limit.is_some_and(|l| lines_read >= l);
168        let content = if is_raw {
169            lines.concat()
170        } else {
171            let numbered_lines: Vec<String> = lines
172                .into_iter()
173                .enumerate()
174                .map(|(i, line)| format!("{:5}\t{}", start_line + i, line))
175                .collect();
176            numbered_lines.join("\n")
177        };
178
179        (content, total_lines, truncated)
180    } else if is_raw {
181        let mut buffer = Vec::new();
182        let mut chunk = [0u8; 8192];
183
184        loop {
185            if cancellation_token.is_cancelled() {
186                return Err(ViewError::Cancelled);
187            }
188
189            let n = file
190                .read(&mut chunk)
191                .await
192                .map_err(|e| ViewError::Read { source: e })?;
193            if n == 0 {
194                break;
195            }
196            buffer.extend_from_slice(&chunk[..n]);
197        }
198
199        let content = String::from_utf8_lossy(&buffer).into_owned();
200        let total_lines = content.lines().count();
201
202        (content, total_lines, false)
203    } else {
204        let read_size = std::cmp::min(file_size as usize, MAX_READ_BYTES);
205        let mut buffer = vec![0u8; read_size];
206        let mut bytes_read = 0usize;
207
208        while bytes_read < read_size {
209            if cancellation_token.is_cancelled() {
210                return Err(ViewError::Cancelled);
211            }
212            let n = file
213                .read(&mut buffer[bytes_read..])
214                .await
215                .map_err(|e| ViewError::Read { source: e })?;
216            if n == 0 {
217                break;
218            }
219            bytes_read += n;
220        }
221
222        buffer.truncate(bytes_read);
223        let content = String::from_utf8_lossy(&buffer);
224        let lines: Vec<&str> = content.lines().collect();
225        let total_lines = lines.len();
226        let truncated = file_size as usize > MAX_READ_BYTES;
227        let numbered_lines: Vec<String> = lines
228            .into_iter()
229            .enumerate()
230            .map(|(i, line)| format!("{:5}\t{}", i + 1, line))
231            .collect();
232
233        (numbered_lines.join("\n"), total_lines, truncated)
234    };
235
236    Ok(FileContentResult {
237        content,
238        file_path: file_path.display().to_string(),
239        line_count: total_lines,
240        truncated,
241    })
242}
243
244fn list_directory_internal(
245    path_str: &str,
246    ignore_patterns: &[String],
247    cancellation_token: &CancellationToken,
248) -> std::result::Result<FileListResult, LsError> {
249    let path = Path::new(path_str);
250    if !path.is_dir() {
251        return Err(LsError::NotADirectory {
252            path: path_str.to_string(),
253        });
254    }
255
256    if cancellation_token.is_cancelled() {
257        return Err(LsError::Cancelled);
258    }
259
260    let mut walk_builder = WalkBuilder::new(path);
261    walk_builder.max_depth(Some(1));
262    walk_builder.git_ignore(true);
263    walk_builder.ignore(true);
264    walk_builder.hidden(false);
265
266    for pattern in ignore_patterns {
267        walk_builder.add_ignore(pattern);
268    }
269
270    let walker = walk_builder.build();
271    let mut entries = Vec::new();
272
273    for result in walker.skip(1) {
274        if cancellation_token.is_cancelled() {
275            return Err(LsError::Cancelled);
276        }
277
278        match result {
279            Ok(entry) => {
280                let file_path = entry.path();
281                let file_name = file_path.file_name().unwrap_or_default().to_string_lossy();
282                let metadata = file_path.metadata().ok();
283                let size = if file_path.is_dir() {
284                    None
285                } else {
286                    metadata.as_ref().map(|m| m.len())
287                };
288
289                entries.push(FileEntry {
290                    path: file_name.to_string(),
291                    is_directory: file_path.is_dir(),
292                    size,
293                    permissions: None,
294                });
295            }
296            Err(e) => {
297                tracing::warn!("Error accessing entry: {e}");
298            }
299        }
300    }
301
302    entries.sort_by(|a, b| match (a.is_directory, b.is_directory) {
303        (true, false) => std::cmp::Ordering::Less,
304        (false, true) => std::cmp::Ordering::Greater,
305        _ => a.path.cmp(&b.path),
306    });
307
308    Ok(FileListResult {
309        entries,
310        base_path: path_str.to_string(),
311    })
312}
313
314fn grep_search_internal(
315    pattern: &str,
316    include: Option<&str>,
317    base_path: &Path,
318    cancellation_token: &CancellationToken,
319) -> std::result::Result<SearchResult, String> {
320    struct FileMatchBucket {
321        mtime: std::time::SystemTime,
322        matches: Vec<(usize, String)>,
323    }
324
325    if !base_path.exists() {
326        return Err(format!("Path does not exist: {}", base_path.display()));
327    }
328
329    let matcher_pattern = if RegexMatcherBuilder::new()
330        .line_terminator(Some(b'\n'))
331        .build(pattern)
332        .is_ok()
333    {
334        pattern.to_string()
335    } else {
336        let escaped = regex::escape(pattern);
337        RegexMatcherBuilder::new()
338            .line_terminator(Some(b'\n'))
339            .build(&escaped)
340            .map_err(|e| format!("Failed to create matcher: {e}"))?;
341        escaped
342    };
343
344    let include_glob = include.map(ToOwned::to_owned);
345    if let Some(include_pattern) = include_glob.as_deref() {
346        glob::Pattern::new(include_pattern).map_err(|e| format!("Invalid glob pattern: {e}"))?;
347    }
348
349    let mut walker = WalkBuilder::new(base_path);
350    walker.hidden(false);
351    walker.git_ignore(true);
352    walker.git_global(true);
353    walker.git_exclude(true);
354
355    let include_pattern = include
356        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid glob pattern: {e}")))
357        .transpose()?;
358
359    let mut file_buckets: BTreeMap<String, FileMatchBucket> = BTreeMap::new();
360    let mut files_searched = 0usize;
361
362    for result in walker.build() {
363        if cancellation_token.is_cancelled() {
364            break;
365        }
366
367        let entry = match result {
368            Ok(e) => e,
369            Err(_) => continue,
370        };
371
372        let path = entry.path();
373        if !path.is_file() {
374            continue;
375        }
376
377        if let Some(ref pattern) = include_pattern
378            && !path_matches_glob(path, pattern, base_path)
379        {
380            continue;
381        }
382
383        files_searched += 1;
384
385        let display_path = match path.canonicalize() {
386            Ok(canonical) => canonical.display().to_string(),
387            Err(_) => path.display().to_string(),
388        };
389        let file_mtime = path
390            .metadata()
391            .and_then(|m| m.modified())
392            .unwrap_or(std::time::SystemTime::UNIX_EPOCH);
393
394        let mut lines_in_file = Vec::new();
395        let matcher = RegexMatcherBuilder::new()
396            .line_terminator(Some(b'\n'))
397            .build(&matcher_pattern)
398            .map_err(|e| format!("Failed to create matcher: {e}"))?;
399        let mut searcher = SearcherBuilder::new()
400            .binary_detection(BinaryDetection::quit(b'\x00'))
401            .line_number(true)
402            .build();
403
404        let search_result = searcher.search_path(
405            &matcher,
406            path,
407            UTF8(|line_num, line| {
408                if cancellation_token.is_cancelled() {
409                    return Err(SinkError::error_message("Operation cancelled".to_string()));
410                }
411
412                lines_in_file.push((line_num as usize, line.trim_end().to_string()));
413                Ok(true)
414            }),
415        );
416
417        let append_file_matches =
418            |buckets: &mut BTreeMap<String, FileMatchBucket>,
419             file_matches: Vec<(usize, String)>| {
420                if file_matches.is_empty() {
421                    return;
422                }
423
424                let bucket =
425                    buckets
426                        .entry(display_path.clone())
427                        .or_insert_with(|| FileMatchBucket {
428                            mtime: file_mtime,
429                            matches: Vec::new(),
430                        });
431                if file_mtime > bucket.mtime {
432                    bucket.mtime = file_mtime;
433                }
434                bucket.matches.extend(file_matches);
435            };
436
437        match search_result {
438            Err(err)
439                if cancellation_token.is_cancelled()
440                    && err.to_string().contains("Operation cancelled") =>
441            {
442                append_file_matches(&mut file_buckets, lines_in_file);
443                break;
444            }
445            Err(err) if err.kind() == std::io::ErrorKind::InvalidData => {}
446            Err(_) | Ok(()) => {
447                append_file_matches(&mut file_buckets, lines_in_file);
448            }
449        }
450    }
451
452    let search_completed = !cancellation_token.is_cancelled();
453    if file_buckets.is_empty() {
454        return Ok(SearchResult {
455            matches: Vec::new(),
456            total_files_searched: files_searched,
457            search_completed,
458        });
459    }
460
461    let mut sorted_files: Vec<(String, FileMatchBucket)> = file_buckets.into_iter().collect();
462    if sorted_files.len() > 1 {
463        sorted_files.sort_by(|a, b| b.1.mtime.cmp(&a.1.mtime).then_with(|| a.0.cmp(&b.0)));
464    }
465
466    let total_matches = sorted_files
467        .iter()
468        .map(|(_, bucket)| bucket.matches.len())
469        .sum();
470    let mut matches = Vec::with_capacity(total_matches);
471    for (file_path, mut bucket) in sorted_files {
472        for (line_number, line_content) in bucket.matches.drain(..) {
473            matches.push(SearchMatch {
474                file_path: file_path.clone(),
475                line_number,
476                line_content,
477                column_range: None,
478            });
479        }
480    }
481
482    Ok(SearchResult {
483        matches,
484        total_files_searched: files_searched,
485        search_completed,
486    })
487}
488
489fn astgrep_search_internal(
490    pattern: &str,
491    lang: Option<&str>,
492    include: Option<&str>,
493    exclude: Option<&str>,
494    base_path: &Path,
495    cancellation_token: &CancellationToken,
496) -> std::result::Result<SearchResult, String> {
497    if !base_path.exists() {
498        return Err(format!("Path does not exist: {}", base_path.display()));
499    }
500
501    let mut walker = WalkBuilder::new(base_path);
502    walker.hidden(false);
503    walker.git_ignore(true);
504    walker.git_global(true);
505    walker.git_exclude(true);
506
507    let include_pattern = include
508        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid include glob pattern: {e}")))
509        .transpose()?;
510
511    let exclude_pattern = exclude
512        .map(|p| glob::Pattern::new(p).map_err(|e| format!("Invalid exclude glob pattern: {e}")))
513        .transpose()?;
514
515    let mut all_matches = Vec::new();
516    let mut files_searched = 0usize;
517
518    for result in walker.build() {
519        if cancellation_token.is_cancelled() {
520            return Ok(SearchResult {
521                matches: all_matches,
522                total_files_searched: files_searched,
523                search_completed: false,
524            });
525        }
526
527        let entry = match result {
528            Ok(e) => e,
529            Err(_) => continue,
530        };
531
532        let path = entry.path();
533        if !path.is_file() {
534            continue;
535        }
536
537        if let Some(ref pattern) = include_pattern
538            && !path_matches_glob(path, pattern, base_path)
539        {
540            continue;
541        }
542
543        if let Some(ref pattern) = exclude_pattern
544            && path_matches_glob(path, pattern, base_path)
545        {
546            continue;
547        }
548
549        let detected_lang = if let Some(l) = lang {
550            match SupportLang::from_str(l) {
551                Ok(lang) => Some(lang),
552                Err(_) => continue,
553            }
554        } else {
555            SupportLang::from_extension(path).or_else(|| {
556                path.extension()
557                    .and_then(|ext| ext.to_str())
558                    .and_then(|ext| match ext {
559                        "jsx" => Some(SupportLang::JavaScript),
560                        "mjs" => Some(SupportLang::JavaScript),
561                        _ => None,
562                    })
563            })
564        };
565
566        let Some(language) = detected_lang else {
567            continue;
568        };
569
570        files_searched += 1;
571        let content = match std::fs::read_to_string(path) {
572            Ok(c) => c,
573            Err(_) => continue,
574        };
575
576        let ast_grep = language.ast_grep(&content);
577        let pattern_matcher = match Pattern::try_new(pattern, language) {
578            Ok(p) => p,
579            Err(e) => return Err(format!("Invalid pattern: {e}")),
580        };
581
582        let relative_path = path.strip_prefix(base_path).unwrap_or(path);
583        let file_matches = find_matches(&ast_grep, &pattern_matcher, relative_path, &content);
584
585        for m in file_matches {
586            all_matches.push(SearchMatch {
587                file_path: m.file,
588                line_number: m.line,
589                line_content: m.context.trim().to_string(),
590                column_range: Some((m.column, m.column + m.matched_code.len())),
591            });
592        }
593    }
594
595    all_matches.sort_by(|a, b| {
596        a.file_path
597            .cmp(&b.file_path)
598            .then(a.line_number.cmp(&b.line_number))
599    });
600
601    Ok(SearchResult {
602        matches: all_matches,
603        total_files_searched: files_searched,
604        search_completed: true,
605    })
606}
607
/// One structural match produced by `find_matches`.
#[derive(Debug)]
struct AstGrepMatch {
    // Display path of the matched file (relative path as passed in).
    file: String,
    // 1-based line of the match start.
    line: usize,
    // 1-based column of the match start.
    column: usize,
    // Exact source text matched by the pattern.
    matched_code: String,
    // Full line(s) spanning the match, used as display context.
    context: String,
}
616
/// Runs `pattern` over the parsed AST and returns one `AstGrepMatch` per hit.
///
/// `node.range()` offsets are treated as byte indices into `content` (they
/// are used to slice it directly). Line and column are converted to 1-based.
/// NOTE(review): assumes `start_pos.column(node)` counts in the same units as
/// `matched_code.len()` used downstream for `column_range` — verify.
fn find_matches(
    ast_grep: &AstGrep<StrDoc<SupportLang>>,
    pattern: &Pattern,
    path: &Path,
    content: &str,
) -> Vec<AstGrepMatch> {
    let root = ast_grep.root();
    let matches = root.find_all(pattern);

    let mut results = Vec::new();
    for node_match in matches {
        let node = node_match.get_node();
        let range = node.range();
        let start_pos = node.start_pos();
        let matched_code = node.text();

        // Expand the match's byte range outward to full-line boundaries so
        // `context` holds the complete surrounding line(s).
        let line_start = content[..range.start].rfind('\n').map_or(0, |i| i + 1);
        let line_end = content[range.end..]
            .find('\n')
            .map_or(content.len(), |i| range.end + i);
        let context = &content[line_start..line_end];

        results.push(AstGrepMatch {
            file: path.display().to_string(),
            line: start_pos.line() + 1,
            column: start_pos.column(node) + 1,
            matched_code: matched_code.to_string(),
            context: context.to_string(),
        });
    }

    results
}
650
651fn path_matches_glob(path: &Path, pattern: &glob::Pattern, base_path: &Path) -> bool {
652    if pattern.matches_path(path) {
653        return true;
654    }
655
656    if let Ok(relative_path) = path.strip_prefix(base_path)
657        && pattern.matches_path(relative_path)
658    {
659        return true;
660    }
661
662    if let Some(filename) = path.file_name()
663        && pattern.matches(&filename.to_string_lossy())
664    {
665        return true;
666    }
667
668    false
669}
670
/// Helper for detecting a `SupportLang` from a file path's extension.
trait LanguageHelpers {
    fn from_extension(path: &Path) -> Option<SupportLang>;
}

impl LanguageHelpers for SupportLang {
    fn from_extension(path: &Path) -> Option<SupportLang> {
        // Delegates to ast-grep's own path-based language detection.
        ast_grep_language::Language::from_path(path)
    }
}
680
681async fn perform_edit_operations(
682    file_path: &Path,
683    operations: &[crate::ops::EditOperation],
684    token: Option<&CancellationToken>,
685) -> WorkspaceResult<(String, usize)> {
686    if token.is_some_and(|t| t.is_cancelled()) {
687        return Err(WorkspaceError::ToolExecution(
688            "Operation cancelled".to_string(),
689        ));
690    }
691
692    for (index, edit_op) in operations.iter().enumerate() {
693        if edit_op.old_string.is_empty() {
694            return Err(WorkspaceError::Edit(EditFailure::EmptyOldString {
695                edit_index: index + 1,
696            }));
697        }
698    }
699
700    let mut current_content = tokio::fs::read_to_string(file_path)
701        .await
702        .map_err(|error| {
703            if error.kind() == std::io::ErrorKind::NotFound {
704                WorkspaceError::Edit(EditFailure::FileNotFound {
705                    file_path: file_path.display().to_string(),
706                })
707            } else {
708                WorkspaceError::Io(format!(
709                    "Failed to read file {}: {error}",
710                    file_path.display()
711                ))
712            }
713        })?;
714
715    if operations.is_empty() {
716        return Ok((current_content, 0));
717    }
718
719    let mut edits_applied_count = 0usize;
720    for (index, edit_op) in operations.iter().enumerate() {
721        if token.is_some_and(|t| t.is_cancelled()) {
722            return Err(WorkspaceError::ToolExecution(
723                "Operation cancelled".to_string(),
724            ));
725        }
726
727        let edit_index = index + 1;
728        let occurrences = current_content.matches(&edit_op.old_string).count();
729        if occurrences == 0 {
730            return Err(WorkspaceError::Edit(EditFailure::StringNotFound {
731                file_path: file_path.display().to_string(),
732                edit_index,
733            }));
734        }
735
736        if occurrences > 1 {
737            return Err(WorkspaceError::Edit(EditFailure::NonUniqueMatch {
738                file_path: file_path.display().to_string(),
739                edit_index,
740                occurrences,
741            }));
742        }
743
744        current_content = current_content.replacen(&edit_op.old_string, &edit_op.new_string, 1);
745        edits_applied_count += 1;
746    }
747
748    Ok((current_content, edits_applied_count))
749}
750
impl LocalWorkspace {
    /// Creates a workspace rooted at `path` with an empty environment cache.
    ///
    /// The metadata id is `local:<path>`. Currently this never fails; the
    /// `Result` return leaves room for fallible setup later.
    pub async fn with_path(path: PathBuf) -> WorkspaceResult<Self> {
        let metadata = WorkspaceMetadata {
            id: format!("local:{}", path.display()),
            workspace_type: WorkspaceType::Local,
            location: path.display().to_string(),
        };

        Ok(Self {
            path,
            environment_cache: Arc::new(RwLock::new(None)),
            metadata,
        })
    }

    /// Collect environment information for the local workspace
    /// (uncached; callers go through `environment()` for the cached view).
    async fn collect_environment(&self) -> WorkspaceResult<EnvironmentInfo> {
        EnvironmentInfo::collect_for_path(&self.path)
    }
}
771
impl std::fmt::Debug for LocalWorkspace {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `environment_cache` is omitted from the output (hence
        // `finish_non_exhaustive`): it is runtime state, not identity.
        f.debug_struct("LocalWorkspace")
            .field("path", &self.path)
            .field("metadata", &self.metadata)
            .finish_non_exhaustive()
    }
}
780
781#[async_trait]
782impl Workspace for LocalWorkspace {
    /// Returns environment info, serving a cached copy while it is fresh.
    ///
    /// The write lock is taken even on the cache-hit path, so concurrent
    /// callers serialize here and at most one collects fresh data at a time.
    async fn environment(&self) -> WorkspaceResult<EnvironmentInfo> {
        let mut cache = self.environment_cache.write().await;

        // Check if we have valid cached data
        if let Some(cached) = cache.as_ref()
            && !cached.is_expired()
        {
            return Ok(cached.info.clone());
        }

        // Collect fresh environment info
        let env_info = self.collect_environment().await?;

        // Cache it with 5 minute TTL
        *cache = Some(CachedEnvironment::new(
            env_info.clone(),
            Duration::from_secs(300), // 5 minutes
        ));

        Ok(env_info)
    }
804
    /// Returns a clone of the workspace's static identity metadata.
    fn metadata(&self) -> WorkspaceMetadata {
        self.metadata.clone()
    }
808
809    async fn invalidate_environment_cache(&self) {
810        let mut cache = self.environment_cache.write().await;
811        *cache = None;
812    }
813
    /// Lists file paths under the workspace root, optionally filtered by
    /// `query` and capped at `max_results`; delegates to `FileListingUtils`.
    async fn list_files(
        &self,
        query: Option<&str>,
        max_results: Option<usize>,
    ) -> WorkspaceResult<Vec<String>> {
        use crate::utils::FileListingUtils;

        info!(target: "workspace.list_files", "Listing files in workspace: {:?}", self.path);

        FileListingUtils::list_files(&self.path, query, max_results).map_err(WorkspaceError::from)
    }
825
    /// Root directory of this workspace; relative request paths resolve here.
    fn working_directory(&self) -> &std::path::Path {
        &self.path
    }
829
    /// Reads a file (path resolved against the workspace root) via
    /// `view_file_internal`; all `ViewError`s collapse into `WorkspaceError::Io`.
    async fn read_file(
        &self,
        request: ReadFileRequest,
        ctx: &WorkspaceOpContext,
    ) -> WorkspaceResult<FileContentResult> {
        let abs_path = resolve_path(&self.path, &request.file_path);
        view_file_internal(
            &abs_path,
            request.offset,
            request.limit,
            request.raw,
            &ctx.cancellation_token,
        )
        .await
        .map_err(|e| WorkspaceError::Io(e.to_string()))
    }
846
847    async fn list_directory(
848        &self,
849        request: ListDirectoryRequest,
850        ctx: &WorkspaceOpContext,
851    ) -> WorkspaceResult<FileListResult> {
852        let target_path = resolve_path(&self.path, &request.path);
853        let target_path_str = target_path.to_string_lossy().to_string();
854        let ignore_patterns = request.ignore.unwrap_or_default();
855        let cancellation_token = ctx.cancellation_token.clone();
856
857        let result = task::spawn_blocking(move || {
858            list_directory_internal(&target_path_str, &ignore_patterns, &cancellation_token)
859        })
860        .await;
861
862        match result {
863            Ok(listing_result) => listing_result.map_err(|e| WorkspaceError::Io(e.to_string())),
864            Err(join_error) => Err(WorkspaceError::Io(format!("Task join error: {join_error}"))),
865        }
866    }
867
868    async fn glob(
869        &self,
870        request: GlobRequest,
871        ctx: &WorkspaceOpContext,
872    ) -> WorkspaceResult<GlobResult> {
873        if ctx.cancellation_token.is_cancelled() {
874            return Err(WorkspaceError::ToolExecution(
875                "Operation cancelled".to_string(),
876            ));
877        }
878
879        let search_path = request.path.as_deref().unwrap_or(".");
880        let base_path = resolve_path(&self.path, search_path);
881
882        let glob_pattern = format!("{}/{}", base_path.display(), request.pattern);
883
884        let mut results = Vec::new();
885        match glob::glob(&glob_pattern) {
886            Ok(paths) => {
887                for entry in paths {
888                    if ctx.cancellation_token.is_cancelled() {
889                        return Err(WorkspaceError::ToolExecution(
890                            "Operation cancelled".to_string(),
891                        ));
892                    }
893
894                    match entry {
895                        Ok(path) => results.push(path.display().to_string()),
896                        Err(e) => {
897                            return Err(WorkspaceError::ToolExecution(format!(
898                                "Error matching glob pattern '{glob_pattern}': {e}"
899                            )));
900                        }
901                    }
902                }
903            }
904            Err(e) => {
905                return Err(WorkspaceError::ToolExecution(format!(
906                    "Invalid glob pattern '{glob_pattern}': {e}"
907                )));
908            }
909        }
910
911        results.sort();
912        Ok(GlobResult {
913            matches: results,
914            pattern: request.pattern,
915        })
916    }
917
918    async fn grep(
919        &self,
920        request: GrepRequest,
921        ctx: &WorkspaceOpContext,
922    ) -> WorkspaceResult<SearchResult> {
923        let search_path = request.path.as_deref().unwrap_or(".");
924        let base_path = resolve_path(&self.path, search_path);
925
926        let pattern = request.pattern.clone();
927        let include = request.include.clone();
928        let cancellation_token = ctx.cancellation_token.clone();
929
930        let result = task::spawn_blocking(move || {
931            grep_search_internal(
932                &pattern,
933                include.as_deref(),
934                &base_path,
935                &cancellation_token,
936            )
937        })
938        .await;
939
940        match result {
941            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
942            Err(e) => Err(WorkspaceError::ToolExecution(format!(
943                "Task join error: {e}"
944            ))),
945        }
946    }
947
948    async fn astgrep(
949        &self,
950        request: AstGrepRequest,
951        ctx: &WorkspaceOpContext,
952    ) -> WorkspaceResult<SearchResult> {
953        let search_path = request.path.as_deref().unwrap_or(".");
954        let base_path = resolve_path(&self.path, search_path);
955
956        let pattern = request.pattern.clone();
957        let lang = request.lang.clone();
958        let include = request.include.clone();
959        let exclude = request.exclude.clone();
960        let cancellation_token = ctx.cancellation_token.clone();
961
962        let result = task::spawn_blocking(move || {
963            astgrep_search_internal(
964                &pattern,
965                lang.as_deref(),
966                include.as_deref(),
967                exclude.as_deref(),
968                &base_path,
969                &cancellation_token,
970            )
971        })
972        .await;
973
974        match result {
975            Ok(search_result) => search_result.map_err(WorkspaceError::ToolExecution),
976            Err(e) => Err(WorkspaceError::ToolExecution(format!(
977                "Task join error: {e}"
978            ))),
979        }
980    }
981
982    async fn apply_edits(
983        &self,
984        request: ApplyEditsRequest,
985        ctx: &WorkspaceOpContext,
986    ) -> WorkspaceResult<EditResult> {
987        let abs_path = resolve_path(&self.path, &request.file_path);
988        let abs_path_str = abs_path.display().to_string();
989        let file_lock = get_file_lock(&abs_path_str).await;
990        let _lock_guard = file_lock.lock().await;
991
992        let (final_content, num_ops) =
993            perform_edit_operations(&abs_path, &request.edits, Some(&ctx.cancellation_token))
994                .await?;
995
996        if num_ops > 0 {
997            if ctx.cancellation_token.is_cancelled() {
998                return Err(WorkspaceError::ToolExecution(
999                    "Operation cancelled".to_string(),
1000                ));
1001            }
1002            tokio::fs::write(&abs_path, &final_content)
1003                .await
1004                .map_err(|e| {
1005                    WorkspaceError::Io(format!(
1006                        "Failed to write file {}: {}",
1007                        abs_path.display(),
1008                        e
1009                    ))
1010                })?;
1011
1012            Ok(EditResult {
1013                file_path: abs_path_str,
1014                changes_made: num_ops,
1015                file_created: false,
1016                old_content: None,
1017                new_content: Some(final_content),
1018            })
1019        } else {
1020            Ok(EditResult {
1021                file_path: abs_path_str,
1022                changes_made: 0,
1023                file_created: false,
1024                old_content: None,
1025                new_content: None,
1026            })
1027        }
1028    }
1029
1030    async fn write_file(
1031        &self,
1032        request: WriteFileRequest,
1033        ctx: &WorkspaceOpContext,
1034    ) -> WorkspaceResult<EditResult> {
1035        let abs_path = resolve_path(&self.path, &request.file_path);
1036        let abs_path_str = abs_path.display().to_string();
1037        let file_lock = get_file_lock(&abs_path_str).await;
1038        let _lock_guard = file_lock.lock().await;
1039
1040        if ctx.cancellation_token.is_cancelled() {
1041            return Err(WorkspaceError::ToolExecution(
1042                "Operation cancelled".to_string(),
1043            ));
1044        }
1045
1046        if let Some(parent) = abs_path.parent()
1047            && !parent.exists()
1048        {
1049            tokio::fs::create_dir_all(parent).await.map_err(|e| {
1050                WorkspaceError::Io(format!(
1051                    "Failed to create parent directory {}: {e}",
1052                    parent.display()
1053                ))
1054            })?;
1055        }
1056
1057        let file_existed = abs_path.exists();
1058        tokio::fs::write(&abs_path, &request.content)
1059            .await
1060            .map_err(|e| {
1061                WorkspaceError::Io(format!("Failed to write file {}: {e}", abs_path.display()))
1062            })?;
1063
1064        Ok(EditResult {
1065            file_path: abs_path_str,
1066            changes_made: 1,
1067            file_created: !file_existed,
1068            old_content: None,
1069            new_content: Some(request.content),
1070        })
1071    }
1072}
1073
#[cfg(test)]
mod tests {
    //! Integration-style tests exercising `LocalWorkspace` against real
    //! temporary directories on disk.
    use super::*;
    use tempfile::tempdir;
    use tokio_util::sync::CancellationToken;

    // A freshly constructed workspace must report the Local workspace type.
    #[tokio::test]
    async fn test_local_workspace_creation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
    }

    // Metadata echoes back both the workspace type and the exact location
    // string the workspace was created with.
    #[tokio::test]
    async fn test_local_workspace_with_path() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert!(matches!(
            workspace.metadata().workspace_type,
            WorkspaceType::Local
        ));
        assert_eq!(
            workspace.metadata().location,
            temp_dir.path().display().to_string()
        );
    }

    // Two consecutive environment() calls must agree; the second is expected
    // to be served from the cache.
    #[tokio::test]
    async fn test_environment_caching() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // First call should collect fresh data
        let env1 = workspace.environment().await.unwrap();

        // Second call should return cached data
        let env2 = workspace.environment().await.unwrap();

        // Should be identical
        assert_eq!(env1.working_directory, env2.working_directory);
        assert_eq!(env1.vcs.is_some(), env2.vcs.is_some());
        assert_eq!(env1.platform, env2.platform);
    }

    // Invalidating the environment cache must not break subsequent calls.
    #[tokio::test]
    async fn test_cache_invalidation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Get initial environment
        let _ = workspace.environment().await.unwrap();

        // Invalidate cache
        workspace.invalidate_environment_cache().await;

        // Should work fine and fetch fresh data
        let env = workspace.environment().await.unwrap();
        assert!(!env.working_directory.as_os_str().is_empty());
    }

    // The collected working directory must match the workspace root,
    // modulo symlink resolution (e.g. /tmp -> /private/tmp on macOS).
    #[tokio::test]
    async fn test_environment_collection() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let env = workspace.environment().await.unwrap();

        // Verify basic environment info
        let expected_path = temp_dir
            .path()
            .canonicalize()
            .unwrap_or_else(|_| temp_dir.path().to_path_buf());

        // Canonicalize both paths for comparison on macOS
        let actual_canonical = env
            .working_directory
            .canonicalize()
            .unwrap_or(env.working_directory.clone());
        let expected_canonical = expected_path
            .canonicalize()
            .unwrap_or(expected_path.clone());

        assert_eq!(actual_canonical, expected_canonical);
    }

    // list_files returns files and directories (directories carry a trailing
    // slash), supports substring queries, and caps output at max_results.
    #[tokio::test]
    async fn test_list_files() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create some test files
        std::fs::write(temp_dir.path().join("test.rs"), "test").unwrap();
        std::fs::write(temp_dir.path().join("main.rs"), "main").unwrap();
        std::fs::create_dir(temp_dir.path().join("src")).unwrap();
        std::fs::write(temp_dir.path().join("src/lib.rs"), "lib").unwrap();

        // List all files
        let files = workspace.list_files(None, None).await.unwrap();
        assert_eq!(files.len(), 4); // 3 files + 1 directory
        assert!(files.contains(&"test.rs".to_string()));
        assert!(files.contains(&"main.rs".to_string()));
        assert!(files.contains(&"src/".to_string())); // Directory with trailing slash
        assert!(files.contains(&"src/lib.rs".to_string()));

        // Test with query
        let files = workspace.list_files(Some("test"), None).await.unwrap();
        assert_eq!(files.len(), 1);
        assert_eq!(files[0], "test.rs");

        // Test with max_results
        let files = workspace.list_files(None, Some(2)).await.unwrap();
        assert_eq!(files.len(), 2);
    }

    // Hidden files (dotfiles) must not be filtered out of listings.
    #[tokio::test]
    async fn test_list_files_includes_dotfiles() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        // Create a dotfile
        std::fs::write(temp_dir.path().join(".gitignore"), "target/").unwrap();

        let files = workspace.list_files(None, None).await.unwrap();
        assert!(files.contains(&".gitignore".to_string()));
    }

    #[tokio::test]
    async fn test_working_directory() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        assert_eq!(workspace.working_directory(), temp_dir.path());
    }

    // raw=true must return the exact on-disk bytes for the offset/limit
    // window (no line numbers, no whitespace normalization), while the
    // default formatted mode adds "lineno\t" prefixes and trims line ends.
    #[tokio::test]
    async fn test_read_file_raw_offset_limit_preserves_exact_content() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let file_path = temp_dir.path().join("sample.txt");
        std::fs::write(&file_path, "alpha  \nbeta\t \ngamma\n").unwrap();

        let context = WorkspaceOpContext::new("test-read-file-raw", CancellationToken::new());
        let raw_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: Some(1),
                    limit: Some(2),
                    raw: Some(true),
                },
                &context,
            )
            .await
            .unwrap();

        assert_eq!(raw_result.content, "alpha  \nbeta\t \n");
        assert_eq!(raw_result.line_count, 2);
        assert!(raw_result.truncated);
        assert!(!raw_result.content.starts_with("    1\t"));

        let formatted_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: Some(1),
                    limit: Some(2),
                    raw: None,
                },
                &context,
            )
            .await
            .unwrap();

        assert_eq!(formatted_result.content, "    1\talpha\n    2\tbeta");
    }

    // Lines longer than MAX_LINE_LENGTH are truncated with a marker in
    // formatted mode only; raw mode returns the full line untouched.
    #[tokio::test]
    async fn test_read_file_raw_offset_limit_disables_line_truncation_marker() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let file_path = temp_dir.path().join("long-line.txt");
        let long_line = "x".repeat(MAX_LINE_LENGTH + 20);
        std::fs::write(&file_path, format!("{long_line}\nsecond\n")).unwrap();

        let context = WorkspaceOpContext::new("test-read-file-marker", CancellationToken::new());

        let formatted_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: Some(1),
                    limit: Some(1),
                    raw: None,
                },
                &context,
            )
            .await
            .unwrap();
        assert!(formatted_result.content.contains("... [line truncated]"));

        let raw_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: Some(1),
                    limit: Some(1),
                    raw: Some(true),
                },
                &context,
            )
            .await
            .unwrap();

        assert_eq!(raw_result.content, format!("{long_line}\n"));
        assert!(!raw_result.content.contains("... [line truncated]"));
    }

    // Files larger than MAX_READ_BYTES are truncated by default, but raw
    // full-file reads (no offset/limit) bypass the byte limit entirely.
    #[tokio::test]
    async fn test_read_file_raw_full_file_disables_byte_limit_truncation() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();

        let file_path = temp_dir.path().join("large.txt");
        let file_content = "x".repeat(MAX_READ_BYTES + 128);
        std::fs::write(&file_path, &file_content).unwrap();

        let context = WorkspaceOpContext::new("test-read-file-raw-full", CancellationToken::new());

        let default_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: None,
                    limit: None,
                    raw: None,
                },
                &context,
            )
            .await
            .unwrap();
        assert!(default_result.truncated);

        let raw_result = workspace
            .read_file(
                ReadFileRequest {
                    file_path: file_path.to_string_lossy().to_string(),
                    offset: None,
                    limit: None,
                    raw: Some(true),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(!raw_result.truncated);
        assert_eq!(raw_result.content, file_content);
        assert_eq!(raw_result.line_count, 1);
    }

    // Edit failures are surfaced as typed EditFailure variants with
    // 1-based edit indices; the next three tests pin each variant.
    #[tokio::test]
    async fn test_apply_edits_rejects_empty_old_string_with_typed_error() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        let file_path = temp_dir.path().join("sample.txt");
        std::fs::write(&file_path, "hello world\n").unwrap();

        let context = WorkspaceOpContext::new("test-edit-empty-old", CancellationToken::new());
        let err = workspace
            .apply_edits(
                ApplyEditsRequest {
                    file_path: file_path.display().to_string(),
                    edits: vec![crate::EditOperation {
                        old_string: String::new(),
                        new_string: "replacement".to_string(),
                    }],
                },
                &context,
            )
            .await
            .expect_err("empty old_string should fail");

        assert!(matches!(
            err,
            WorkspaceError::Edit(EditFailure::EmptyOldString { edit_index: 1 })
        ));
    }

    #[tokio::test]
    async fn test_apply_edits_returns_typed_string_not_found_error() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        let file_path = temp_dir.path().join("sample.txt");
        std::fs::write(&file_path, "hello world\n").unwrap();

        let context =
            WorkspaceOpContext::new("test-edit-string-not-found", CancellationToken::new());
        let err = workspace
            .apply_edits(
                ApplyEditsRequest {
                    file_path: file_path.display().to_string(),
                    edits: vec![crate::EditOperation {
                        old_string: "missing".to_string(),
                        new_string: "replacement".to_string(),
                    }],
                },
                &context,
            )
            .await
            .expect_err("missing string should fail");

        assert!(matches!(
            err,
            WorkspaceError::Edit(EditFailure::StringNotFound { edit_index: 1, .. })
        ));
    }

    #[tokio::test]
    async fn test_apply_edits_returns_typed_non_unique_match_error() {
        let temp_dir = tempdir().unwrap();
        let workspace = LocalWorkspace::with_path(temp_dir.path().to_path_buf())
            .await
            .unwrap();
        let file_path = temp_dir.path().join("sample.txt");
        std::fs::write(&file_path, "repeat\nrepeat\n").unwrap();

        let context = WorkspaceOpContext::new("test-edit-non-unique", CancellationToken::new());
        let err = workspace
            .apply_edits(
                ApplyEditsRequest {
                    file_path: file_path.display().to_string(),
                    edits: vec![crate::EditOperation {
                        old_string: "repeat".to_string(),
                        new_string: "done".to_string(),
                    }],
                },
                &context,
            )
            .await
            .expect_err("non-unique string should fail");

        assert!(matches!(
            err,
            WorkspaceError::Edit(EditFailure::NonUniqueMatch {
                edit_index: 1,
                occurrences: 2,
                ..
            })
        ));
    }

    // Grep results are ordered newest-mtime-first, then by path.
    // NOTE(review): relies on 20ms sleeps to separate mtimes; could be flaky
    // on filesystems with coarse timestamp resolution — confirm in CI.
    #[tokio::test]
    async fn test_grep_orders_matches_by_mtime_then_path() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        let b_file = root.join("b.rs");
        let a_file = root.join("a.rs");

        std::fs::write(&b_file, "needle from b\n").unwrap();
        std::thread::sleep(std::time::Duration::from_millis(20));
        std::fs::write(&a_file, "needle from a\n").unwrap();
        std::thread::sleep(std::time::Duration::from_millis(20));

        // Refresh b so it has the newest mtime and should appear first.
        std::fs::write(&b_file, "needle from b updated\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();

        let context = WorkspaceOpContext::new("test-grep-order", CancellationToken::new());
        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(result.search_completed);
        assert_eq!(result.total_files_searched, 2);
        assert_eq!(result.matches.len(), 2);

        let first = std::path::Path::new(&result.matches[0].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();
        let second = std::path::Path::new(&result.matches[1].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();

        assert_eq!(first, "b.rs");
        assert_eq!(second, "a.rs");
    }

    // The include glob restricts which files are searched at all
    // (total_files_searched counts only the matching file).
    #[tokio::test]
    async fn test_grep_include_filters_files() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        std::fs::create_dir_all(root.join("src")).unwrap();
        std::fs::create_dir_all(root.join("docs")).unwrap();

        std::fs::write(root.join("src/lib.rs"), "needle in rust\n").unwrap();
        std::fs::write(root.join("src/readme.txt"), "needle in text\n").unwrap();
        std::fs::write(root.join("docs/guide.md"), "needle in markdown\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();
        let context = WorkspaceOpContext::new("test-grep-include", CancellationToken::new());
        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(result.search_completed);
        assert_eq!(result.total_files_searched, 1);
        assert_eq!(result.matches.len(), 1);

        let file_name = std::path::Path::new(&result.matches[0].file_path)
            .file_name()
            .unwrap()
            .to_string_lossy()
            .to_string();
        assert_eq!(file_name, "lib.rs");
    }

    // A token cancelled before the search starts yields an empty,
    // incomplete result (Ok, not Err).
    #[tokio::test]
    async fn test_grep_pre_cancelled_returns_incomplete_result() {
        let temp_dir = tempdir().unwrap();
        let root = temp_dir.path();

        std::fs::write(root.join("a.rs"), "needle\n").unwrap();
        std::fs::write(root.join("b.rs"), "needle\n").unwrap();

        let workspace = LocalWorkspace::with_path(root.to_path_buf()).await.unwrap();
        let cancellation_token = CancellationToken::new();
        cancellation_token.cancel();
        let context = WorkspaceOpContext::new("test-grep-cancelled", cancellation_token);

        let result = workspace
            .grep(
                GrepRequest {
                    pattern: "needle".to_string(),
                    include: Some("*.rs".to_string()),
                    path: Some(".".to_string()),
                },
                &context,
            )
            .await
            .unwrap();

        assert!(!result.search_completed);
        assert_eq!(result.total_files_searched, 0);
        assert!(result.matches.is_empty());
    }
}