//! aft/search_index.rs — trigram-based search index supporting fast grep and
//! glob over a project tree, with an on-disk binary cache.
1use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
2use std::fs::{self, File};
3use std::io::{BufReader, BufWriter, Read, Seek, Write};
4use std::path::{Component, Path, PathBuf};
5use std::process::Command;
6use std::sync::{
7    atomic::{AtomicBool, AtomicUsize, Ordering},
8    Arc,
9};
10use std::time::{Duration, SystemTime, UNIX_EPOCH};
11
12use globset::{Glob, GlobSet, GlobSetBuilder};
13use ignore::WalkBuilder;
14use rayon::prelude::*;
15use regex::bytes::{Regex, RegexBuilder};
16use regex_syntax::hir::{Hir, HirKind};
17
/// Default per-file size cap: larger files are tracked but not trigram-indexed.
const DEFAULT_MAX_FILE_SIZE: u64 = 1_048_576;
/// Magic header of the on-disk postings file (`postings.bin`).
const INDEX_MAGIC: &[u8; 8] = b"AFTIDX01";
/// Magic header of the on-disk lookup file (`lookup.bin`).
const LOOKUP_MAGIC: &[u8; 8] = b"AFTLKP01";
/// On-disk format version; readers reject any other value.
const INDEX_VERSION: u32 = 2;
/// Preview window size in bytes.
/// NOTE(review): not referenced in this chunk — confirm usage elsewhere.
const PREVIEW_BYTES: usize = 8 * 1024;
/// Sentinel byte. NOTE(review): not referenced in this chunk — confirm usage.
const EOF_SENTINEL: u8 = 0;
/// Sanity cap on counts deserialized from disk, bounding allocations on
/// corrupt cache files.
const MAX_ENTRIES: usize = 10_000_000;
/// Fixed-size prefix of a serialized file entry: unindexed flag (1) +
/// path_len (4) + size (8) + mtime secs (8) + mtime nanos (4) = 25 bytes.
const MIN_FILE_ENTRY_BYTES: usize = 25;
/// Serialized lookup entry: trigram (4) + offset (8) + count (4) = 16 bytes.
const LOOKUP_ENTRY_BYTES: usize = 16;
/// Serialized posting: file_id (4) + next_mask (1) + loc_mask (1) = 6 bytes.
const POSTING_BYTES: usize = 6;
28
/// In-memory trigram index over a project's text files, providing fast
/// candidate selection for grep/glob plus persistence to a binary cache.
#[derive(Clone, Debug)]
pub struct SearchIndex {
    /// trigram -> posting list, kept sorted by `file_id`.
    pub postings: HashMap<u32, Vec<Posting>>,
    /// File metadata indexed by file id. Removed files keep their slot with
    /// an empty path (see `remove_file`), so ids of other files stay valid.
    pub files: Vec<FileEntry>,
    /// Path -> file id for currently tracked files.
    pub path_to_id: HashMap<PathBuf, u32>,
    /// True once the index has been fully built or loaded from cache.
    pub ready: bool,
    // Canonicalized project root; cached paths are stored relative to it.
    project_root: PathBuf,
    // Git HEAD the index was built against, if known.
    git_head: Option<String>,
    // Files larger than this are tracked as unindexed, not trigram-indexed.
    max_file_size: u64,
    // Reverse map: file id -> trigrams it contributed (used by remove_file).
    file_trigrams: HashMap<u32, Vec<u32>>,
    // Active files that exist but were not trigram-indexed (binary or too
    // large); candidates() always includes them since trigram data cannot
    // rule them out.
    unindexed_files: HashSet<u32>,
}
41
42impl SearchIndex {
43    /// Number of indexed files.
44    pub fn file_count(&self) -> usize {
45        self.files.len()
46    }
47
48    /// Number of unique trigrams in the index.
49    pub fn trigram_count(&self) -> usize {
50        self.postings.len()
51    }
52}
53
/// One entry in a trigram's posting list: a file containing the trigram plus
/// compact filter masks used to prune candidates before content scanning.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Posting {
    /// Index into `SearchIndex::files`.
    pub file_id: u32,
    /// Bitmask over characters seen immediately after this trigram in the
    /// file (built via `mask_for_next_char`, defined outside this chunk).
    pub next_mask: u8,
    /// Bitmask over positions where the trigram occurs
    /// (built via `mask_for_position`, defined outside this chunk).
    pub loc_mask: u8,
}
60
/// Metadata for a single tracked file.
#[derive(Clone, Debug)]
pub struct FileEntry {
    /// File path; empty when the slot has been tombstoned by `remove_file`.
    pub path: PathBuf,
    /// Size in bytes as of indexing time.
    pub size: u64,
    /// Last-modified time as of indexing time.
    pub modified: SystemTime,
}
67
/// A single grep hit returned to callers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct GrepMatch {
    /// File containing the match.
    pub file: PathBuf,
    // Line/column of the match. NOTE(review): numbering base is set by
    // `search_candidate_file` (not visible in this chunk) — confirm 0- vs
    // 1-based before documenting for callers.
    pub line: u32,
    pub column: u32,
    /// Full text of the matching line.
    pub line_text: String,
    /// The matched substring itself.
    pub match_text: String,
}
76
/// Aggregate result of one grep run.
#[derive(Clone, Debug)]
pub struct GrepResult {
    /// Matches returned (may be fewer than `total_matches` when truncated).
    pub matches: Vec<GrepMatch>,
    /// Total matches counted across all searched files.
    pub total_matches: usize,
    /// Number of candidate files actually scanned.
    pub files_searched: usize,
    /// Number of files that produced at least one match.
    pub files_with_matches: usize,
    /// Index state at the time the search ran.
    pub index_status: IndexStatus,
    /// True if the search stopped early after hitting the result cap.
    pub truncated: bool,
}
86
/// Coarse state of the search index at the time a query ran.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum IndexStatus {
    Ready,
    Building,
    Fallback,
}

impl IndexStatus {
    /// Human-readable status name, matching the variant identifier.
    pub fn as_str(&self) -> &'static str {
        match *self {
            Self::Ready => "Ready",
            Self::Building => "Building",
            Self::Fallback => "Fallback",
        }
    }
}
103
/// Trigram requirements extracted from a pattern (see `decompose_regex`):
/// a candidate file must contain every trigram in `and_trigrams` and, for
/// each group in `or_groups`, at least one trigram of that group.
#[derive(Clone, Debug, Default)]
pub struct RegexQuery {
    pub and_trigrams: Vec<u32>,
    pub or_groups: Vec<Vec<u32>>,
    /// Optional per-trigram posting filters for the AND trigrams.
    pub(crate) and_filters: HashMap<u32, PostingFilter>,
    /// Per-group posting filters, parallel to `or_groups`.
    pub(crate) or_filters: Vec<HashMap<u32, PostingFilter>>,
}
111
/// Mask pair used to prune postings for one trigram.
/// NOTE(review): exact filtering semantics live in `postings_for_trigram`,
/// which is defined outside this chunk — confirm there.
#[derive(Clone, Copy, Debug, Default)]
pub(crate) struct PostingFilter {
    next_mask: u8,
    loc_mask: u8,
}
117
/// Intermediate byte-run representation used while building a `RegexQuery`.
/// NOTE(review): not referenced in this chunk — presumably consumed by
/// `decompose_regex`; confirm.
#[derive(Clone, Debug, Default)]
struct QueryBuild {
    and_runs: Vec<Vec<u8>>,
    or_groups: Vec<Vec<Vec<u8>>>,
}
123
/// Compiled include/exclude glob sets applied to candidate paths.
/// NOTE(review): enforcement lives in `PathFilters::matches`, defined
/// outside this chunk.
#[derive(Clone, Debug, Default)]
pub(crate) struct PathFilters {
    /// When `Some`, only matching paths are kept.
    includes: Option<GlobSet>,
    /// When `Some`, matching paths are dropped.
    excludes: Option<GlobSet>,
}
129
/// Root directory a search is confined to, plus whether the trigram index
/// may be consulted for it.
/// NOTE(review): consumers of `use_index` are outside this chunk — confirm.
#[derive(Clone, Debug)]
pub(crate) struct SearchScope {
    pub root: PathBuf,
    pub use_index: bool,
}
135
/// Internal match representation that shares the file path via `Arc` so
/// parallel workers avoid cloning `PathBuf`s; converted to `GrepMatch`
/// before results are returned (see `search_grep`).
#[derive(Clone, Debug)]
struct SharedGrepMatch {
    file: Arc<PathBuf>,
    line: u32,
    column: u32,
    line_text: String,
    match_text: String,
}
144
/// Strategy used to scan file contents: a fast literal byte scan when the
/// pattern has no regex metacharacters, otherwise a compiled byte regex.
#[derive(Clone, Debug)]
enum SearchMatcher {
    Literal(LiteralSearch),
    Regex(Regex),
}
150
/// Literal needle for non-regex patterns.
#[derive(Clone, Debug)]
enum LiteralSearch {
    /// Match bytes exactly as given.
    CaseSensitive(Vec<u8>),
    /// Needle is stored pre-lowercased; used only for ASCII patterns
    /// (non-ASCII case-insensitive literals fall back to regex — see
    /// `search_grep`).
    AsciiCaseInsensitive(Vec<u8>),
}
156
157impl SearchIndex {
158    pub fn new() -> Self {
159        SearchIndex {
160            postings: HashMap::new(),
161            files: Vec::new(),
162            path_to_id: HashMap::new(),
163            ready: false,
164            project_root: PathBuf::new(),
165            git_head: None,
166            max_file_size: DEFAULT_MAX_FILE_SIZE,
167            file_trigrams: HashMap::new(),
168            unindexed_files: HashSet::new(),
169        }
170    }
171
172    pub fn build(root: &Path) -> Self {
173        Self::build_with_limit(root, DEFAULT_MAX_FILE_SIZE)
174    }
175
176    pub(crate) fn build_with_limit(root: &Path, max_file_size: u64) -> Self {
177        let project_root = fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf());
178        let mut index = SearchIndex {
179            project_root: project_root.clone(),
180            max_file_size,
181            ..SearchIndex::new()
182        };
183
184        let filters = PathFilters::default();
185        for path in walk_project_files(&project_root, &filters) {
186            index.update_file(&path);
187        }
188
189        index.git_head = current_git_head(&project_root);
190        index.ready = true;
191        index
192    }
193
194    pub fn index_file(&mut self, path: &Path, content: &[u8]) {
195        self.remove_file(path);
196
197        let file_id = match self.allocate_file_id(path, content.len() as u64) {
198            Some(file_id) => file_id,
199            None => return,
200        };
201
202        let mut trigram_map: BTreeMap<u32, PostingFilter> = BTreeMap::new();
203        for (trigram, next_char, position) in extract_trigrams(content) {
204            let entry = trigram_map.entry(trigram).or_default();
205            entry.next_mask |= mask_for_next_char(next_char);
206            entry.loc_mask |= mask_for_position(position);
207        }
208
209        let mut file_trigrams = Vec::with_capacity(trigram_map.len());
210        for (trigram, filter) in trigram_map {
211            let postings = self.postings.entry(trigram).or_default();
212            postings.push(Posting {
213                file_id,
214                next_mask: filter.next_mask,
215                loc_mask: filter.loc_mask,
216            });
217            // Posting lists are kept sorted by file_id for binary search during
218            // intersection. Since file_ids are allocated incrementally, the new
219            // entry is usually already in order. Only sort when needed.
220            if postings.len() > 1
221                && postings[postings.len() - 2].file_id > postings[postings.len() - 1].file_id
222            {
223                postings.sort_unstable_by_key(|p| p.file_id);
224            }
225            file_trigrams.push(trigram);
226        }
227
228        self.file_trigrams.insert(file_id, file_trigrams);
229        self.unindexed_files.remove(&file_id);
230    }
231
232    pub fn remove_file(&mut self, path: &Path) {
233        let Some(file_id) = self.path_to_id.remove(path) else {
234            return;
235        };
236
237        if let Some(trigrams) = self.file_trigrams.remove(&file_id) {
238            for trigram in trigrams {
239                let should_remove = if let Some(postings) = self.postings.get_mut(&trigram) {
240                    postings.retain(|posting| posting.file_id != file_id);
241                    postings.is_empty()
242                } else {
243                    false
244                };
245
246                if should_remove {
247                    self.postings.remove(&trigram);
248                }
249            }
250        }
251
252        self.unindexed_files.remove(&file_id);
253        if let Some(file) = self.files.get_mut(file_id as usize) {
254            file.path = PathBuf::new();
255            file.size = 0;
256            file.modified = UNIX_EPOCH;
257        }
258    }
259
260    pub fn update_file(&mut self, path: &Path) {
261        self.remove_file(path);
262
263        let metadata = match fs::metadata(path) {
264            Ok(metadata) if metadata.is_file() => metadata,
265            _ => return,
266        };
267
268        if is_binary_path(path, metadata.len()) {
269            self.track_unindexed_file(path, &metadata);
270            return;
271        }
272
273        if metadata.len() > self.max_file_size {
274            self.track_unindexed_file(path, &metadata);
275            return;
276        }
277
278        let content = match fs::read(path) {
279            Ok(content) => content,
280            Err(_) => return,
281        };
282
283        if is_binary_bytes(&content) {
284            self.track_unindexed_file(path, &metadata);
285            return;
286        }
287
288        self.index_file(path, &content);
289    }
290
    /// Convenience entry point: delegates directly to [`Self::search_grep`]
    /// with identical arguments and semantics.
    pub fn grep(
        &self,
        pattern: &str,
        case_sensitive: bool,
        include: &[String],
        exclude: &[String],
        search_root: &Path,
        max_results: usize,
    ) -> GrepResult {
        self.search_grep(
            pattern,
            case_sensitive,
            include,
            exclude,
            search_root,
            max_results,
        )
    }
309
    /// Grep candidate files for `pattern`.
    ///
    /// Patterns without regex metacharacters use a fast literal byte scan;
    /// everything else is compiled as a multi-line, case-configurable byte
    /// regex. Candidates come from the trigram index, then are filtered by
    /// `search_root` and the `include`/`exclude` globs. Per-file matching is
    /// done by `search_candidate_file` (defined outside this chunk), which
    /// receives `max_results` and a global stop threshold of
    /// `2 * max_results` counted matches.
    pub fn search_grep(
        &self,
        pattern: &str,
        case_sensitive: bool,
        include: &[String],
        exclude: &[String],
        search_root: &Path,
        max_results: usize,
    ) -> GrepResult {
        // Detect if pattern is a plain literal (no regex metacharacters).
        // If so, use memchr::memmem which is 3-10x faster than regex for byte scanning.
        let is_literal = !pattern.chars().any(|c| {
            matches!(
                c,
                '.' | '*' | '+' | '?' | '(' | ')' | '[' | ']' | '{' | '}' | '|' | '^' | '$' | '\\'
            )
        });

        let literal_search = if is_literal {
            if case_sensitive {
                Some(LiteralSearch::CaseSensitive(pattern.as_bytes().to_vec()))
            } else if pattern.is_ascii() {
                // Pre-lowercase the needle once; the scan lowercases haystack bytes.
                Some(LiteralSearch::AsciiCaseInsensitive(
                    pattern
                        .as_bytes()
                        .iter()
                        .map(|byte| byte.to_ascii_lowercase())
                        .collect(),
                ))
            } else {
                // Non-ASCII case-insensitive literal: fall back to regex below.
                None
            }
        } else {
            None
        };

        // Build the regex for non-literal patterns (or literal Unicode fallback).
        let regex = if literal_search.is_some() {
            None
        } else {
            let regex_pattern = if is_literal {
                regex::escape(pattern)
            } else {
                pattern.to_string()
            };
            let mut builder = RegexBuilder::new(&regex_pattern);
            builder.case_insensitive(!case_sensitive);
            // Treat `^` and `$` as line anchors (grep semantics), not file anchors.
            builder.multi_line(true);
            match builder.build() {
                Ok(r) => Some(r),
                Err(_) => {
                    // Invalid pattern: report an empty result rather than failing.
                    return GrepResult {
                        matches: Vec::new(),
                        total_matches: 0,
                        files_searched: 0,
                        files_with_matches: 0,
                        index_status: if self.ready {
                            IndexStatus::Ready
                        } else {
                            IndexStatus::Building
                        },
                        truncated: false,
                    };
                }
            }
        };

        let matcher = if let Some(literal_search) = literal_search {
            SearchMatcher::Literal(literal_search)
        } else {
            SearchMatcher::Regex(
                regex.expect("regex should exist when literal matcher is unavailable"),
            )
        };

        // Bad include/exclude globs degrade to "no filtering" rather than failing.
        let filters = match build_path_filters(include, exclude) {
            Ok(filters) => filters,
            Err(_) => PathFilters::default(),
        };
        let search_root = canonicalize_or_normalize(search_root);

        // Narrow the file set via the trigram index before scanning contents.
        let query = decompose_regex(pattern);
        let candidate_ids = self.candidates(&query);

        let candidate_files: Vec<&FileEntry> = candidate_ids
            .into_iter()
            .filter_map(|file_id| self.files.get(file_id as usize))
            // Empty path marks a tombstoned (removed) file slot.
            .filter(|file| !file.path.as_os_str().is_empty())
            .filter(|file| is_within_search_root(&search_root, &file.path))
            .filter(|file| filters.matches(&self.project_root, &file.path))
            .collect();

        let total_matches = AtomicUsize::new(0);
        let files_searched = AtomicUsize::new(0);
        let files_with_matches = AtomicUsize::new(0);
        let truncated = AtomicBool::new(false);
        // Count up to twice the requested results so truncation is detectable.
        let stop_after = max_results.saturating_mul(2);

        // Parallelize only when there are enough files to amortize rayon overhead.
        let mut matches = if candidate_files.len() > 10 {
            candidate_files
                .par_iter()
                .map(|file| {
                    search_candidate_file(
                        file,
                        &matcher,
                        max_results,
                        stop_after,
                        &total_matches,
                        &files_searched,
                        &files_with_matches,
                        &truncated,
                    )
                })
                .reduce(Vec::new, |mut left, mut right| {
                    left.append(&mut right);
                    left
                })
        } else {
            let mut matches = Vec::new();
            for file in candidate_files {
                matches.extend(search_candidate_file(
                    file,
                    &matcher,
                    max_results,
                    stop_after,
                    &total_matches,
                    &files_searched,
                    &files_with_matches,
                    &truncated,
                ));

                if should_stop_search(&truncated, &total_matches, stop_after) {
                    break;
                }
            }
            matches
        };

        // Most-recently-modified files first, using the index's cached mtimes.
        sort_shared_grep_matches_by_cached_mtime_desc(&mut matches, |path| {
            self.path_to_id
                .get(path)
                .and_then(|file_id| self.files.get(*file_id as usize))
                .map(|file| file.modified)
        });

        // Convert the Arc-sharing internal matches into the public type.
        let matches = matches
            .into_iter()
            .map(|matched| GrepMatch {
                file: matched.file.as_ref().clone(),
                line: matched.line,
                column: matched.column,
                line_text: matched.line_text,
                match_text: matched.match_text,
            })
            .collect();

        GrepResult {
            total_matches: total_matches.load(Ordering::Relaxed),
            matches,
            files_searched: files_searched.load(Ordering::Relaxed),
            files_with_matches: files_with_matches.load(Ordering::Relaxed),
            index_status: if self.ready {
                IndexStatus::Ready
            } else {
                IndexStatus::Building
            },
            truncated: truncated.load(Ordering::Relaxed),
        }
    }
480
481    pub fn glob(&self, pattern: &str, search_root: &Path) -> Vec<PathBuf> {
482        let filters = match build_path_filters(&[pattern.to_string()], &[]) {
483            Ok(filters) => filters,
484            Err(_) => return Vec::new(),
485        };
486        let search_root = canonicalize_or_normalize(search_root);
487        let mut entries = self
488            .files
489            .iter()
490            .filter(|file| !file.path.as_os_str().is_empty())
491            .filter(|file| is_within_search_root(&search_root, &file.path))
492            .filter(|file| filters.matches(&self.project_root, &file.path))
493            .map(|file| (file.path.clone(), file.modified))
494            .collect::<Vec<_>>();
495
496        entries.sort_by(|(left_path, left_mtime), (right_path, right_mtime)| {
497            right_mtime
498                .cmp(left_mtime)
499                .then_with(|| left_path.cmp(right_path))
500        });
501
502        entries.into_iter().map(|(path, _)| path).collect()
503    }
504
    /// Compute candidate file ids for `query`.
    ///
    /// AND trigrams are intersected (rarest-first so the working set shrinks
    /// quickly), then each OR group's union is intersected in. Files tracked
    /// as unindexed (binary/oversized) are always appended, since trigram
    /// data cannot rule them out.
    pub fn candidates(&self, query: &RegexQuery) -> Vec<u32> {
        // No trigram constraints at all: every active file is a candidate.
        if query.and_trigrams.is_empty() && query.or_groups.is_empty() {
            return self.active_file_ids();
        }

        // Process the rarest trigrams first to keep intersections small.
        let mut and_trigrams = query.and_trigrams.clone();
        and_trigrams.sort_unstable_by_key(|trigram| self.postings.get(trigram).map_or(0, Vec::len));

        let mut current: Option<Vec<u32>> = None;

        for trigram in and_trigrams {
            let filter = query.and_filters.get(&trigram).copied();
            let matches = self.postings_for_trigram(trigram, filter);
            current = Some(match current.take() {
                Some(existing) => intersect_sorted_ids(&existing, &matches),
                None => matches,
            });

            // An empty intersection can never grow back: stop early.
            if current.as_ref().is_some_and(|ids| ids.is_empty()) {
                break;
            }
        }

        // OR-only queries start from the full active set.
        let mut current = current.unwrap_or_else(|| self.active_file_ids());

        for (index, group) in query.or_groups.iter().enumerate() {
            let mut group_matches = Vec::new();
            let filters = query.or_filters.get(index);

            // Union all trigram postings within the group...
            for trigram in group {
                let filter = filters.and_then(|filters| filters.get(trigram).copied());
                let matches = self.postings_for_trigram(*trigram, filter);
                if group_matches.is_empty() {
                    group_matches = matches;
                } else {
                    group_matches = union_sorted_ids(&group_matches, &matches);
                }
            }

            // ...then intersect the group into the running candidate set.
            current = intersect_sorted_ids(&current, &group_matches);
            if current.is_empty() {
                break;
            }
        }

        // Unindexed-but-active files are always possible matches.
        let mut unindexed = self
            .unindexed_files
            .iter()
            .copied()
            .filter(|file_id| self.is_active_file(*file_id))
            .collect::<Vec<_>>();
        if !unindexed.is_empty() {
            unindexed.sort_unstable();
            current = union_sorted_ids(&current, &unindexed);
        }

        current
    }
563
    /// Persist the index into `cache_dir` as `postings.bin` + `lookup.bin`.
    ///
    /// Active file ids are renumbered densely (tombstoned slots dropped) via
    /// `id_map`, and postings are rewritten with the remapped ids. Both files
    /// are written to `.tmp` siblings first and renamed into place for
    /// atomicity; on any error the temp files are removed and the method
    /// returns silently (best-effort caching).
    pub fn write_to_disk(&self, cache_dir: &Path, git_head: Option<&str>) {
        if fs::create_dir_all(cache_dir).is_err() {
            return;
        }

        let postings_path = cache_dir.join("postings.bin");
        let lookup_path = cache_dir.join("lookup.bin");
        let tmp_postings = cache_dir.join("postings.bin.tmp");
        let tmp_lookup = cache_dir.join("lookup.bin.tmp");

        // Map old (sparse) file ids to new dense ids in active-id order.
        let active_ids = self.active_file_ids();
        let mut id_map = HashMap::new();
        for (new_id, old_id) in active_ids.iter().enumerate() {
            let Ok(new_id_u32) = u32::try_from(new_id) else {
                return;
            };
            id_map.insert(*old_id, new_id_u32);
        }

        let write_result = (|| -> std::io::Result<()> {
            let mut postings_writer = BufWriter::new(File::create(&tmp_postings)?);

            postings_writer.write_all(INDEX_MAGIC)?;
            write_u32(&mut postings_writer, INDEX_VERSION)?;

            // Header: head length, root length, size limit, file count, then
            // the head and root strings themselves.
            let head = git_head.unwrap_or_default();
            let root = self.project_root.to_string_lossy();
            let head_len = u32::try_from(head.len())
                .map_err(|_| std::io::Error::other("git head too large to cache"))?;
            let root_len = u32::try_from(root.len())
                .map_err(|_| std::io::Error::other("project root too large to cache"))?;
            let file_count = u32::try_from(active_ids.len())
                .map_err(|_| std::io::Error::other("too many files to cache"))?;

            write_u32(&mut postings_writer, head_len)?;
            write_u32(&mut postings_writer, root_len)?;
            write_u64(&mut postings_writer, self.max_file_size)?;
            write_u32(&mut postings_writer, file_count)?;
            postings_writer.write_all(head.as_bytes())?;
            postings_writer.write_all(root.as_bytes())?;

            // One record per active file: unindexed flag, path length, size,
            // mtime (secs + nanos), then the root-relative path bytes.
            for old_id in &active_ids {
                let Some(file) = self.files.get(*old_id as usize) else {
                    return Err(std::io::Error::other("missing file entry for cache write"));
                };
                let path = relative_to_root(&self.project_root, &file.path);
                let path = path.to_string_lossy();
                let path_len = u32::try_from(path.len())
                    .map_err(|_| std::io::Error::other("cached path too large"))?;
                let modified = file
                    .modified
                    .duration_since(UNIX_EPOCH)
                    .unwrap_or(Duration::ZERO);
                let unindexed = if self.unindexed_files.contains(old_id) {
                    1u8
                } else {
                    0u8
                };

                postings_writer.write_all(&[unindexed])?;
                write_u32(&mut postings_writer, path_len)?;
                write_u64(&mut postings_writer, file.size)?;
                write_u64(&mut postings_writer, modified.as_secs())?;
                write_u32(&mut postings_writer, modified.subsec_nanos())?;
                postings_writer.write_all(path.as_bytes())?;
            }

            // Serialize postings into one blob, recording (trigram, offset,
            // count) lookup entries in ascending trigram order.
            let mut lookup_entries = Vec::new();
            let mut postings_blob = Vec::new();
            let mut sorted_postings: Vec<_> = self.postings.iter().collect();
            sorted_postings.sort_by_key(|(trigram, _)| **trigram);

            for (trigram, postings) in sorted_postings {
                let offset = u64::try_from(postings_blob.len())
                    .map_err(|_| std::io::Error::other("postings blob too large"))?;
                let mut count = 0u32;

                for posting in postings {
                    // Postings for tombstoned files have no mapped id: skip.
                    let Some(mapped_file_id) = id_map.get(&posting.file_id).copied() else {
                        continue;
                    };

                    postings_blob.extend_from_slice(&mapped_file_id.to_le_bytes());
                    postings_blob.push(posting.next_mask);
                    postings_blob.push(posting.loc_mask);
                    count = count.saturating_add(1);
                }

                if count > 0 {
                    lookup_entries.push((*trigram, offset, count));
                }
            }

            write_u64(
                &mut postings_writer,
                u64::try_from(postings_blob.len())
                    .map_err(|_| std::io::Error::other("postings blob too large"))?,
            )?;
            postings_writer.write_all(&postings_blob)?;
            // NOTE(review): write_crc32 receives both the writer and the temp
            // path — presumably it flushes buffered bytes before hashing the
            // file; confirm in its definition.
            write_crc32(&mut postings_writer, &tmp_postings)?;
            postings_writer.flush()?;
            drop(postings_writer);

            let mut lookup_writer = BufWriter::new(File::create(&tmp_lookup)?);
            let entry_count = u32::try_from(lookup_entries.len())
                .map_err(|_| std::io::Error::other("too many lookup entries to cache"))?;

            lookup_writer.write_all(LOOKUP_MAGIC)?;
            write_u32(&mut lookup_writer, INDEX_VERSION)?;
            write_u32(&mut lookup_writer, entry_count)?;

            for (trigram, offset, count) in lookup_entries {
                write_u32(&mut lookup_writer, trigram)?;
                write_u64(&mut lookup_writer, offset)?;
                write_u32(&mut lookup_writer, count)?;
            }

            write_crc32(&mut lookup_writer, &tmp_lookup)?;
            lookup_writer.flush()?;
            drop(lookup_writer);

            // Rename into place only after both temp files wrote successfully.
            fs::rename(&tmp_postings, &postings_path)?;
            fs::rename(&tmp_lookup, &lookup_path)?;

            Ok(())
        })();

        if write_result.is_err() {
            // Best-effort cleanup of partial temp files.
            let _ = fs::remove_file(&tmp_postings);
            let _ = fs::remove_file(&tmp_lookup);
        }
    }
696
697    pub fn read_from_disk(cache_dir: &Path) -> Option<Self> {
698        let postings_path = cache_dir.join("postings.bin");
699        let lookup_path = cache_dir.join("lookup.bin");
700
701        let mut postings_reader = BufReader::new(File::open(&postings_path).ok()?);
702        let mut lookup_reader = BufReader::new(File::open(&lookup_path).ok()?);
703        let postings_len_total =
704            usize::try_from(postings_reader.get_ref().metadata().ok()?.len()).ok()?;
705        let lookup_len_total =
706            usize::try_from(lookup_reader.get_ref().metadata().ok()?.len()).ok()?;
707        if postings_len_total < 4 || lookup_len_total < 4 {
708            return None;
709        }
710        verify_crc32(&postings_path).ok()?;
711        verify_crc32(&lookup_path).ok()?;
712
713        let mut magic = [0u8; 8];
714        postings_reader.read_exact(&mut magic).ok()?;
715        if &magic != INDEX_MAGIC {
716            return None;
717        }
718        if read_u32(&mut postings_reader).ok()? != INDEX_VERSION {
719            return None;
720        }
721
722        let head_len = read_u32(&mut postings_reader).ok()? as usize;
723        let root_len = read_u32(&mut postings_reader).ok()? as usize;
724        let max_file_size = read_u64(&mut postings_reader).ok()?;
725        let file_count = read_u32(&mut postings_reader).ok()? as usize;
726        if file_count > MAX_ENTRIES {
727            return None;
728        }
729        let postings_body_len = postings_len_total.checked_sub(4)?;
730        let lookup_body_len = lookup_len_total.checked_sub(4)?;
731
732        let remaining_postings = remaining_bytes(&mut postings_reader, postings_body_len)?;
733        let minimum_file_bytes = file_count.checked_mul(MIN_FILE_ENTRY_BYTES)?;
734        if minimum_file_bytes > remaining_postings {
735            return None;
736        }
737
738        if head_len > remaining_bytes(&mut postings_reader, postings_body_len)? {
739            return None;
740        }
741        let mut head_bytes = vec![0u8; head_len];
742        postings_reader.read_exact(&mut head_bytes).ok()?;
743        let git_head = String::from_utf8(head_bytes)
744            .ok()
745            .filter(|head| !head.is_empty());
746
747        if root_len > remaining_bytes(&mut postings_reader, postings_body_len)? {
748            return None;
749        }
750        let mut root_bytes = vec![0u8; root_len];
751        postings_reader.read_exact(&mut root_bytes).ok()?;
752        let project_root = PathBuf::from(String::from_utf8(root_bytes).ok()?);
753
754        let mut files = Vec::with_capacity(file_count);
755        let mut path_to_id = HashMap::new();
756        let mut unindexed_files = HashSet::new();
757
758        for file_id in 0..file_count {
759            let mut unindexed = [0u8; 1];
760            postings_reader.read_exact(&mut unindexed).ok()?;
761            let path_len = read_u32(&mut postings_reader).ok()? as usize;
762            let size = read_u64(&mut postings_reader).ok()?;
763            let secs = read_u64(&mut postings_reader).ok()?;
764            let nanos = read_u32(&mut postings_reader).ok()?;
765            if nanos >= 1_000_000_000 {
766                return None;
767            }
768            if path_len > remaining_bytes(&mut postings_reader, postings_body_len)? {
769                return None;
770            }
771            let mut path_bytes = vec![0u8; path_len];
772            postings_reader.read_exact(&mut path_bytes).ok()?;
773            let relative_path = PathBuf::from(String::from_utf8(path_bytes).ok()?);
774            let full_path = project_root.join(relative_path);
775            let file_id_u32 = u32::try_from(file_id).ok()?;
776
777            files.push(FileEntry {
778                path: full_path.clone(),
779                size,
780                modified: UNIX_EPOCH + Duration::new(secs, nanos),
781            });
782            path_to_id.insert(full_path, file_id_u32);
783            if unindexed[0] == 1 {
784                unindexed_files.insert(file_id_u32);
785            }
786        }
787
788        let postings_len = read_u64(&mut postings_reader).ok()? as usize;
789        let max_postings_bytes = MAX_ENTRIES.checked_mul(POSTING_BYTES)?;
790        if postings_len > max_postings_bytes {
791            return None;
792        }
793        if postings_len > remaining_bytes(&mut postings_reader, postings_body_len)? {
794            return None;
795        }
796        let mut postings_blob = vec![0u8; postings_len];
797        postings_reader.read_exact(&mut postings_blob).ok()?;
798
799        let mut lookup_magic = [0u8; 8];
800        lookup_reader.read_exact(&mut lookup_magic).ok()?;
801        if &lookup_magic != LOOKUP_MAGIC {
802            return None;
803        }
804        if read_u32(&mut lookup_reader).ok()? != INDEX_VERSION {
805            return None;
806        }
807        let entry_count = read_u32(&mut lookup_reader).ok()? as usize;
808        if entry_count > MAX_ENTRIES {
809            return None;
810        }
811        let remaining_lookup = remaining_bytes(&mut lookup_reader, lookup_body_len)?;
812        let minimum_lookup_bytes = entry_count.checked_mul(LOOKUP_ENTRY_BYTES)?;
813        if minimum_lookup_bytes > remaining_lookup {
814            return None;
815        }
816
817        let mut postings = HashMap::new();
818        let mut file_trigrams: HashMap<u32, Vec<u32>> = HashMap::new();
819
820        for _ in 0..entry_count {
821            let trigram = read_u32(&mut lookup_reader).ok()?;
822            let offset = read_u64(&mut lookup_reader).ok()? as usize;
823            let count = read_u32(&mut lookup_reader).ok()? as usize;
824            if count > MAX_ENTRIES {
825                return None;
826            }
827            let bytes_len = count.checked_mul(POSTING_BYTES)?;
828            let end = offset.checked_add(bytes_len)?;
829            if end > postings_blob.len() {
830                return None;
831            }
832
833            let mut trigram_postings = Vec::with_capacity(count);
834            for chunk in postings_blob[offset..end].chunks_exact(6) {
835                let file_id = u32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]);
836                let posting = Posting {
837                    file_id,
838                    next_mask: chunk[4],
839                    loc_mask: chunk[5],
840                };
841                trigram_postings.push(posting.clone());
842                file_trigrams.entry(file_id).or_default().push(trigram);
843            }
844            postings.insert(trigram, trigram_postings);
845        }
846
847        Some(SearchIndex {
848            postings,
849            files,
850            path_to_id,
851            ready: true,
852            project_root,
853            git_head,
854            max_file_size,
855            file_trigrams,
856            unindexed_files,
857        })
858    }
859
    /// The git HEAD commit hash recorded when this index was last built/saved,
    /// if one was available at the time.
    pub(crate) fn stored_git_head(&self) -> Option<&str> {
        self.git_head.as_deref()
    }
863
    /// Mark the index as ready (or not) to serve queries.
    pub(crate) fn set_ready(&mut self, ready: bool) {
        self.ready = ready;
    }
867
868    pub(crate) fn rebuild_or_refresh(
869        root: &Path,
870        max_file_size: u64,
871        current_head: Option<String>,
872        baseline: Option<SearchIndex>,
873    ) -> Self {
874        if current_head.is_none() {
875            return SearchIndex::build_with_limit(root, max_file_size);
876        }
877
878        if let Some(mut baseline) = baseline {
879            baseline.project_root = fs::canonicalize(root).unwrap_or_else(|_| root.to_path_buf());
880            baseline.max_file_size = max_file_size;
881
882            if baseline.git_head == current_head {
883                // HEAD matches, but files may have changed on disk since the index was
884                // last written (e.g., uncommitted edits, stash pop, manual file changes
885                // while OpenCode was closed). Verify mtimes and re-index stale files.
886                verify_file_mtimes(&mut baseline);
887                baseline.ready = true;
888                return baseline;
889            }
890
891            if let (Some(previous), Some(current)) =
892                (baseline.git_head.clone(), current_head.clone())
893            {
894                let project_root = baseline.project_root.clone();
895                if apply_git_diff_updates(&mut baseline, &project_root, &previous, &current) {
896                    baseline.git_head = Some(current);
897                    baseline.ready = true;
898                    return baseline;
899                }
900            }
901        }
902
903        SearchIndex::build_with_limit(root, max_file_size)
904    }
905
906    fn allocate_file_id(&mut self, path: &Path, size_hint: u64) -> Option<u32> {
907        let file_id = u32::try_from(self.files.len()).ok()?;
908        let metadata = fs::metadata(path).ok();
909        let size = metadata
910            .as_ref()
911            .map_or(size_hint, |metadata| metadata.len());
912        let modified = metadata
913            .and_then(|metadata| metadata.modified().ok())
914            .unwrap_or(UNIX_EPOCH);
915
916        self.files.push(FileEntry {
917            path: path.to_path_buf(),
918            size,
919            modified,
920        });
921        self.path_to_id.insert(path.to_path_buf(), file_id);
922        Some(file_id)
923    }
924
925    fn track_unindexed_file(&mut self, path: &Path, metadata: &fs::Metadata) {
926        let Some(file_id) = self.allocate_file_id(path, metadata.len()) else {
927            return;
928        };
929        self.unindexed_files.insert(file_id);
930        self.file_trigrams.insert(file_id, Vec::new());
931    }
932
933    fn active_file_ids(&self) -> Vec<u32> {
934        let mut ids: Vec<u32> = self.path_to_id.values().copied().collect();
935        ids.sort_unstable();
936        ids
937    }
938
939    fn is_active_file(&self, file_id: u32) -> bool {
940        self.files
941            .get(file_id as usize)
942            .map(|file| !file.path.as_os_str().is_empty())
943            .unwrap_or(false)
944    }
945
946    fn postings_for_trigram(&self, trigram: u32, filter: Option<PostingFilter>) -> Vec<u32> {
947        let Some(postings) = self.postings.get(&trigram) else {
948            return Vec::new();
949        };
950
951        let mut matches = Vec::with_capacity(postings.len());
952
953        for posting in postings {
954            if let Some(filter) = filter {
955                // next_mask: bloom filter check — the character following this trigram in the
956                // query must also appear after this trigram somewhere in the file.
957                if filter.next_mask != 0 && posting.next_mask & filter.next_mask == 0 {
958                    continue;
959                }
960                // NOTE: loc_mask (position mod 8) is stored for future adjacency checks
961                // between consecutive trigram pairs, but is NOT used as a single-trigram
962                // filter because the position in the query string has no relationship to
963                // the position in the file. Using it here causes false negatives.
964            }
965            if self.is_active_file(posting.file_id) {
966                matches.push(posting.file_id);
967            }
968        }
969
970        matches
971    }
972}
973
/// Scan one candidate file with `matcher`, recording at most one match per line.
///
/// Shared atomics coordinate a parallel search across many files:
/// - `total_matches` vs `max_results`: once the global match count exceeds
///   `max_results`, `truncated` is flagged so workers can wind down;
/// - `stop_after` (via `should_stop_search`) is the hard stop threshold;
/// - `files_searched` / `files_with_matches` feed result statistics.
fn search_candidate_file(
    file: &FileEntry,
    matcher: &SearchMatcher,
    max_results: usize,
    stop_after: usize,
    total_matches: &AtomicUsize,
    files_searched: &AtomicUsize,
    files_with_matches: &AtomicUsize,
    truncated: &AtomicBool,
) -> Vec<SharedGrepMatch> {
    if should_stop_search(truncated, total_matches, stop_after) {
        return Vec::new();
    }

    let content = match read_indexed_file_bytes(&file.path) {
        Some(content) => content,
        None => return Vec::new(),
    };
    // Defense in depth: even though indexing tries to filter binaries via
    // `is_binary_path` + full-content `is_binary_bytes`, we double-check at
    // query time. content_inspector is fast (~bytes-per-cycle on a small
    // preview) and this guarantees we never surface matches inside binary
    // files even if the indexer somehow let one through (e.g. file changed
    // between indexing and query).
    if is_binary_bytes(&content) {
        return Vec::new();
    }
    files_searched.fetch_add(1, Ordering::Relaxed);

    let shared_path = Arc::new(file.path.clone());
    let mut matches = Vec::new();
    // Line-start table is computed lazily: only files with at least one raw
    // hit pay for the scan.
    let mut line_starts = None;
    // De-duplicates hits so each line is reported at most once.
    let mut seen_lines = HashSet::new();
    let mut matched_this_file = false;

    match matcher {
        // Exact literal: forward memmem scan, advancing one byte past each hit
        // so overlapping occurrences are still found.
        SearchMatcher::Literal(LiteralSearch::CaseSensitive(needle)) => {
            let finder = memchr::memmem::Finder::new(needle);
            let mut start = 0;

            while let Some(position) = finder.find(&content[start..]) {
                if should_stop_search(truncated, total_matches, stop_after) {
                    break;
                }

                let offset = start + position;
                start = offset + 1;

                let line_starts = line_starts.get_or_insert_with(|| line_starts_bytes(&content));
                let (line, column, line_text) = line_details_bytes(&content, line_starts, offset);
                if !seen_lines.insert(line) {
                    continue;
                }

                matched_this_file = true;
                let match_number = total_matches.fetch_add(1, Ordering::Relaxed) + 1;
                if match_number > max_results {
                    truncated.store(true, Ordering::Relaxed);
                    break;
                }

                let end = offset + needle.len();
                matches.push(SharedGrepMatch {
                    file: shared_path.clone(),
                    line,
                    column,
                    line_text,
                    match_text: String::from_utf8_lossy(&content[offset..end]).into_owned(),
                });
            }
        }
        // ASCII case-insensitive literal: scan a lowercased copy of the file,
        // but slice match_text out of the ORIGINAL bytes to preserve case.
        // NOTE(review): assumes `needle` was already lowercased where
        // `LiteralSearch::AsciiCaseInsensitive` is constructed — confirm there.
        SearchMatcher::Literal(LiteralSearch::AsciiCaseInsensitive(needle)) => {
            let search_content = content.to_ascii_lowercase();
            let finder = memchr::memmem::Finder::new(needle);
            let mut start = 0;

            while let Some(position) = finder.find(&search_content[start..]) {
                if should_stop_search(truncated, total_matches, stop_after) {
                    break;
                }

                let offset = start + position;
                start = offset + 1;

                let line_starts = line_starts.get_or_insert_with(|| line_starts_bytes(&content));
                let (line, column, line_text) = line_details_bytes(&content, line_starts, offset);
                if !seen_lines.insert(line) {
                    continue;
                }

                matched_this_file = true;
                let match_number = total_matches.fetch_add(1, Ordering::Relaxed) + 1;
                if match_number > max_results {
                    truncated.store(true, Ordering::Relaxed);
                    break;
                }

                let end = offset + needle.len();
                matches.push(SharedGrepMatch {
                    file: shared_path.clone(),
                    line,
                    column,
                    line_text,
                    match_text: String::from_utf8_lossy(&content[offset..end]).into_owned(),
                });
            }
        }
        // Regex: iterate non-overlapping matches from the bytes regex engine.
        SearchMatcher::Regex(regex) => {
            for matched in regex.find_iter(&content) {
                if should_stop_search(truncated, total_matches, stop_after) {
                    break;
                }

                let line_starts = line_starts.get_or_insert_with(|| line_starts_bytes(&content));
                let (line, column, line_text) =
                    line_details_bytes(&content, line_starts, matched.start());
                if !seen_lines.insert(line) {
                    continue;
                }

                matched_this_file = true;
                let match_number = total_matches.fetch_add(1, Ordering::Relaxed) + 1;
                if match_number > max_results {
                    truncated.store(true, Ordering::Relaxed);
                    break;
                }

                matches.push(SharedGrepMatch {
                    file: shared_path.clone(),
                    line,
                    column,
                    line_text,
                    match_text: String::from_utf8_lossy(matched.as_bytes()).into_owned(),
                });
            }
        }
    }

    if matched_this_file {
        files_with_matches.fetch_add(1, Ordering::Relaxed);
    }

    matches
}
1118
/// A worker should stop only after the result set has been flagged truncated
/// AND the global match count has reached the hard `stop_after` threshold.
fn should_stop_search(
    truncated: &AtomicBool,
    total_matches: &AtomicUsize,
    stop_after: usize,
) -> bool {
    if !truncated.load(Ordering::Relaxed) {
        return false;
    }
    total_matches.load(Ordering::Relaxed) >= stop_after
}
1126
/// Intersection of two ascending id lists, result also ascending.
fn intersect_sorted_ids(left: &[u32], right: &[u32]) -> Vec<u32> {
    let mut common = Vec::with_capacity(left.len().min(right.len()));
    let (mut l, mut r) = (left, right);

    // Classic two-pointer walk expressed as slice shrinking.
    while let (Some(&a), Some(&b)) = (l.first(), r.first()) {
        if a == b {
            common.push(a);
            l = &l[1..];
            r = &r[1..];
        } else if a < b {
            l = &l[1..];
        } else {
            r = &r[1..];
        }
    }

    common
}
1146
/// Merge-union of two ascending id lists; an id present in both appears once.
fn union_sorted_ids(left: &[u32], right: &[u32]) -> Vec<u32> {
    let mut union = Vec::with_capacity(left.len() + right.len());
    let (mut l, mut r) = (left, right);

    while let (Some(&a), Some(&b)) = (l.first(), r.first()) {
        if a < b {
            union.push(a);
            l = &l[1..];
        } else if b < a {
            union.push(b);
            r = &r[1..];
        } else {
            union.push(a);
            l = &l[1..];
            r = &r[1..];
        }
    }

    // At most one side still has elements; both appends are therefore safe.
    union.extend_from_slice(l);
    union.extend_from_slice(r);
    union
}
1174
1175pub fn decompose_regex(pattern: &str) -> RegexQuery {
1176    let hir = match regex_syntax::parse(pattern) {
1177        Ok(hir) => hir,
1178        Err(_) => return RegexQuery::default(),
1179    };
1180
1181    let build = build_query(&hir);
1182    build.into_query()
1183}
1184
/// Pack three bytes into one u32 trigram key: `a` occupies bits 16-23,
/// `b` bits 8-15, `c` bits 0-7 (i.e. big-endian in the low three bytes).
pub fn pack_trigram(a: u8, b: u8, c: u8) -> u32 {
    u32::from_be_bytes([0, a, b, c])
}
1188
/// Case-fold a byte for trigram purposes: ASCII uppercase maps to lowercase,
/// every other byte (including non-ASCII) passes through unchanged.
pub fn normalize_char(c: u8) -> u8 {
    if c.is_ascii_uppercase() {
        // Setting bit 5 turns 'A'..='Z' into 'a'..='z'.
        c | 0x20
    } else {
        c
    }
}
1192
1193pub fn extract_trigrams(content: &[u8]) -> Vec<(u32, u8, usize)> {
1194    if content.len() < 3 {
1195        return Vec::new();
1196    }
1197
1198    let mut trigrams = Vec::with_capacity(content.len().saturating_sub(2));
1199    for start in 0..=content.len() - 3 {
1200        let trigram = pack_trigram(
1201            normalize_char(content[start]),
1202            normalize_char(content[start + 1]),
1203            normalize_char(content[start + 2]),
1204        );
1205        let next_char = content.get(start + 3).copied().unwrap_or(EOF_SENTINEL);
1206        trigrams.push((trigram, next_char, start));
1207    }
1208    trigrams
1209}
1210
1211pub fn resolve_cache_dir(project_root: &Path, storage_dir: Option<&Path>) -> PathBuf {
1212    // Respect AFT_CACHE_DIR for testing — prevents tests from polluting the user's storage
1213    if let Some(override_dir) = std::env::var_os("AFT_CACHE_DIR") {
1214        return PathBuf::from(override_dir)
1215            .join("index")
1216            .join(project_cache_key(project_root));
1217    }
1218    // Use configured storage dir (from plugin, XDG-compliant)
1219    if let Some(dir) = storage_dir {
1220        return dir.join("index").join(project_cache_key(project_root));
1221    }
1222    // Fallback to ~/.cache/aft/ (legacy, for standalone binary usage)
1223    let home = std::env::var_os("HOME")
1224        .map(PathBuf::from)
1225        .unwrap_or_else(|| PathBuf::from("."));
1226    home.join(".cache")
1227        .join("aft")
1228        .join("index")
1229        .join(project_cache_key(project_root))
1230}
1231
1232pub(crate) fn build_path_filters(
1233    include: &[String],
1234    exclude: &[String],
1235) -> Result<PathFilters, String> {
1236    Ok(PathFilters {
1237        includes: build_globset(include)?,
1238        excludes: build_globset(exclude)?,
1239    })
1240}
1241
/// Walk all indexable files under `root`, evaluating `filters` relative to
/// `root` itself (filter root and search root coincide).
pub(crate) fn walk_project_files(root: &Path, filters: &PathFilters) -> Vec<PathBuf> {
    walk_project_files_from(root, root, filters)
}
1245
1246pub(crate) fn walk_project_files_from(
1247    filter_root: &Path,
1248    search_root: &Path,
1249    filters: &PathFilters,
1250) -> Vec<PathBuf> {
1251    let mut builder = WalkBuilder::new(search_root);
1252    builder
1253        .hidden(false)
1254        .git_ignore(true)
1255        .git_global(true)
1256        .git_exclude(true)
1257        .filter_entry(|entry| {
1258            let name = entry.file_name().to_string_lossy();
1259            if entry.file_type().map_or(false, |ft| ft.is_dir()) {
1260                return !matches!(
1261                    name.as_ref(),
1262                    "node_modules"
1263                        | "target"
1264                        | "venv"
1265                        | ".venv"
1266                        | ".git"
1267                        | "__pycache__"
1268                        | ".tox"
1269                        | "dist"
1270                        | "build"
1271                );
1272            }
1273            true
1274        });
1275
1276    let mut files = Vec::new();
1277    for entry in builder.build().filter_map(|entry| entry.ok()) {
1278        if !entry
1279            .file_type()
1280            .map_or(false, |file_type| file_type.is_file())
1281        {
1282            continue;
1283        }
1284        let path = entry.into_path();
1285        if filters.matches(filter_root, &path) {
1286            files.push(path);
1287        }
1288    }
1289
1290    sort_paths_by_mtime_desc(&mut files);
1291    files
1292}
1293
1294pub(crate) fn read_searchable_text(path: &Path) -> Option<String> {
1295    let bytes = fs::read(path).ok()?;
1296    if is_binary_bytes(&bytes) {
1297        return None;
1298    }
1299    String::from_utf8(bytes).ok()
1300}
1301
/// Raw bytes of a file for query-time scanning; `None` if it can't be read
/// (deleted or inaccessible since indexing).
fn read_indexed_file_bytes(path: &Path) -> Option<Vec<u8>> {
    fs::read(path).ok()
}
1305
/// `path` relative to `root`; paths outside `root` come back unchanged.
pub(crate) fn relative_to_root(root: &Path, path: &Path) -> PathBuf {
    match path.strip_prefix(root) {
        Ok(relative) => relative.to_path_buf(),
        Err(_) => path.to_path_buf(),
    }
}
1311
1312/// Sort paths newest-first by mtime, falling back to lexicographic order.
1313///
1314/// Pre-v0.15.2 this called `path_modified_time(...)` directly inside the
1315/// `sort_by()` closure. That made the comparator non-deterministic — a
1316/// `stat()` syscall for the same path can return different values across
1317/// invocations (file edited mid-sort, file deleted, OS clock adjustments,
1318/// concurrent file-watcher activity), and Rust's slice::sort panics at
1319/// runtime when it detects a non-total-order comparator. CI hit this on
1320/// a Pi e2e test where the bridge invalidated files in parallel with grep.
1321///
1322/// Fix: snapshot mtimes ONCE into a HashMap before sorting, then look up
1323/// from the map inside the closure. Pure function ⇒ guaranteed total order.
1324pub(crate) fn sort_paths_by_mtime_desc(paths: &mut [PathBuf]) {
1325    use std::collections::HashMap;
1326    let mut mtimes: HashMap<PathBuf, Option<SystemTime>> = HashMap::with_capacity(paths.len());
1327    for path in paths.iter() {
1328        mtimes
1329            .entry(path.clone())
1330            .or_insert_with(|| path_modified_time(path));
1331    }
1332    paths.sort_by(|left, right| {
1333        let left_mtime = mtimes.get(left).and_then(|v| *v);
1334        let right_mtime = mtimes.get(right).and_then(|v| *v);
1335        right_mtime.cmp(&left_mtime).then_with(|| left.cmp(right))
1336    });
1337}
1338
1339/// See `sort_paths_by_mtime_desc` for why mtimes are snapshotted ahead of
1340/// the sort. Same fix, applied to grep matches that share files.
1341pub(crate) fn sort_grep_matches_by_mtime_desc(matches: &mut [GrepMatch], project_root: &Path) {
1342    use std::collections::HashMap;
1343    let mut mtimes: HashMap<PathBuf, Option<SystemTime>> = HashMap::new();
1344    for m in matches.iter() {
1345        mtimes.entry(m.file.clone()).or_insert_with(|| {
1346            let resolved = resolve_match_path(project_root, &m.file);
1347            path_modified_time(&resolved)
1348        });
1349    }
1350    matches.sort_by(|left, right| {
1351        let left_mtime = mtimes.get(&left.file).and_then(|v| *v);
1352        let right_mtime = mtimes.get(&right.file).and_then(|v| *v);
1353        right_mtime
1354            .cmp(&left_mtime)
1355            .then_with(|| left.file.cmp(&right.file))
1356            .then_with(|| left.line.cmp(&right.line))
1357            .then_with(|| left.column.cmp(&right.column))
1358    });
1359}
1360
1361/// See `sort_paths_by_mtime_desc` for why mtimes are snapshotted ahead of
1362/// the sort. The cached lookup function `modified_for_path` is fast (in-memory
1363/// table from the search index), but it can still return different values if
1364/// the file is modified mid-sort. Snapshot once.
1365fn sort_shared_grep_matches_by_cached_mtime_desc<F>(
1366    matches: &mut [SharedGrepMatch],
1367    modified_for_path: F,
1368) where
1369    F: Fn(&Path) -> Option<SystemTime>,
1370{
1371    use std::collections::HashMap;
1372    let mut mtimes: HashMap<PathBuf, Option<SystemTime>> = HashMap::with_capacity(matches.len());
1373    for m in matches.iter() {
1374        let path = m.file.as_path().to_path_buf();
1375        mtimes
1376            .entry(path.clone())
1377            .or_insert_with(|| modified_for_path(&path));
1378    }
1379    matches.sort_by(|left, right| {
1380        let left_mtime = mtimes.get(left.file.as_path()).and_then(|v| *v);
1381        let right_mtime = mtimes.get(right.file.as_path()).and_then(|v| *v);
1382        right_mtime
1383            .cmp(&left_mtime)
1384            .then_with(|| left.file.as_path().cmp(right.file.as_path()))
1385            .then_with(|| left.line.cmp(&right.line))
1386            .then_with(|| left.column.cmp(&right.column))
1387    });
1388}
1389
1390pub(crate) fn resolve_search_scope(project_root: &Path, path: Option<&str>) -> SearchScope {
1391    let resolved_project_root = canonicalize_or_normalize(project_root);
1392    let root = match path {
1393        Some(path) => {
1394            let path = PathBuf::from(path);
1395            if path.is_absolute() {
1396                canonicalize_or_normalize(&path)
1397            } else {
1398                normalize_path(&resolved_project_root.join(path))
1399            }
1400        }
1401        None => resolved_project_root.clone(),
1402    };
1403
1404    let use_index = is_within_search_root(&resolved_project_root, &root);
1405    SearchScope { root, use_index }
1406}
1407
/// Heuristic binary detection, delegated to the `content_inspector` crate.
pub(crate) fn is_binary_bytes(content: &[u8]) -> bool {
    content_inspector::inspect(content).is_binary()
}
1411
/// Current HEAD commit hash for the repo at `root`, or `None` when `root`
/// is not a git repository (or `git` itself fails).
pub(crate) fn current_git_head(root: &Path) -> Option<String> {
    run_git(root, &["rev-parse", "HEAD"])
}
1415
1416pub fn project_cache_key(project_root: &Path) -> String {
1417    use sha2::{Digest, Sha256};
1418
1419    let mut hasher = Sha256::new();
1420
1421    if let Some(root_commit) = run_git(project_root, &["rev-list", "--max-parents=0", "HEAD"]) {
1422        // Git repo: root commit is the unique identity.
1423        // Same repo cloned anywhere produces the same key.
1424        hasher.update(root_commit.as_bytes());
1425    } else {
1426        // Non-git project: use the canonical filesystem path as identity.
1427        let canonical_root = canonicalize_or_normalize(project_root);
1428        hasher.update(canonical_root.to_string_lossy().as_bytes());
1429    }
1430
1431    let digest = format!("{:x}", hasher.finalize());
1432    digest[..16].to_string()
1433}
1434
1435impl PathFilters {
1436    fn matches(&self, root: &Path, path: &Path) -> bool {
1437        let relative = to_glob_path(&relative_to_root(root, path));
1438        if self
1439            .includes
1440            .as_ref()
1441            .is_some_and(|includes| !includes.is_match(&relative))
1442        {
1443            return false;
1444        }
1445        if self
1446            .excludes
1447            .as_ref()
1448            .is_some_and(|excludes| excludes.is_match(&relative))
1449        {
1450            return false;
1451        }
1452        true
1453    }
1454}
1455
/// Canonicalize `path`, falling back to pure lexical normalization when
/// canonicalization fails (e.g. the path does not exist on disk).
fn canonicalize_or_normalize(path: &Path) -> PathBuf {
    fs::canonicalize(path).unwrap_or_else(|_| normalize_path(path))
}
1459
/// Anchor a (possibly project-relative) match path at `project_root`.
fn resolve_match_path(project_root: &Path, path: &Path) -> PathBuf {
    // `Path::join` already replaces the base entirely when its argument is
    // absolute (documented std behavior), so one join covers both the
    // relative and absolute cases.
    project_root.join(path)
}
1467
/// mtime of `path`, or `None` when the file can't be stat'ed or the
/// platform doesn't report modification times.
fn path_modified_time(path: &Path) -> Option<SystemTime> {
    let metadata = fs::metadata(path).ok()?;
    metadata.modified().ok()
}
1473
/// Lexically normalize a path: drop `.` components and resolve `..` against
/// the components collected so far. A `..` that cannot be popped (path
/// escapes its own prefix) is kept verbatim.
fn normalize_path(path: &Path) -> PathBuf {
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => continue,
            Component::ParentDir => {
                let popped = normalized.pop();
                if !popped {
                    normalized.push(Component::ParentDir);
                }
            }
            other => normalized.push(other),
        }
    }
    normalized
}
1489
/// Verify stored file mtimes against disk. Re-index any files whose mtime changed
/// since the index was last written. Also detect new files and deleted files.
///
/// Called when a cached index's git HEAD matches the current HEAD: the commits
/// agree, but working-tree files may still differ from what was indexed.
fn verify_file_mtimes(index: &mut SearchIndex) {
    // Collect stale files (mtime mismatch or deleted)
    let mut stale_paths = Vec::new();
    for entry in &index.files {
        if entry.path.as_os_str().is_empty() {
            continue; // tombstoned entry
        }
        match fs::metadata(&entry.path) {
            Ok(meta) => {
                // Size is checked alongside mtime: an edit within the same
                // timestamp granularity can leave mtime unchanged while the
                // length differs.
                let current_mtime = meta.modified().unwrap_or(UNIX_EPOCH);
                if current_mtime != entry.modified || meta.len() != entry.size {
                    stale_paths.push(entry.path.clone());
                }
            }
            Err(_) => {
                // File deleted
                stale_paths.push(entry.path.clone());
            }
        }
    }

    // Re-index stale files
    // NOTE(review): `update_file` is defined elsewhere in this file; it is
    // presumably responsible for re-indexing changed files and tombstoning
    // deleted ones — confirm against its definition.
    for path in &stale_paths {
        index.update_file(path);
    }

    // Detect new files not in the index
    let filters = PathFilters::default();
    for path in walk_project_files(&index.project_root, &filters) {
        if !index.path_to_id.contains_key(&path) {
            index.update_file(&path);
        }
    }

    if !stale_paths.is_empty() {
        log::info!(
            "search index: refreshed {} stale file(s) from disk cache",
            stale_paths.len()
        );
    }
}
1533
/// Pure prefix test; both sides are expected to be pre-normalized by the
/// caller (see `resolve_search_scope`).
fn is_within_search_root(search_root: &Path, path: &Path) -> bool {
    path.starts_with(search_root)
}
1537
1538impl QueryBuild {
1539    fn into_query(self) -> RegexQuery {
1540        let mut query = RegexQuery::default();
1541
1542        for run in self.and_runs {
1543            add_run_to_and_query(&mut query, &run);
1544        }
1545
1546        for group in self.or_groups {
1547            let mut trigrams = BTreeSet::new();
1548            let mut filters = HashMap::new();
1549            for run in group {
1550                for (trigram, filter) in trigram_filters(&run) {
1551                    trigrams.insert(trigram);
1552                    merge_filter(filters.entry(trigram).or_default(), filter);
1553                }
1554            }
1555            if !trigrams.is_empty() {
1556                query.or_groups.push(trigrams.into_iter().collect());
1557                query.or_filters.push(filters);
1558            }
1559        }
1560
1561        query
1562    }
1563}
1564
/// Recursively derive trigram requirements from a regex HIR node.
///
/// Only constructs *guaranteed* to appear in any match may contribute:
/// - literals of >= 3 bytes become an AND run;
/// - concatenation accumulates the requirements of all its parts;
/// - alternation becomes a single OR group, but only when every branch
///   yields guaranteed runs — one unguaranteed branch voids the node;
/// - repetition contributes its body only when `min > 0` (a `*`/`?` body
///   may be absent from a match);
/// - empty nodes, character classes, and look-arounds contribute nothing.
fn build_query(hir: &Hir) -> QueryBuild {
    match hir.kind() {
        HirKind::Literal(literal) => {
            if literal.0.len() >= 3 {
                QueryBuild {
                    and_runs: vec![literal.0.to_vec()],
                    or_groups: Vec::new(),
                }
            } else {
                // Shorter than one trigram: nothing usable.
                QueryBuild::default()
            }
        }
        HirKind::Capture(capture) => build_query(&capture.sub),
        HirKind::Concat(parts) => {
            let mut build = QueryBuild::default();
            for part in parts {
                let part_build = build_query(part);
                build.and_runs.extend(part_build.and_runs);
                build.or_groups.extend(part_build.or_groups);
            }
            build
        }
        HirKind::Alternation(parts) => {
            let mut group = Vec::new();
            for part in parts {
                // A branch with no guaranteed runs makes the whole
                // alternation unusable as a filter.
                let Some(mut choices) = guaranteed_run_choices(part) else {
                    return QueryBuild::default();
                };
                group.append(&mut choices);
            }
            if group.is_empty() {
                QueryBuild::default()
            } else {
                QueryBuild {
                    and_runs: Vec::new(),
                    or_groups: vec![group],
                }
            }
        }
        HirKind::Repetition(repetition) => {
            if repetition.min == 0 {
                QueryBuild::default()
            } else {
                build_query(&repetition.sub)
            }
        }
        HirKind::Empty | HirKind::Class(_) | HirKind::Look(_) => QueryBuild::default(),
    }
}
1614
/// Literal runs guaranteed to appear in any string matching `hir`, or `None`
/// when no such guarantee exists.
///
/// Note the asymmetry between concat and alternation: a concat may return
/// runs from just the parts that have them (every part is always present in
/// a match), while an alternation needs runs from *every* branch — a single
/// unguaranteed branch invalidates the whole set.
fn guaranteed_run_choices(hir: &Hir) -> Option<Vec<Vec<u8>>> {
    match hir.kind() {
        HirKind::Literal(literal) => {
            if literal.0.len() >= 3 {
                Some(vec![literal.0.to_vec()])
            } else {
                None
            }
        }
        HirKind::Capture(capture) => guaranteed_run_choices(&capture.sub),
        HirKind::Concat(parts) => {
            let mut runs = Vec::new();
            for part in parts {
                // Partial coverage is fine: parts without runs are skipped.
                if let Some(mut part_runs) = guaranteed_run_choices(part) {
                    runs.append(&mut part_runs);
                }
            }
            if runs.is_empty() {
                None
            } else {
                Some(runs)
            }
        }
        HirKind::Alternation(parts) => {
            let mut runs = Vec::new();
            for part in parts {
                // All-or-nothing: any branch without runs voids the set.
                let Some(mut part_runs) = guaranteed_run_choices(part) else {
                    return None;
                };
                runs.append(&mut part_runs);
            }
            if runs.is_empty() {
                None
            } else {
                Some(runs)
            }
        }
        HirKind::Repetition(repetition) => {
            if repetition.min == 0 {
                None
            } else {
                guaranteed_run_choices(&repetition.sub)
            }
        }
        HirKind::Empty | HirKind::Class(_) | HirKind::Look(_) => None,
    }
}
1662
1663fn add_run_to_and_query(query: &mut RegexQuery, run: &[u8]) {
1664    for (trigram, filter) in trigram_filters(run) {
1665        if !query.and_trigrams.contains(&trigram) {
1666            query.and_trigrams.push(trigram);
1667        }
1668        merge_filter(query.and_filters.entry(trigram).or_default(), filter);
1669    }
1670}
1671
1672fn trigram_filters(run: &[u8]) -> Vec<(u32, PostingFilter)> {
1673    let mut filters: BTreeMap<u32, PostingFilter> = BTreeMap::new();
1674    for (trigram, next_char, position) in extract_trigrams(run) {
1675        let entry: &mut PostingFilter = filters.entry(trigram).or_default();
1676        if next_char != EOF_SENTINEL {
1677            entry.next_mask |= mask_for_next_char(next_char);
1678        }
1679        entry.loc_mask |= mask_for_position(position);
1680    }
1681    filters.into_iter().collect()
1682}
1683
/// OR `filter`'s bloom masks into `target`; filters only accumulate bits,
/// they never narrow.
fn merge_filter(target: &mut PostingFilter, filter: PostingFilter) {
    target.next_mask |= filter.next_mask;
    target.loc_mask |= filter.loc_mask;
}
1688
1689fn mask_for_next_char(next_char: u8) -> u8 {
1690    let bit = (normalize_char(next_char).wrapping_mul(31) & 7) as u32;
1691    1u8 << bit
1692}
1693
/// One-hot mask for a byte offset folded into 8 buckets (offset mod 8).
fn mask_for_position(position: usize) -> u8 {
    // `& 7` is `% 8` for non-negative values.
    1u8 << (position & 7)
}
1697
1698fn build_globset(patterns: &[String]) -> Result<Option<GlobSet>, String> {
1699    if patterns.is_empty() {
1700        return Ok(None);
1701    }
1702
1703    let mut builder = GlobSetBuilder::new();
1704    for pattern in patterns {
1705        let glob = Glob::new(pattern).map_err(|error| error.to_string())?;
1706        builder.add(glob);
1707    }
1708    builder.build().map(Some).map_err(|error| error.to_string())
1709}
1710
/// Read a little-endian `u32` from `reader`.
fn read_u32<R: Read>(reader: &mut R) -> std::io::Result<u32> {
    let mut raw = [0u8; 4];
    reader.read_exact(&mut raw).map(|()| u32::from_le_bytes(raw))
}
1716
/// Read a little-endian `u64` from `reader`.
fn read_u64<R: Read>(reader: &mut R) -> std::io::Result<u64> {
    let mut raw = [0u8; 8];
    reader.read_exact(&mut raw).map(|()| u64::from_le_bytes(raw))
}
1722
/// Write `value` to `writer` as four little-endian bytes.
fn write_u32<W: Write>(writer: &mut W, value: u32) -> std::io::Result<()> {
    let bytes = value.to_le_bytes();
    writer.write_all(&bytes)
}
1726
/// Write `value` to `writer` as eight little-endian bytes.
fn write_u64<W: Write>(writer: &mut W, value: u64) -> std::io::Result<()> {
    let bytes = value.to_le_bytes();
    writer.write_all(&bytes)
}
1730
1731fn write_crc32(writer: &mut BufWriter<File>, path: &Path) -> std::io::Result<()> {
1732    writer.flush()?;
1733    let body = std::fs::read(path)?;
1734    let checksum = crc32fast::hash(&body);
1735    writer.write_all(&checksum.to_le_bytes())
1736}
1737
1738fn verify_crc32(path: &Path) -> std::io::Result<()> {
1739    let bytes = std::fs::read(path)?;
1740    let Some((body, stored)) = bytes.split_last_chunk::<4>() else {
1741        return Err(std::io::Error::other("search index checksum missing"));
1742    };
1743    let expected = u32::from_le_bytes(*stored);
1744    let actual = crc32fast::hash(body);
1745    if actual != expected {
1746        return Err(std::io::Error::other("search index checksum mismatch"));
1747    }
1748    Ok(())
1749}
1750
/// Bytes left between the reader's current position and `total_len`.
///
/// Returns `None` when the position cannot be queried, does not fit in a
/// `usize`, or already lies past `total_len`.
fn remaining_bytes<R: Seek>(reader: &mut R, total_len: usize) -> Option<usize> {
    let position = reader.stream_position().ok()?;
    let position = usize::try_from(position).ok()?;
    total_len.checked_sub(position)
}
1755
/// Run `git -C <root> <args>` and return its trimmed stdout.
///
/// Any failure mode — git missing, non-zero exit status, non-UTF-8 output,
/// or output that is empty after trimming — collapses to `None`.
fn run_git(root: &Path, args: &[&str]) -> Option<String> {
    let output = Command::new("git")
        .arg("-C")
        .arg(root)
        .args(args)
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8(output.stdout).ok()?;
    let trimmed = stdout.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
1774
1775fn apply_git_diff_updates(index: &mut SearchIndex, root: &Path, from: &str, to: &str) -> bool {
1776    let diff_range = format!("{}..{}", from, to);
1777    let output = match Command::new("git")
1778        .arg("-C")
1779        .arg(root)
1780        .args(["diff", "--name-only", &diff_range])
1781        .output()
1782    {
1783        Ok(output) => output,
1784        Err(_) => return false,
1785    };
1786
1787    if !output.status.success() {
1788        return false;
1789    }
1790
1791    let Ok(paths) = String::from_utf8(output.stdout) else {
1792        return false;
1793    };
1794
1795    for relative_path in paths.lines().map(str::trim).filter(|path| !path.is_empty()) {
1796        let path = root.join(relative_path);
1797        if path.exists() {
1798            index.update_file(&path);
1799        } else {
1800            index.remove_file(&path);
1801        }
1802    }
1803
1804    true
1805}
1806
1807fn is_binary_path(path: &Path, size: u64) -> bool {
1808    if size == 0 {
1809        return false;
1810    }
1811
1812    let mut file = match File::open(path) {
1813        Ok(file) => file,
1814        Err(_) => return true,
1815    };
1816
1817    let mut preview = vec![0u8; PREVIEW_BYTES.min(size as usize)];
1818    match file.read(&mut preview) {
1819        Ok(read) => is_binary_bytes(&preview[..read]),
1820        Err(_) => true,
1821    }
1822}
1823
/// Byte offsets at which each line of `content` begins.
///
/// Offset 0 is always present; every byte that follows a `\n` starts a new
/// line, so a trailing newline contributes a final (empty-line) entry.
fn line_starts_bytes(content: &[u8]) -> Vec<usize> {
    let after_newlines = content
        .iter()
        .enumerate()
        .filter(|(_, byte)| **byte == b'\n')
        .map(|(index, _)| index + 1);
    std::iter::once(0).chain(after_newlines).collect()
}
1833
/// Resolve a byte `offset` into `(line, column, line_text)`.
///
/// Line and column are 1-based. The column counts characters (via lossy
/// UTF-8 decoding) from the start of the containing line, and the returned
/// line text has any trailing `\r` stripped. `line_starts` must be the
/// output of `line_starts_bytes(content)`.
fn line_details_bytes(content: &[u8], line_starts: &[usize], offset: usize) -> (u32, u32, String) {
    // Greatest line start <= offset: exact hit, or the slot before the
    // insertion point.
    let line_index = line_starts
        .binary_search(&offset)
        .unwrap_or_else(|insert| insert.saturating_sub(1));
    let line_start = line_starts.get(line_index).copied().unwrap_or(0);
    let newline_offset = content[line_start..].iter().position(|&byte| byte == b'\n');
    let line_end = newline_offset.map_or(content.len(), |length| line_start + length);
    let mut line_slice = &content[line_start..line_end];
    if let Some(stripped) = line_slice.strip_suffix(b"\r") {
        line_slice = stripped;
    }
    let line_text = String::from_utf8_lossy(line_slice).into_owned();
    let prefix = String::from_utf8_lossy(&content[line_start..offset]);
    let column = prefix.chars().count() as u32 + 1;
    (line_index as u32 + 1, column, line_text)
}
1856
/// Render `path` with forward slashes so glob matching behaves the same on
/// Windows-style and POSIX-style paths.
fn to_glob_path(path: &Path) -> String {
    let rendered = path.to_string_lossy();
    rendered
        .chars()
        .map(|c| if c == '\\' { '/' } else { c })
        .collect()
}
1860
// Unit tests: trigram extraction, regex decomposition, candidate
// selection, on-disk persistence (round-trip, corruption, invalid field
// rejection), cache-key stability, search scoping, glob behavior, and
// regression coverage for mtime-ordered sorting.
#[cfg(test)]
mod tests {
    use std::process::Command;

    use super::*;

    #[test]
    fn extract_trigrams_tracks_next_char_and_position() {
        let trigrams = extract_trigrams(b"Rust");
        assert_eq!(trigrams.len(), 2);
        // Trigrams come out lowercased; each carries its follower byte and
        // position, with EOF_SENTINEL marking "no follower".
        assert_eq!(trigrams[0], (pack_trigram(b'r', b'u', b's'), b't', 0));
        assert_eq!(
            trigrams[1],
            (pack_trigram(b'u', b's', b't'), EOF_SENTINEL, 1)
        );
    }

    #[test]
    fn decompose_regex_extracts_literals_and_alternations() {
        // Outer literals become AND trigrams; the alternation becomes one
        // OR group containing trigrams from both branches.
        let query = decompose_regex("abc(def|ghi)xyz");
        assert!(query.and_trigrams.contains(&pack_trigram(b'a', b'b', b'c')));
        assert!(query.and_trigrams.contains(&pack_trigram(b'x', b'y', b'z')));
        assert_eq!(query.or_groups.len(), 1);
        assert!(query.or_groups[0].contains(&pack_trigram(b'd', b'e', b'f')));
        assert!(query.or_groups[0].contains(&pack_trigram(b'g', b'h', b'i')));
    }

    #[test]
    fn candidates_intersect_posting_lists() {
        let mut index = SearchIndex::new();
        let dir = tempfile::tempdir().expect("create temp dir");
        let alpha = dir.path().join("alpha.txt");
        let beta = dir.path().join("beta.txt");
        fs::write(&alpha, "abcdef").expect("write alpha");
        fs::write(&beta, "abcxyz").expect("write beta");
        index.project_root = dir.path().to_path_buf();
        index.index_file(&alpha, b"abcdef");
        index.index_file(&beta, b"abcxyz");

        // Both files contain "abc", but only alpha contains "def" — the
        // AND query must narrow candidates to alpha alone.
        let query = RegexQuery {
            and_trigrams: vec![
                pack_trigram(b'a', b'b', b'c'),
                pack_trigram(b'd', b'e', b'f'),
            ],
            ..RegexQuery::default()
        };

        let candidates = index.candidates(&query);
        assert_eq!(candidates.len(), 1);
        assert_eq!(index.files[candidates[0] as usize].path, alpha);
    }

    #[test]
    fn candidates_apply_bloom_filters() {
        let mut index = SearchIndex::new();
        let dir = tempfile::tempdir().expect("create temp dir");
        let file = dir.path().join("sample.txt");
        fs::write(&file, "abcd efgh").expect("write sample");
        index.project_root = dir.path().to_path_buf();
        index.index_file(&file, b"abcd efgh");

        // "abc" is followed by 'd' at position 0 in the file; a filter
        // demanding follower 'z' must reject the posting.
        let trigram = pack_trigram(b'a', b'b', b'c');
        let matching_filter = PostingFilter {
            next_mask: mask_for_next_char(b'd'),
            loc_mask: mask_for_position(0),
        };
        let non_matching_filter = PostingFilter {
            next_mask: mask_for_next_char(b'z'),
            loc_mask: mask_for_position(0),
        };

        assert_eq!(
            index
                .postings_for_trigram(trigram, Some(matching_filter))
                .len(),
            1
        );
        assert!(index
            .postings_for_trigram(trigram, Some(non_matching_filter))
            .is_empty());
    }

    #[test]
    fn disk_round_trip_preserves_postings_and_files() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        fs::create_dir_all(&project).expect("create project dir");
        let file = project.join("src.txt");
        fs::write(&file, "abcdef").expect("write source");

        let mut index = SearchIndex::build(&project);
        index.git_head = Some("deadbeef".to_string());
        let cache_dir = dir.path().join("cache");
        index.write_to_disk(&cache_dir, index.git_head.as_deref());

        // Reload and check that the git head, file list, and posting map
        // all survived the round trip.
        let loaded = SearchIndex::read_from_disk(&cache_dir).expect("load index from disk");
        assert_eq!(loaded.stored_git_head(), Some("deadbeef"));
        assert_eq!(loaded.files.len(), 1);
        assert_eq!(
            relative_to_root(&loaded.project_root, &loaded.files[0].path),
            PathBuf::from("src.txt")
        );
        assert_eq!(loaded.postings.len(), index.postings.len());
        assert!(loaded
            .postings
            .contains_key(&pack_trigram(b'a', b'b', b'c')));
    }

    #[test]
    fn read_from_disk_rejects_corrupt_postings_checksum() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        fs::create_dir_all(&project).expect("create project dir");
        fs::write(project.join("src.txt"), "abcdef").expect("write source");

        let index = SearchIndex::build(&project);
        let cache_dir = dir.path().join("cache");
        index.write_to_disk(&cache_dir, None);

        // Flip one byte in the middle of the postings file so the trailing
        // CRC32 no longer matches the body.
        let postings_path = cache_dir.join("postings.bin");
        let mut bytes = fs::read(&postings_path).expect("read postings");
        let middle = bytes.len() / 2;
        bytes[middle] ^= 0xff;
        fs::write(&postings_path, bytes).expect("write corrupted postings");

        assert!(SearchIndex::read_from_disk(&cache_dir).is_none());
    }

    #[test]
    fn write_to_disk_uses_temp_files_and_cleans_them_up() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        fs::create_dir_all(&project).expect("create project dir");
        fs::write(project.join("src.txt"), "abcdef").expect("write source");

        let index = SearchIndex::build(&project);
        let cache_dir = dir.path().join("cache");
        index.write_to_disk(&cache_dir, None);

        // Final artifacts exist; the .tmp staging files must be gone.
        assert!(cache_dir.join("postings.bin").is_file());
        assert!(cache_dir.join("lookup.bin").is_file());
        assert!(!cache_dir.join("postings.bin.tmp").exists());
        assert!(!cache_dir.join("lookup.bin.tmp").exists());
    }

    #[test]
    fn project_cache_key_includes_checkout_path() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let source = dir.path().join("source");
        fs::create_dir_all(&source).expect("create source repo dir");
        fs::write(source.join("tracked.txt"), "content\n").expect("write tracked file");

        // Build a real git repo with one commit, then clone it to a second
        // path so the two checkouts share history but not location.
        assert!(Command::new("git")
            .current_dir(&source)
            .args(["init"])
            .status()
            .expect("init git repo")
            .success());
        assert!(Command::new("git")
            .current_dir(&source)
            .args(["add", "."])
            .status()
            .expect("git add")
            .success());
        assert!(Command::new("git")
            .current_dir(&source)
            .args([
                "-c",
                "user.name=AFT Tests",
                "-c",
                "user.email=aft-tests@example.com",
                "commit",
                "-m",
                "initial",
            ])
            .status()
            .expect("git commit")
            .success());

        let clone = dir.path().join("clone");
        assert!(Command::new("git")
            .args(["clone", "--quiet"])
            .arg(&source)
            .arg(&clone)
            .status()
            .expect("git clone")
            .success());

        let source_key = project_cache_key(&source);
        let clone_key = project_cache_key(&clone);

        assert_eq!(source_key.len(), 16);
        assert_eq!(clone_key.len(), 16);
        // Same repo (same root commit) → same cache key regardless of clone path
        assert_eq!(source_key, clone_key);
    }

    #[test]
    fn resolve_search_scope_disables_index_for_external_path() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let outside = dir.path().join("outside");
        fs::create_dir_all(&project).expect("create project dir");
        fs::create_dir_all(&outside).expect("create outside dir");

        // A search rooted outside the project must fall back to an
        // unindexed scan (use_index == false).
        let scope = resolve_search_scope(&project, outside.to_str());

        assert_eq!(
            scope.root,
            fs::canonicalize(&outside).expect("canonicalize outside")
        );
        assert!(!scope.use_index);
    }

    #[test]
    fn grep_filters_matches_to_search_root() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let src = project.join("src");
        let docs = project.join("docs");
        fs::create_dir_all(&src).expect("create src dir");
        fs::create_dir_all(&docs).expect("create docs dir");
        fs::write(src.join("main.rs"), "pub struct SearchIndex;\n").expect("write src file");
        fs::write(docs.join("guide.md"), "SearchIndex guide\n").expect("write docs file");

        // Both files match the pattern, but searching under src/ must only
        // surface the src/ hit.
        let index = SearchIndex::build(&project);
        let result = index.search_grep("SearchIndex", true, &[], &[], &src, 10);

        assert_eq!(result.files_searched, 1);
        assert_eq!(result.files_with_matches, 1);
        assert_eq!(result.matches.len(), 1);
        // Index stores canonicalized paths; on macOS /var → /private/var
        let expected = fs::canonicalize(src.join("main.rs")).expect("canonicalize");
        assert_eq!(result.matches[0].file, expected);
    }

    #[test]
    fn grep_deduplicates_multiple_matches_on_same_line() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let src = project.join("src");
        fs::create_dir_all(&src).expect("create src dir");
        // Two occurrences on one line must collapse to a single match.
        fs::write(src.join("main.rs"), "SearchIndex SearchIndex\n").expect("write src file");

        let index = SearchIndex::build(&project);
        let result = index.search_grep("SearchIndex", true, &[], &[], &src, 10);

        assert_eq!(result.total_matches, 1);
        assert_eq!(result.matches.len(), 1);
    }

    #[test]
    fn grep_reports_total_matches_before_truncation() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let src = project.join("src");
        fs::create_dir_all(&src).expect("create src dir");
        fs::write(src.join("main.rs"), "SearchIndex\nSearchIndex\n").expect("write src file");

        // With max_results = 1, the returned list is truncated but
        // total_matches still reports the full count.
        let index = SearchIndex::build(&project);
        let result = index.search_grep("SearchIndex", true, &[], &[], &src, 1);

        assert_eq!(result.total_matches, 2);
        assert_eq!(result.matches.len(), 1);
        assert!(result.truncated);
    }

    #[test]
    fn glob_filters_results_to_search_root() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let src = project.join("src");
        let scripts = project.join("scripts");
        fs::create_dir_all(&src).expect("create src dir");
        fs::create_dir_all(&scripts).expect("create scripts dir");
        fs::write(src.join("main.rs"), "pub fn main() {}\n").expect("write src file");
        fs::write(scripts.join("tool.rs"), "pub fn tool() {}\n").expect("write scripts file");

        let index = SearchIndex::build(&project);
        let files = index.glob("**/*.rs", &src);

        // scripts/tool.rs matches the pattern but is outside the search
        // root, so only src/main.rs may be returned.
        assert_eq!(
            files,
            vec![fs::canonicalize(src.join("main.rs")).expect("canonicalize src file")]
        );
    }

    #[test]
    fn glob_includes_hidden_and_binary_files() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let project = dir.path().join("project");
        let hidden_dir = project.join(".hidden");
        fs::create_dir_all(&hidden_dir).expect("create hidden dir");
        let hidden_file = hidden_dir.join("data.bin");
        // Leading NUL plus non-UTF-8 bytes: content grep would skip as
        // binary, but glob must still list the file.
        fs::write(&hidden_file, [0u8, 159, 146, 150]).expect("write binary file");

        let index = SearchIndex::build(&project);
        let files = index.glob("**/*.bin", &project);

        assert_eq!(
            files,
            vec![fs::canonicalize(hidden_file).expect("canonicalize binary file")]
        );
    }

    #[test]
    fn read_from_disk_rejects_invalid_nanos() {
        let dir = tempfile::tempdir().expect("create temp dir");
        let cache_dir = dir.path().join("cache");
        fs::create_dir_all(&cache_dir).expect("create cache dir");

        // Hand-assemble a minimal postings file; the 1_000_000_000 written
        // below is the out-of-range nanosecond value (test name) that
        // read_from_disk must reject.
        let mut postings = Vec::new();
        postings.extend_from_slice(INDEX_MAGIC);
        postings.extend_from_slice(&INDEX_VERSION.to_le_bytes());
        postings.extend_from_slice(&0u32.to_le_bytes());
        postings.extend_from_slice(&1u32.to_le_bytes());
        postings.extend_from_slice(&DEFAULT_MAX_FILE_SIZE.to_le_bytes());
        postings.extend_from_slice(&1u32.to_le_bytes());
        postings.extend_from_slice(b"/");
        postings.push(0u8);
        postings.extend_from_slice(&1u32.to_le_bytes());
        postings.extend_from_slice(&0u64.to_le_bytes());
        postings.extend_from_slice(&0u64.to_le_bytes());
        postings.extend_from_slice(&1_000_000_000u32.to_le_bytes());
        postings.extend_from_slice(b"a");
        postings.extend_from_slice(&0u64.to_le_bytes());

        let mut lookup = Vec::new();
        lookup.extend_from_slice(LOOKUP_MAGIC);
        lookup.extend_from_slice(&INDEX_VERSION.to_le_bytes());
        lookup.extend_from_slice(&0u32.to_le_bytes());

        fs::write(cache_dir.join("postings.bin"), postings).expect("write postings");
        fs::write(cache_dir.join("lookup.bin"), lookup).expect("write lookup");

        assert!(SearchIndex::read_from_disk(&cache_dir).is_none());
    }

    /// Regression: v0.15.2 — sort_paths_by_mtime_desc panicked when files
    /// changed between cmp() calls.
    ///
    /// Pre-fix, the sort closure called `path_modified_time(path)` directly,
    /// which does a `stat()` syscall. If the file was deleted, modified, or
    /// touched mid-sort, the comparator returned different values for the
    /// same input pair on different invocations. Rust's slice::sort detects
    /// this and panics with "user-provided comparison function does not
    /// correctly implement a total order".
    ///
    /// CI hit this on a Pi e2e test (workflow run 24887807972) where the
    /// bridge invalidated files in parallel with grep's sort path. This
    /// test simulates the worst case: most paths don't exist (Err from
    /// fs::metadata) and sort still completes successfully.
    #[test]
    fn sort_paths_by_mtime_desc_does_not_panic_on_missing_files() {
        // Mix of existing and non-existing paths in deliberately
        // non-monotonic order — pre-fix, the sort would call stat() at
        // least N log N times and any flakiness would trigger the panic.
        let dir = tempfile::tempdir().expect("create tempdir");
        let mut paths: Vec<PathBuf> = Vec::new();
        for i in 0..30 {
            // Half exist, half don't.
            let path = if i % 2 == 0 {
                let p = dir.path().join(format!("real-{i}.rs"));
                fs::write(&p, format!("// {i}\n")).expect("write");
                p
            } else {
                dir.path().join(format!("missing-{i}.rs"))
            };
            paths.push(path);
        }

        // Run the sort many times to maximise the chance of catching any
        // residual non-determinism. Pre-fix: panic. Post-fix: stable.
        for _ in 0..50 {
            let mut copy = paths.clone();
            sort_paths_by_mtime_desc(&mut copy);
            assert_eq!(copy.len(), paths.len());
        }
    }

    /// Regression: v0.15.2 — sort_grep_matches_by_mtime_desc panicked under
    /// the same conditions as sort_paths_by_mtime_desc. See the
    /// sort_paths_... test above for the full rationale.
    #[test]
    fn sort_grep_matches_by_mtime_desc_does_not_panic_on_missing_files() {
        let dir = tempfile::tempdir().expect("create tempdir");
        let mut matches: Vec<GrepMatch> = Vec::new();
        for i in 0..30 {
            let file = if i % 2 == 0 {
                let p = dir.path().join(format!("real-{i}.rs"));
                fs::write(&p, format!("// {i}\n")).expect("write");
                p
            } else {
                dir.path().join(format!("missing-{i}.rs"))
            };
            matches.push(GrepMatch {
                file,
                line: u32::try_from(i).unwrap_or(0),
                column: 0,
                line_text: format!("match {i}"),
                match_text: format!("match {i}"),
            });
        }

        for _ in 0..50 {
            let mut copy = matches.clone();
            sort_grep_matches_by_mtime_desc(&mut copy, dir.path());
            assert_eq!(copy.len(), matches.len());
        }
    }
}