//! BM25 inverted index over code chunks extracted from a source tree.
//! (lean_ctx/core/vector_index.rs)
1use std::collections::HashMap;
2use std::path::{Path, PathBuf};
3
4use md5::{Digest, Md5};
5use serde::{Deserialize, Serialize};
6
/// One indexable unit of code: a single detected symbol (or a whole file
/// when no symbols were found), plus its pre-computed tokens.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeChunk {
    /// File path relative to the indexed root.
    pub file_path: String,
    /// Name of the detected symbol; the file path itself for whole-file chunks.
    pub symbol_name: String,
    /// Coarse classification of the symbol.
    pub kind: ChunkKind,
    /// 1-based first line of the chunk in the source file.
    pub start_line: usize,
    /// 1-based last line of the chunk (inclusive).
    pub end_line: usize,
    /// Raw text of the chunk (truncated to 50 lines for whole-file chunks).
    pub content: String,
    /// Tokens extracted from the chunk: identifier-like runs plus camelCase parts.
    pub tokens: Vec<String>,
    /// Cached `tokens.len()`, used as the BM25 document length.
    pub token_count: usize,
}
18
/// Coarse kind of the symbol a chunk was extracted from.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum ChunkKind {
    Function,
    /// Also used for enums, traits, and interfaces (see `detect_symbol`).
    Struct,
    Impl,
    /// Whole-file fallback chunk when no symbols were detected.
    Module,
    Class,
    /// NOTE(review): not produced by the extractor in this file — confirm a
    /// caller elsewhere uses it before removing.
    Method,
    /// NOTE(review): likewise unused by the extractor visible here.
    Other,
}
29
/// A serializable BM25 inverted index over [`CodeChunk`]s.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BM25Index {
    /// All indexed chunks; posting lists refer to positions in this vector.
    pub chunks: Vec<CodeChunk>,
    /// Lowercased term -> postings of `(chunk index, term-frequency weight)`.
    /// Weights for the same chunk are summed at query time.
    pub inverted: HashMap<String, Vec<(usize, f64)>>,
    /// Mean token count across all chunks (BM25 length normalization).
    pub avg_doc_len: f64,
    /// Number of indexed chunks.
    pub doc_count: usize,
    /// Term -> number of distinct chunks containing it (document frequency).
    pub doc_freqs: HashMap<String, usize>,
}
38
/// One hit returned by [`BM25Index::search`], denormalized for display.
#[derive(Debug, Clone)]
pub struct SearchResult {
    /// Index of the matching chunk in `BM25Index::chunks`.
    pub chunk_idx: usize,
    /// Accumulated BM25 score across all query terms (higher is better).
    pub score: f64,
    pub file_path: String,
    pub symbol_name: String,
    pub kind: ChunkKind,
    pub start_line: usize,
    pub end_line: usize,
    /// First five lines of the chunk's content.
    pub snippet: String,
}
50
/// BM25 `k1`: term-frequency saturation strength (1.2 is the classic default).
const BM25_K1: f64 = 1.2;
/// BM25 `b`: degree of document-length normalization (0.75 is the classic default).
const BM25_B: f64 = 0.75;
53
/// `Default` is an empty index, delegating to [`BM25Index::new`].
impl Default for BM25Index {
    fn default() -> Self {
        Self::new()
    }
}
59
60impl BM25Index {
61    pub fn new() -> Self {
62        Self {
63            chunks: Vec::new(),
64            inverted: HashMap::new(),
65            avg_doc_len: 0.0,
66            doc_count: 0,
67            doc_freqs: HashMap::new(),
68        }
69    }
70
71    pub fn build_from_directory(root: &Path) -> Self {
72        let mut index = Self::new();
73        let walker = ignore::WalkBuilder::new(root)
74            .hidden(true)
75            .git_ignore(true)
76            .max_depth(Some(10))
77            .build();
78
79        let mut file_count = 0usize;
80        for entry in walker.flatten() {
81            if file_count >= 2000 {
82                break;
83            }
84            let path = entry.path();
85            if !path.is_file() {
86                continue;
87            }
88            if !is_code_file(path) {
89                continue;
90            }
91            if let Ok(content) = std::fs::read_to_string(path) {
92                let rel = path
93                    .strip_prefix(root)
94                    .unwrap_or(path)
95                    .to_string_lossy()
96                    .to_string();
97                let chunks = extract_chunks(&rel, &content);
98                for chunk in chunks {
99                    index.add_chunk(chunk);
100                }
101                file_count += 1;
102            }
103        }
104
105        index.finalize();
106        index
107    }
108
109    fn add_chunk(&mut self, chunk: CodeChunk) {
110        let idx = self.chunks.len();
111
112        for token in &chunk.tokens {
113            let lower = token.to_lowercase();
114            self.inverted.entry(lower).or_default().push((idx, 1.0));
115        }
116
117        self.chunks.push(chunk);
118    }
119
120    fn finalize(&mut self) {
121        self.doc_count = self.chunks.len();
122        if self.doc_count == 0 {
123            return;
124        }
125
126        let total_len: usize = self.chunks.iter().map(|c| c.token_count).sum();
127        self.avg_doc_len = total_len as f64 / self.doc_count as f64;
128
129        self.doc_freqs.clear();
130        for (term, postings) in &self.inverted {
131            let unique_docs: std::collections::HashSet<usize> =
132                postings.iter().map(|(idx, _)| *idx).collect();
133            self.doc_freqs.insert(term.clone(), unique_docs.len());
134        }
135    }
136
137    pub fn search(&self, query: &str, top_k: usize) -> Vec<SearchResult> {
138        let query_tokens = tokenize(query);
139        if query_tokens.is_empty() || self.doc_count == 0 {
140            return Vec::new();
141        }
142
143        let mut scores: HashMap<usize, f64> = HashMap::new();
144
145        for token in &query_tokens {
146            let lower = token.to_lowercase();
147            let df = *self.doc_freqs.get(&lower).unwrap_or(&0) as f64;
148            if df == 0.0 {
149                continue;
150            }
151
152            let idf = ((self.doc_count as f64 - df + 0.5) / (df + 0.5) + 1.0).ln();
153
154            if let Some(postings) = self.inverted.get(&lower) {
155                let mut doc_tfs: HashMap<usize, f64> = HashMap::new();
156                for (idx, weight) in postings {
157                    *doc_tfs.entry(*idx).or_insert(0.0) += weight;
158                }
159
160                for (doc_idx, tf) in &doc_tfs {
161                    let doc_len = self.chunks[*doc_idx].token_count as f64;
162                    let norm_len = doc_len / self.avg_doc_len.max(1.0);
163                    let bm25 = idf * (tf * (BM25_K1 + 1.0))
164                        / (tf + BM25_K1 * (1.0 - BM25_B + BM25_B * norm_len));
165
166                    *scores.entry(*doc_idx).or_insert(0.0) += bm25;
167                }
168            }
169        }
170
171        let mut results: Vec<SearchResult> = scores
172            .into_iter()
173            .map(|(idx, score)| {
174                let chunk = &self.chunks[idx];
175                let snippet = chunk.content.lines().take(5).collect::<Vec<_>>().join("\n");
176                SearchResult {
177                    chunk_idx: idx,
178                    score,
179                    file_path: chunk.file_path.clone(),
180                    symbol_name: chunk.symbol_name.clone(),
181                    kind: chunk.kind.clone(),
182                    start_line: chunk.start_line,
183                    end_line: chunk.end_line,
184                    snippet,
185                }
186            })
187            .collect();
188
189        results.sort_by(|a, b| {
190            b.score
191                .partial_cmp(&a.score)
192                .unwrap_or(std::cmp::Ordering::Equal)
193        });
194        results.truncate(top_k);
195        results
196    }
197
198    pub fn save(&self, root: &Path) -> std::io::Result<()> {
199        let dir = index_dir(root);
200        std::fs::create_dir_all(&dir)?;
201        let data = serde_json::to_string(self).map_err(std::io::Error::other)?;
202        std::fs::write(dir.join("bm25_index.json"), data)?;
203        Ok(())
204    }
205
206    pub fn load(root: &Path) -> Option<Self> {
207        let path = index_dir(root).join("bm25_index.json");
208        let data = std::fs::read_to_string(path).ok()?;
209        serde_json::from_str(&data).ok()
210    }
211
212    pub fn load_or_build(root: &Path) -> Self {
213        Self::load(root).unwrap_or_else(|| {
214            let built = Self::build_from_directory(root);
215            let _ = built.save(root);
216            built
217        })
218    }
219
220    pub fn index_file_path(root: &Path) -> PathBuf {
221        index_dir(root).join("bm25_index.json")
222    }
223}
224
/// Per-root storage directory: `<data_dir>/vectors/<md5-of-root-path>`.
/// MD5 here is only a filesystem key derived from the path string, not a
/// security boundary.
fn index_dir(root: &Path) -> PathBuf {
    let mut hasher = Md5::new();
    hasher.update(root.to_string_lossy().as_bytes());
    let hash = format!("{:x}", hasher.finalize());
    // Fall back to the current directory if no data dir can be resolved.
    crate::core::data_dir::lean_ctx_data_dir()
        .unwrap_or_else(|_| PathBuf::from("."))
        .join("vectors")
        .join(hash)
}
234
/// Returns true when `path` has a file extension this indexer treats as
/// source code. Paths without an extension are rejected.
pub(crate) fn is_code_file(path: &Path) -> bool {
    // Keep this list in sync with the languages `detect_symbol` understands.
    const CODE_EXTENSIONS: &[&str] = &[
        "rs", "ts", "tsx", "js", "jsx", "py", "go", "java", "c", "cpp", "h",
        "hpp", "rb", "cs", "kt", "swift", "php", "scala", "ex", "exs", "zig",
        "lua", "dart", "vue", "svelte",
    ];

    path.extension()
        .and_then(|e| e.to_str())
        .is_some_and(|ext| CODE_EXTENSIONS.contains(&ext))
}
265
/// Tokenizes `text` into identifier-like runs (alphanumeric plus `_`) of at
/// least two bytes, then appends camelCase sub-words via
/// [`split_camel_case_tokens`].
fn tokenize(text: &str) -> Vec<String> {
    let raw: Vec<String> = text
        .split(|c: char| !(c.is_alphanumeric() || c == '_'))
        .filter(|run| run.len() >= 2)
        .map(str::to_string)
        .collect();

    split_camel_case_tokens(&raw)
}

/// Expands each token with its camelCase components, keeping the original.
/// A word boundary is an uppercase character that is not followed by another
/// uppercase character (so "parseHttpRequest" yields "parse", "Http",
/// "Request"). Components shorter than two bytes are dropped.
fn split_camel_case_tokens(tokens: &[String]) -> Vec<String> {
    let mut expanded = Vec::new();

    for token in tokens {
        // Always keep the whole token so exact matches still work.
        expanded.push(token.clone());

        let chars: Vec<char> = token.chars().collect();
        let mut seg_start = 0;

        for i in 1..chars.len() {
            let next_is_upper = chars.get(i + 1).is_some_and(|c| c.is_uppercase());
            if chars[i].is_uppercase() && !next_is_upper {
                let segment: String = chars[seg_start..i].iter().collect();
                if segment.len() >= 2 {
                    expanded.push(segment);
                }
                seg_start = i;
            }
        }

        // Emit the trailing segment only if the token was actually split.
        if seg_start > 0 {
            let tail: String = chars[seg_start..].iter().collect();
            if tail.len() >= 2 {
                expanded.push(tail);
            }
        }
    }

    expanded
}
311
312fn extract_chunks(file_path: &str, content: &str) -> Vec<CodeChunk> {
313    let lines: Vec<&str> = content.lines().collect();
314    if lines.is_empty() {
315        return Vec::new();
316    }
317
318    let mut chunks = Vec::new();
319    let mut i = 0;
320
321    while i < lines.len() {
322        let trimmed = lines[i].trim();
323
324        if let Some((name, kind)) = detect_symbol(trimmed) {
325            let start = i;
326            let end = find_block_end(&lines, i);
327            let block: String = lines[start..=end.min(lines.len() - 1)].to_vec().join("\n");
328            let tokens = tokenize(&block);
329            let token_count = tokens.len();
330
331            chunks.push(CodeChunk {
332                file_path: file_path.to_string(),
333                symbol_name: name,
334                kind,
335                start_line: start + 1,
336                end_line: end + 1,
337                content: block,
338                tokens,
339                token_count,
340            });
341
342            i = end + 1;
343        } else {
344            i += 1;
345        }
346    }
347
348    if chunks.is_empty() && !content.is_empty() {
349        let tokens = tokenize(content);
350        let token_count = tokens.len();
351        let snippet = lines
352            .iter()
353            .take(50)
354            .copied()
355            .collect::<Vec<_>>()
356            .join("\n");
357        chunks.push(CodeChunk {
358            file_path: file_path.to_string(),
359            symbol_name: file_path.to_string(),
360            kind: ChunkKind::Module,
361            start_line: 1,
362            end_line: lines.len(),
363            content: snippet,
364            tokens,
365            token_count,
366        });
367    }
368
369    chunks
370}
371
372fn detect_symbol(line: &str) -> Option<(String, ChunkKind)> {
373    let trimmed = line.trim();
374
375    let patterns: &[(&str, ChunkKind)] = &[
376        ("pub async fn ", ChunkKind::Function),
377        ("async fn ", ChunkKind::Function),
378        ("pub fn ", ChunkKind::Function),
379        ("fn ", ChunkKind::Function),
380        ("pub struct ", ChunkKind::Struct),
381        ("struct ", ChunkKind::Struct),
382        ("pub enum ", ChunkKind::Struct),
383        ("enum ", ChunkKind::Struct),
384        ("impl ", ChunkKind::Impl),
385        ("pub trait ", ChunkKind::Struct),
386        ("trait ", ChunkKind::Struct),
387        ("export function ", ChunkKind::Function),
388        ("export async function ", ChunkKind::Function),
389        ("export default function ", ChunkKind::Function),
390        ("function ", ChunkKind::Function),
391        ("async function ", ChunkKind::Function),
392        ("export class ", ChunkKind::Class),
393        ("class ", ChunkKind::Class),
394        ("export interface ", ChunkKind::Struct),
395        ("interface ", ChunkKind::Struct),
396        ("def ", ChunkKind::Function),
397        ("async def ", ChunkKind::Function),
398        ("class ", ChunkKind::Class),
399        ("func ", ChunkKind::Function),
400    ];
401
402    for (prefix, kind) in patterns {
403        if let Some(rest) = trimmed.strip_prefix(prefix) {
404            let name: String = rest
405                .chars()
406                .take_while(|c| c.is_alphanumeric() || *c == '_' || *c == '<')
407                .take_while(|c| *c != '<')
408                .collect();
409            if !name.is_empty() {
410                return Some((name, kind.clone()));
411            }
412        }
413    }
414
415    None
416}
417
/// Heuristically finds the 0-based index of the last line of the block that
/// starts at `start`.
///
/// Brace-delimited blocks end on the line where `{`/`}` nesting returns to
/// zero. Previously `(` and `)` also counted as block delimiters, so any
/// signature like `fn foo(x) {` closed its own parentheses on the first
/// line and the "block" collapsed to the signature — only braces drive
/// nesting now. For brace-less (indentation-style) definitions the block
/// ends at the first blank or non-indented line after `start + 2`; the old
/// code tested `starts_with(' ')` on an already-trimmed string, which was
/// always false and made the indent check dead.
fn find_block_end(lines: &[&str], start: usize) -> usize {
    let mut depth = 0i32;
    let mut found_open = false;

    for (i, line) in lines.iter().enumerate().skip(start) {
        for ch in line.chars() {
            match ch {
                '{' => {
                    depth += 1;
                    found_open = true;
                }
                // Guarded so stray closers before any opener are ignored.
                '}' if depth > 0 => {
                    depth -= 1;
                    if depth == 0 {
                        return i;
                    }
                }
                _ => {}
            }
        }

        // No brace seen after a few lines: assume an indentation-style
        // language and end the block at the first blank or unindented line.
        if !found_open && i > start + 2 {
            let raw = lines[i];
            if raw.trim().is_empty() || (!raw.starts_with(' ') && !raw.starts_with('\t')) {
                return i.saturating_sub(1);
            }
        }
    }

    // Nothing terminated the block: cap it at 50 lines past `start`.
    (start + 50).min(lines.len().saturating_sub(1))
}
455
456pub fn format_search_results(results: &[SearchResult], compact: bool) -> String {
457    if results.is_empty() {
458        return "No results found.".to_string();
459    }
460
461    let mut out = String::new();
462    for (i, r) in results.iter().enumerate() {
463        if compact {
464            out.push_str(&format!(
465                "{}. {:.2} {}:{}-{} {:?} {}\n",
466                i + 1,
467                r.score,
468                r.file_path,
469                r.start_line,
470                r.end_line,
471                r.kind,
472                r.symbol_name,
473            ));
474        } else {
475            out.push_str(&format!(
476                "\n--- Result {} (score: {:.2}) ---\n{} :: {} [{:?}] (L{}-{})\n{}\n",
477                i + 1,
478                r.score,
479                r.file_path,
480                r.symbol_name,
481                r.kind,
482                r.start_line,
483                r.end_line,
484                r.snippet,
485            ));
486        }
487    }
488    out
489}
490
#[cfg(test)]
mod tests {
    use super::*;

    /// Identifier-like runs survive tokenization; punctuation is dropped.
    #[test]
    fn tokenize_splits_code() {
        let tokens = tokenize("fn calculate_total(items: Vec<Item>) -> f64");
        assert!(tokens.contains(&"calculate_total".to_string()));
        assert!(tokens.contains(&"items".to_string()));
        assert!(tokens.contains(&"Vec".to_string()));
    }

    /// camelCase tokens are kept whole and also split into sub-words.
    #[test]
    fn camel_case_splitting() {
        let tokens = split_camel_case_tokens(&["calculateTotal".to_string()]);
        assert!(tokens.contains(&"calculateTotal".to_string()));
        assert!(tokens.contains(&"calculate".to_string()));
        assert!(tokens.contains(&"Total".to_string()));
    }

    /// A Rust `pub fn` line yields its name and the `Function` kind.
    #[test]
    fn detect_rust_function() {
        let (name, kind) =
            detect_symbol("pub fn process_request(req: Request) -> Response {").unwrap();
        assert_eq!(name, "process_request");
        assert_eq!(kind, ChunkKind::Function);
    }

    /// End-to-end: the chunk sharing terms with the query outranks the other.
    #[test]
    fn bm25_search_finds_relevant() {
        let mut index = BM25Index::new();
        index.add_chunk(CodeChunk {
            file_path: "auth.rs".into(),
            symbol_name: "validate_token".into(),
            kind: ChunkKind::Function,
            start_line: 1,
            end_line: 10,
            content: "fn validate_token(token: &str) -> bool { check_jwt_expiry(token) }".into(),
            tokens: tokenize("fn validate_token token str bool check_jwt_expiry token"),
            token_count: 8,
        });
        index.add_chunk(CodeChunk {
            file_path: "db.rs".into(),
            symbol_name: "connect_database".into(),
            kind: ChunkKind::Function,
            start_line: 1,
            end_line: 5,
            content: "fn connect_database(url: &str) -> Pool { create_pool(url) }".into(),
            tokens: tokenize("fn connect_database url str Pool create_pool url"),
            token_count: 7,
        });
        index.finalize();

        let results = index.search("jwt token validation", 5);
        assert!(!results.is_empty());
        assert_eq!(results[0].symbol_name, "validate_token");
    }
}