1use std::collections::HashMap;
2use std::path::{Path, PathBuf};
3
4use md5::{Digest, Md5};
5use serde::{Deserialize, Serialize};
6
/// A single indexed unit of source code: one detected symbol (function,
/// type, impl, …) or a whole-file fallback chunk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeChunk {
    /// Source file path, relative to the indexed root when possible.
    pub file_path: String,
    /// Detected symbol name (the file path itself for whole-file chunks).
    pub symbol_name: String,
    /// Kind of definition this chunk represents.
    pub kind: ChunkKind,
    /// 1-based first line of the chunk in the source file.
    pub start_line: usize,
    /// 1-based last line of the chunk (inclusive).
    pub end_line: usize,
    /// Raw chunk text (truncated to 50 lines for whole-file fallback chunks).
    pub content: String,
    /// Tokens extracted from the chunk, including camelCase sub-tokens.
    pub tokens: Vec<String>,
    /// Number of tokens; used as the BM25 document length.
    pub token_count: usize,
}
18
/// Category of a code chunk, derived from the definition keyword that
/// introduced it (see `detect_symbol`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum ChunkKind {
    Function,
    /// Also used for enums, traits, and interfaces.
    Struct,
    Impl,
    /// Whole-file fallback chunk when no symbols were detected.
    Module,
    Class,
    Method,
    Other,
}
29
/// In-memory BM25 search index over code chunks, serializable to JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BM25Index {
    /// All indexed chunks; posting lists refer to positions in this vector.
    pub chunks: Vec<CodeChunk>,
    /// Inverted index: lowercased term -> (chunk index, weight) postings.
    pub inverted: HashMap<String, Vec<(usize, f64)>>,
    /// Average chunk length in tokens; recomputed by `finalize`.
    pub avg_doc_len: f64,
    /// Total number of indexed chunks.
    pub doc_count: usize,
    /// Document frequency per term: number of distinct chunks containing it.
    pub doc_freqs: HashMap<String, usize>,
}
38
/// One ranked hit returned by `BM25Index::search`.
#[derive(Debug, Clone)]
pub struct SearchResult {
    /// Position of the matching chunk in `BM25Index::chunks`.
    pub chunk_idx: usize,
    /// Accumulated BM25 score across query terms (higher is more relevant).
    pub score: f64,
    pub file_path: String,
    pub symbol_name: String,
    pub kind: ChunkKind,
    pub start_line: usize,
    pub end_line: usize,
    /// First few lines of the chunk content, for display.
    pub snippet: String,
}
50
// Standard BM25 tuning parameters: k1 controls term-frequency saturation,
// b controls document-length normalization.
const BM25_K1: f64 = 1.2;
const BM25_B: f64 = 0.75;
53
impl Default for BM25Index {
    /// An empty index; identical to `BM25Index::new()`.
    fn default() -> Self {
        Self::new()
    }
}
59
60impl BM25Index {
61 pub fn new() -> Self {
62 Self {
63 chunks: Vec::new(),
64 inverted: HashMap::new(),
65 avg_doc_len: 0.0,
66 doc_count: 0,
67 doc_freqs: HashMap::new(),
68 }
69 }
70
71 pub fn build_from_directory(root: &Path) -> Self {
72 let mut index = Self::new();
73 let walker = ignore::WalkBuilder::new(root)
74 .hidden(true)
75 .git_ignore(true)
76 .max_depth(Some(10))
77 .build();
78
79 let mut file_count = 0usize;
80 for entry in walker.flatten() {
81 if file_count >= 2000 {
82 break;
83 }
84 let path = entry.path();
85 if !path.is_file() {
86 continue;
87 }
88 if !is_code_file(path) {
89 continue;
90 }
91 if let Ok(content) = std::fs::read_to_string(path) {
92 let rel = path
93 .strip_prefix(root)
94 .unwrap_or(path)
95 .to_string_lossy()
96 .to_string();
97 let chunks = extract_chunks(&rel, &content);
98 for chunk in chunks {
99 index.add_chunk(chunk);
100 }
101 file_count += 1;
102 }
103 }
104
105 index.finalize();
106 index
107 }
108
109 fn add_chunk(&mut self, chunk: CodeChunk) {
110 let idx = self.chunks.len();
111
112 for token in &chunk.tokens {
113 let lower = token.to_lowercase();
114 self.inverted.entry(lower).or_default().push((idx, 1.0));
115 }
116
117 self.chunks.push(chunk);
118 }
119
120 fn finalize(&mut self) {
121 self.doc_count = self.chunks.len();
122 if self.doc_count == 0 {
123 return;
124 }
125
126 let total_len: usize = self.chunks.iter().map(|c| c.token_count).sum();
127 self.avg_doc_len = total_len as f64 / self.doc_count as f64;
128
129 self.doc_freqs.clear();
130 for (term, postings) in &self.inverted {
131 let unique_docs: std::collections::HashSet<usize> =
132 postings.iter().map(|(idx, _)| *idx).collect();
133 self.doc_freqs.insert(term.clone(), unique_docs.len());
134 }
135 }
136
137 pub fn search(&self, query: &str, top_k: usize) -> Vec<SearchResult> {
138 let query_tokens = tokenize(query);
139 if query_tokens.is_empty() || self.doc_count == 0 {
140 return Vec::new();
141 }
142
143 let mut scores: HashMap<usize, f64> = HashMap::new();
144
145 for token in &query_tokens {
146 let lower = token.to_lowercase();
147 let df = *self.doc_freqs.get(&lower).unwrap_or(&0) as f64;
148 if df == 0.0 {
149 continue;
150 }
151
152 let idf = ((self.doc_count as f64 - df + 0.5) / (df + 0.5) + 1.0).ln();
153
154 if let Some(postings) = self.inverted.get(&lower) {
155 let mut doc_tfs: HashMap<usize, f64> = HashMap::new();
156 for (idx, weight) in postings {
157 *doc_tfs.entry(*idx).or_insert(0.0) += weight;
158 }
159
160 for (doc_idx, tf) in &doc_tfs {
161 let doc_len = self.chunks[*doc_idx].token_count as f64;
162 let norm_len = doc_len / self.avg_doc_len.max(1.0);
163 let bm25 = idf * (tf * (BM25_K1 + 1.0))
164 / (tf + BM25_K1 * (1.0 - BM25_B + BM25_B * norm_len));
165
166 *scores.entry(*doc_idx).or_insert(0.0) += bm25;
167 }
168 }
169 }
170
171 let mut results: Vec<SearchResult> = scores
172 .into_iter()
173 .map(|(idx, score)| {
174 let chunk = &self.chunks[idx];
175 let snippet = chunk.content.lines().take(5).collect::<Vec<_>>().join("\n");
176 SearchResult {
177 chunk_idx: idx,
178 score,
179 file_path: chunk.file_path.clone(),
180 symbol_name: chunk.symbol_name.clone(),
181 kind: chunk.kind.clone(),
182 start_line: chunk.start_line,
183 end_line: chunk.end_line,
184 snippet,
185 }
186 })
187 .collect();
188
189 results.sort_by(|a, b| {
190 b.score
191 .partial_cmp(&a.score)
192 .unwrap_or(std::cmp::Ordering::Equal)
193 });
194 results.truncate(top_k);
195 results
196 }
197
198 pub fn save(&self, root: &Path) -> std::io::Result<()> {
199 let dir = index_dir(root);
200 std::fs::create_dir_all(&dir)?;
201 let data = serde_json::to_string(self).map_err(std::io::Error::other)?;
202 std::fs::write(dir.join("bm25_index.json"), data)?;
203 Ok(())
204 }
205
206 pub fn load(root: &Path) -> Option<Self> {
207 let path = index_dir(root).join("bm25_index.json");
208 let data = std::fs::read_to_string(path).ok()?;
209 serde_json::from_str(&data).ok()
210 }
211
212 pub fn load_or_build(root: &Path) -> Self {
213 Self::load(root).unwrap_or_else(|| {
214 let built = Self::build_from_directory(root);
215 let _ = built.save(root);
216 built
217 })
218 }
219
220 pub fn index_file_path(root: &Path) -> PathBuf {
221 index_dir(root).join("bm25_index.json")
222 }
223}
224
225fn index_dir(root: &Path) -> PathBuf {
226 let mut hasher = Md5::new();
227 hasher.update(root.to_string_lossy().as_bytes());
228 let hash = format!("{:x}", hasher.finalize());
229 dirs::home_dir()
230 .unwrap_or_else(|| PathBuf::from("."))
231 .join(".lean-ctx")
232 .join("vectors")
233 .join(hash)
234}
235
/// Returns true when `path` has a file extension belonging to one of the
/// source languages this indexer understands. Extensionless files never
/// match.
pub(crate) fn is_code_file(path: &Path) -> bool {
    const CODE_EXTENSIONS: &[&str] = &[
        "rs", "ts", "tsx", "js", "jsx", "py", "go", "java", "c", "cpp", "h",
        "hpp", "rb", "cs", "kt", "swift", "php", "scala", "ex", "exs", "zig",
        "lua", "dart", "vue", "svelte",
    ];
    path.extension()
        .and_then(|e| e.to_str())
        .is_some_and(|ext| CODE_EXTENSIONS.contains(&ext))
}
266
267fn tokenize(text: &str) -> Vec<String> {
268 let mut tokens = Vec::new();
269 let mut current = String::new();
270
271 for ch in text.chars() {
272 if ch.is_alphanumeric() || ch == '_' {
273 current.push(ch);
274 } else {
275 if current.len() >= 2 {
276 tokens.push(current.clone());
277 }
278 current.clear();
279 }
280 }
281 if current.len() >= 2 {
282 tokens.push(current);
283 }
284
285 split_camel_case_tokens(&tokens)
286}
287
/// For every token, keeps the token itself and additionally emits its
/// camelCase segments. A split point sits before an uppercase letter that
/// starts a new word (i.e. the following character, if any, is not also
/// uppercase, so all-caps runs like "HTTP" stay together). Segments shorter
/// than two bytes are dropped.
fn split_camel_case_tokens(tokens: &[String]) -> Vec<String> {
    let mut result = Vec::new();
    for token in tokens {
        result.push(token.clone());

        let chars: Vec<char> = token.chars().collect();
        let cuts: Vec<usize> = (1..chars.len())
            .filter(|&i| {
                chars[i].is_uppercase()
                    && (i + 1 >= chars.len() || !chars[i + 1].is_uppercase())
            })
            .collect();

        // Tokens with no camelCase boundary contribute nothing extra.
        if !cuts.is_empty() {
            let mut prev = 0;
            for &cut in &cuts {
                push_segment(&mut result, &chars[prev..cut]);
                prev = cut;
            }
            push_segment(&mut result, &chars[prev..]);
        }
    }
    result
}

/// Appends `segment` to `out` when it is at least two bytes long.
fn push_segment(out: &mut Vec<String>, segment: &[char]) {
    let part: String = segment.iter().collect();
    if part.len() >= 2 {
        out.push(part);
    }
}
312
/// Splits a file's content into symbol-level chunks. Scans line by line for
/// definition headers (`detect_symbol`); each hit becomes a chunk spanning
/// to the end of its block (`find_block_end`). Files with no detected
/// symbols fall back to a single whole-file `Module` chunk.
fn extract_chunks(file_path: &str, content: &str) -> Vec<CodeChunk> {
    let lines: Vec<&str> = content.lines().collect();
    if lines.is_empty() {
        return Vec::new();
    }

    let mut chunks = Vec::new();
    let mut i = 0;

    while i < lines.len() {
        let trimmed = lines[i].trim();

        if let Some((name, kind)) = detect_symbol(trimmed) {
            let start = i;
            let end = find_block_end(&lines, i);
            // min() guards slicing against an out-of-range end index.
            let block: String = lines[start..=end.min(lines.len() - 1)].to_vec().join("\n");
            let tokens = tokenize(&block);
            let token_count = tokens.len();

            chunks.push(CodeChunk {
                file_path: file_path.to_string(),
                symbol_name: name,
                kind,
                start_line: start + 1, // convert 0-based index to 1-based line
                end_line: end + 1,
                content: block,
                tokens,
                token_count,
            });

            // Resume after this block: definitions nested inside it are not
            // extracted as separate chunks.
            i = end + 1;
        } else {
            i += 1;
        }
    }

    // Fallback: index the whole file as one chunk so it is still searchable.
    if chunks.is_empty() && !content.is_empty() {
        let tokens = tokenize(content);
        let token_count = tokens.len();
        // NOTE: content is capped at the first 50 lines, while tokens and the
        // start/end line range deliberately cover the full file.
        let snippet = lines
            .iter()
            .take(50)
            .copied()
            .collect::<Vec<_>>()
            .join("\n");
        chunks.push(CodeChunk {
            file_path: file_path.to_string(),
            symbol_name: file_path.to_string(),
            kind: ChunkKind::Module,
            start_line: 1,
            end_line: lines.len(),
            content: snippet,
            tokens,
            token_count,
        });
    }

    chunks
}
372
373fn detect_symbol(line: &str) -> Option<(String, ChunkKind)> {
374 let trimmed = line.trim();
375
376 let patterns: &[(&str, ChunkKind)] = &[
377 ("pub async fn ", ChunkKind::Function),
378 ("async fn ", ChunkKind::Function),
379 ("pub fn ", ChunkKind::Function),
380 ("fn ", ChunkKind::Function),
381 ("pub struct ", ChunkKind::Struct),
382 ("struct ", ChunkKind::Struct),
383 ("pub enum ", ChunkKind::Struct),
384 ("enum ", ChunkKind::Struct),
385 ("impl ", ChunkKind::Impl),
386 ("pub trait ", ChunkKind::Struct),
387 ("trait ", ChunkKind::Struct),
388 ("export function ", ChunkKind::Function),
389 ("export async function ", ChunkKind::Function),
390 ("export default function ", ChunkKind::Function),
391 ("function ", ChunkKind::Function),
392 ("async function ", ChunkKind::Function),
393 ("export class ", ChunkKind::Class),
394 ("class ", ChunkKind::Class),
395 ("export interface ", ChunkKind::Struct),
396 ("interface ", ChunkKind::Struct),
397 ("def ", ChunkKind::Function),
398 ("async def ", ChunkKind::Function),
399 ("class ", ChunkKind::Class),
400 ("func ", ChunkKind::Function),
401 ];
402
403 for (prefix, kind) in patterns {
404 if let Some(rest) = trimmed.strip_prefix(prefix) {
405 let name: String = rest
406 .chars()
407 .take_while(|c| c.is_alphanumeric() || *c == '_' || *c == '<')
408 .take_while(|c| *c != '<')
409 .collect();
410 if !name.is_empty() {
411 return Some((name, kind.clone()));
412 }
413 }
414 }
415
416 None
417}
418
/// Returns the last line (0-based, inclusive) of the block starting at
/// `start`.
///
/// For brace-delimited code it tracks `{`/`}` nesting and ends where the
/// outermost brace closes. Parentheses are deliberately NOT counted: the old
/// version treated `(`/`)` as block delimiters, so any signature with an
/// argument list closed "the block" on its own first line and every chunk
/// degenerated to a one-line signature. For brace-less code (e.g. Python) an
/// indentation heuristic applies: the block ends just before the first blank
/// or unindented line. Indentation is checked on the raw line — the old code
/// tested `starts_with(' ')` on an already-trimmed string, which is always
/// false, cutting such blocks at `start + 2`. If neither rule fires, the
/// block is capped at 50 lines.
fn find_block_end(lines: &[&str], start: usize) -> usize {
    let mut depth = 0i32;
    let mut found_open = false;

    for (i, line) in lines.iter().enumerate().skip(start) {
        for ch in line.chars() {
            match ch {
                '{' => {
                    depth += 1;
                    found_open = true;
                }
                '}' if depth > 0 => {
                    depth -= 1;
                    if depth == 0 {
                        // Outermost brace closed: block ends here (handles
                        // one-line bodies like `fn f() {}` too).
                        return i;
                    }
                }
                _ => {}
            }
        }

        // Indentation fallback for brace-less blocks, skipping the first
        // couple of lines so short headers aren't cut immediately.
        if !found_open && i > start + 2 {
            let trimmed = line.trim();
            if trimmed.is_empty() || (!line.starts_with(' ') && !line.starts_with('\t')) {
                return i.saturating_sub(1);
            }
        }
    }

    // Safety cap: never emit a chunk longer than 50 lines past its start.
    (start + 50).min(lines.len().saturating_sub(1))
}
456
457pub fn format_search_results(results: &[SearchResult], compact: bool) -> String {
458 if results.is_empty() {
459 return "No results found.".to_string();
460 }
461
462 let mut out = String::new();
463 for (i, r) in results.iter().enumerate() {
464 if compact {
465 out.push_str(&format!(
466 "{}. {:.2} {}:{}-{} {:?} {}\n",
467 i + 1,
468 r.score,
469 r.file_path,
470 r.start_line,
471 r.end_line,
472 r.kind,
473 r.symbol_name,
474 ));
475 } else {
476 out.push_str(&format!(
477 "\n--- Result {} (score: {:.2}) ---\n{} :: {} [{:?}] (L{}-{})\n{}\n",
478 i + 1,
479 r.score,
480 r.file_path,
481 r.symbol_name,
482 r.kind,
483 r.start_line,
484 r.end_line,
485 r.snippet,
486 ));
487 }
488 }
489 out
490}
491
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a test CodeChunk whose `token_count` is derived from the
    /// actual token list. The old fixtures hard-coded counts (8 and 7) that
    /// disagreed with `tokenize(...).len()`, skewing length normalization.
    fn chunk(file: &str, symbol: &str, content: &str) -> CodeChunk {
        let tokens = tokenize(content);
        let token_count = tokens.len();
        CodeChunk {
            file_path: file.into(),
            symbol_name: symbol.into(),
            kind: ChunkKind::Function,
            start_line: 1,
            end_line: 10,
            content: content.into(),
            tokens,
            token_count,
        }
    }

    #[test]
    fn tokenize_splits_code() {
        let tokens = tokenize("fn calculate_total(items: Vec<Item>) -> f64");
        assert!(tokens.contains(&"calculate_total".to_string()));
        assert!(tokens.contains(&"items".to_string()));
        assert!(tokens.contains(&"Vec".to_string()));
    }

    #[test]
    fn camel_case_splitting() {
        let tokens = split_camel_case_tokens(&["calculateTotal".to_string()]);
        assert!(tokens.contains(&"calculateTotal".to_string()));
        assert!(tokens.contains(&"calculate".to_string()));
        assert!(tokens.contains(&"Total".to_string()));
    }

    #[test]
    fn detect_rust_function() {
        let (name, kind) =
            detect_symbol("pub fn process_request(req: Request) -> Response {").unwrap();
        assert_eq!(name, "process_request");
        assert_eq!(kind, ChunkKind::Function);
    }

    #[test]
    fn bm25_search_finds_relevant() {
        let mut index = BM25Index::new();
        index.add_chunk(chunk(
            "auth.rs",
            "validate_token",
            "fn validate_token(token: &str) -> bool { check_jwt_expiry(token) }",
        ));
        index.add_chunk(chunk(
            "db.rs",
            "connect_database",
            "fn connect_database(url: &str) -> Pool { create_pool(url) }",
        ));
        index.finalize();

        let results = index.search("jwt token validation", 5);
        assert!(!results.is_empty());
        assert_eq!(results[0].symbol_name, "validate_token");
    }
}
549}