use std::sync::Arc;

use rustc_hash::FxHashMap;

use crate::directories::Directory;
use crate::dsl::Schema;
use crate::error::Result;
use crate::query::LazyGlobalStats;
use crate::segment::{SegmentId, SegmentReader};
#[cfg(feature = "native")]
use crate::segment::{SegmentSnapshot, SegmentTracker};
use crate::structures::{CoarseCentroids, PQCodebook};

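/// A point-in-time, read-only view over a set of segments.
///
/// A `Searcher` bundles everything needed to execute queries: one reader per
/// segment, the schema, the tokenizer registry, any trained vector-search
/// structures (coarse centroids and PQ codebooks), and lazily computed
/// corpus-wide statistics.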
pub struct Searcher<D: Directory + 'static> {
    /// Keeps the snapshotted segments alive for this searcher's lifetime.
    #[cfg(feature = "native")]
    _snapshot: SegmentSnapshot<D>,
    #[cfg(not(feature = "native"))]
    _phantom: std::marker::PhantomData<D>,
    /// Open readers, one per segment, in index order.
    segments: Vec<Arc<SegmentReader>>,
    schema: Arc<Schema>,
    /// Fields searched when a query does not name a field explicitly.
    default_fields: Vec<crate::Field>,
    tokenizers: Arc<crate::tokenizer::TokenizerRegistry>,
    /// Trained coarse centroids for vector fields, keyed by field id.
    trained_centroids: FxHashMap<u32, Arc<CoarseCentroids>>,
    /// Trained product-quantization codebooks, keyed by field id.
    trained_codebooks: FxHashMap<u32, Arc<PQCodebook>>,
    /// Corpus-wide statistics, computed lazily on first use.
    global_stats: Arc<LazyGlobalStats>,
}

impl<D: Directory + 'static> Searcher<D> {
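    /// Opens a searcher over the given segments with no pre-trained
    /// vector-search structures.
    ///
    /// A minimal usage sketch (`directory`, `schema`, and `segment_ids` are
    /// assumed to already exist; `256` is an arbitrary term-cache size):
    ///
    /// ```ignore
    /// let searcher = Searcher::open(directory, schema, &segment_ids, 256).await?;
    /// println!("{} docs across {} segments", searcher.num_docs(), searcher.num_segments());
    /// ```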
    pub async fn open(
        directory: Arc<D>,
        schema: Arc<Schema>,
        segment_ids: &[String],
        term_cache_blocks: usize,
    ) -> Result<Self> {
        Self::create(
            directory,
            schema,
            segment_ids,
            FxHashMap::default(),
            FxHashMap::default(),
            term_cache_blocks,
        )
        .await
    }

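    /// Builds a searcher from an existing [`SegmentSnapshot`], carrying over
    /// trained centroids and codebooks. Used on native builds where segment
    /// lifetimes are tracked.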
    #[cfg(feature = "native")]
    pub(crate) async fn from_snapshot(
        directory: Arc<D>,
        schema: Arc<Schema>,
        snapshot: SegmentSnapshot<D>,
        trained_centroids: FxHashMap<u32, Arc<CoarseCentroids>>,
        trained_codebooks: FxHashMap<u32, Arc<PQCodebook>>,
        term_cache_blocks: usize,
    ) -> Result<Self> {
        let (segments, default_fields, global_stats) = Self::load_common(
            &directory,
            &schema,
            snapshot.segment_ids(),
            term_cache_blocks,
        )
        .await;

        Ok(Self {
            _snapshot: snapshot,
            segments,
            schema,
            default_fields,
            tokenizers: Arc::new(crate::tokenizer::TokenizerRegistry::default()),
            trained_centroids,
            trained_codebooks,
            global_stats,
        })
    }

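    /// Shared constructor behind [`Searcher::open`]: loads the segments, then
    /// assembles the searcher with or without snapshot tracking depending on
    /// the `native` feature.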
    async fn create(
        directory: Arc<D>,
        schema: Arc<Schema>,
        segment_ids: &[String],
        trained_centroids: FxHashMap<u32, Arc<CoarseCentroids>>,
        trained_codebooks: FxHashMap<u32, Arc<PQCodebook>>,
        term_cache_blocks: usize,
    ) -> Result<Self> {
        let (segments, default_fields, global_stats) =
            Self::load_common(&directory, &schema, segment_ids, term_cache_blocks).await;

        #[cfg(feature = "native")]
        {
            // On native builds, register the segments with a fresh tracker so
            // they stay pinned for as long as this searcher is alive.
            let tracker = Arc::new(SegmentTracker::new());
            let snapshot = SegmentSnapshot::new(tracker, directory, segment_ids.to_vec());
            Ok(Self {
                _snapshot: snapshot,
                segments,
                schema,
                default_fields,
                tokenizers: Arc::new(crate::tokenizer::TokenizerRegistry::default()),
                trained_centroids,
                trained_codebooks,
                global_stats,
            })
        }

        #[cfg(not(feature = "native"))]
        {
            // `directory` is only consumed by the snapshot on native builds.
            let _ = directory;
            Ok(Self {
                _phantom: std::marker::PhantomData,
                segments,
                schema,
                default_fields,
                tokenizers: Arc::new(crate::tokenizer::TokenizerRegistry::default()),
                trained_centroids,
                trained_codebooks,
                global_stats,
            })
        }
    }

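    /// Loads the segment readers and derives the pieces every constructor
    /// needs: the readers themselves, the default search fields, and the
    /// lazy global statistics over those readers.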
    async fn load_common(
        directory: &Arc<D>,
        schema: &Arc<Schema>,
        segment_ids: &[String],
        term_cache_blocks: usize,
    ) -> (
        Vec<Arc<SegmentReader>>,
        Vec<crate::Field>,
        Arc<LazyGlobalStats>,
    ) {
        let segments = Self::load_segments(directory, schema, segment_ids, term_cache_blocks).await;
        let default_fields = Self::build_default_fields(schema);
        let global_stats = Arc::new(LazyGlobalStats::new(segments.clone()));
        (segments, default_fields, global_stats)
    }

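    /// Opens all segments concurrently, preserving the order of `segment_ids`.
    /// Segments whose id fails to parse or that fail to open are skipped with
    /// a warning rather than failing the whole searcher.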
    async fn load_segments(
        directory: &Arc<D>,
        schema: &Arc<Schema>,
        segment_ids: &[String],
        term_cache_blocks: usize,
    ) -> Vec<Arc<SegmentReader>> {
        // Drop ids that don't parse as hex, remembering each segment's
        // original position so ordering can be restored after the join.
        let valid_segments: Vec<(usize, SegmentId)> = segment_ids
            .iter()
            .enumerate()
            .filter_map(|(idx, id_str)| SegmentId::from_hex(id_str).map(|sid| (idx, sid)))
            .collect();

        // Open every segment concurrently.
        let futures: Vec<_> = valid_segments
            .iter()
            .map(|(_, segment_id)| {
                let dir = Arc::clone(directory);
                let sch = Arc::clone(schema);
                let sid = *segment_id;
                async move {
                    SegmentReader::open(dir.as_ref(), sid, sch, 0, term_cache_blocks).await
                }
            })
            .collect();

        let results = futures::future::join_all(futures).await;

        // Keep the readers that opened successfully; log and skip the rest.
        let mut loaded: Vec<(usize, SegmentReader)> = valid_segments
            .into_iter()
            .zip(results)
            .filter_map(|((idx, _), result)| match result {
                Ok(reader) => Some((idx, reader)),
                Err(e) => {
                    log::warn!("Failed to open segment: {:?}", e);
                    None
                }
            })
            .collect();

        loaded.sort_by_key(|(idx, _)| *idx);

        // Assign each segment a contiguous global doc-id range.
        let mut doc_id_offset = 0u32;
        let mut segments = Vec::with_capacity(loaded.len());
        for (_, mut reader) in loaded {
            reader.set_doc_id_offset(doc_id_offset);
            doc_id_offset += reader.meta().num_docs;
            segments.push(Arc::new(reader));
        }

        let total_docs: u32 = segments.iter().map(|s| s.meta().num_docs).sum();
        let total_sparse_mem: usize = segments
            .iter()
            .flat_map(|s| s.sparse_indexes().values())
            .map(|idx| idx.num_dimensions() * 12) // rough estimate: ~12 bytes per dimension
            .sum();
        log::info!(
            "[searcher] loaded {} segments: total_docs={}, sparse_index_mem={:.2} MB",
            segments.len(),
            total_docs,
            total_sparse_mem as f64 / (1024.0 * 1024.0)
        );

        segments
    }

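    /// Resolves the default search fields: the schema's explicit defaults if
    /// configured, otherwise every indexed text field.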
    fn build_default_fields(schema: &Schema) -> Vec<crate::Field> {
        if !schema.default_fields().is_empty() {
            schema.default_fields().to_vec()
        } else {
            schema
                .fields()
                .filter(|(_, entry)| {
                    entry.indexed && entry.field_type == crate::dsl::FieldType::Text
                })
                .map(|(field, _)| field)
                .collect()
        }
    }

    /// The schema this searcher was opened with.
    pub fn schema(&self) -> &Schema {
        &self.schema
    }

    /// The open segment readers, in index order.
    pub fn segment_readers(&self) -> &[Arc<SegmentReader>] {
        &self.segments
    }

    /// Fields searched when a query does not name a field.
    pub fn default_fields(&self) -> &[crate::Field] {
        &self.default_fields
    }

    /// The tokenizer registry used for query analysis.
    pub fn tokenizers(&self) -> &crate::tokenizer::TokenizerRegistry {
        &self.tokenizers
    }

    /// Trained coarse centroids, keyed by field id.
    pub fn trained_centroids(&self) -> &FxHashMap<u32, Arc<CoarseCentroids>> {
        &self.trained_centroids
    }

    /// Trained product-quantization codebooks, keyed by field id.
    pub fn trained_codebooks(&self) -> &FxHashMap<u32, Arc<PQCodebook>> {
        &self.trained_codebooks
    }

    /// Lazily computed corpus-wide statistics.
    pub fn global_stats(&self) -> &Arc<LazyGlobalStats> {
        &self.global_stats
    }

    /// Total number of documents across all segments.
    pub fn num_docs(&self) -> u32 {
        self.segments.iter().map(|s| s.meta().num_docs).sum()
    }

    /// Number of open segments.
    pub fn num_segments(&self) -> usize {
        self.segments.len()
    }

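    /// Fetches a document by its global doc id, i.e. a position in the
    /// concatenated doc-id space of all segments.
    ///
    /// A minimal sketch (`searcher` is assumed to be an open `Searcher`):
    ///
    /// ```ignore
    /// if let Some(doc) = searcher.doc(42).await? {
    ///     // id 42 landed in some segment; `doc` is the stored document.
    /// }
    /// ```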
    pub async fn doc(&self, doc_id: u32) -> Result<Option<crate::dsl::Document>> {
        // Walk the segments in order, translating the global doc id into a
        // segment-local one.
        let mut offset = 0u32;
        for segment in &self.segments {
            let segment_docs = segment.meta().num_docs;
            if doc_id < offset + segment_docs {
                let local_doc_id = doc_id - offset;
                return segment.doc(local_doc_id).await;
            }
            offset += segment_docs;
        }
        Ok(None)
    }

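    /// Runs a query and returns the top `limit` results by score.
    ///
    /// A minimal sketch (`query` is any value implementing
    /// `crate::query::Query`; the name is illustrative):
    ///
    /// ```ignore
    /// let results = searcher.search(&query, 10).await?;
    /// for r in &results {
    ///     println!("doc={} score={}", r.doc_id, r.score);
    /// }
    /// ```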
    pub async fn search(
        &self,
        query: &dyn crate::query::Query,
        limit: usize,
    ) -> Result<Vec<crate::query::SearchResult>> {
        let (results, _) = self.search_with_count(query, limit).await?;
        Ok(results)
    }

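    /// Like [`Searcher::search`], but also returns the total number of
    /// matching documents seen across all segments.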
    pub async fn search_with_count(
        &self,
        query: &dyn crate::query::Query,
        limit: usize,
    ) -> Result<(Vec<crate::query::SearchResult>, u32)> {
        self.search_with_offset_and_count(query, limit, 0).await
    }

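    /// Like [`Searcher::search`], but skips the first `offset` results, for
    /// paginated queries.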
    pub async fn search_with_offset(
        &self,
        query: &dyn crate::query::Query,
        limit: usize,
        offset: usize,
    ) -> Result<Vec<crate::query::SearchResult>> {
        let (results, _) = self
            .search_with_offset_and_count(query, limit, offset)
            .await?;
        Ok(results)
    }

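    /// The full search entry point: paginated results plus the total number
    /// of matches seen across segments.
    ///
    /// A pagination sketch (page 3 with 10 hits per page; names are
    /// illustrative):
    ///
    /// ```ignore
    /// let (page, total) = searcher
    ///     .search_with_offset_and_count(&query, 10, 20)
    ///     .await?;
    /// println!("showing {} of {} matches", page.len(), total);
    /// ```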
    pub async fn search_with_offset_and_count(
        &self,
        query: &dyn crate::query::Query,
        limit: usize,
        offset: usize,
    ) -> Result<(Vec<crate::query::SearchResult>, u32)> {
        // Each segment must return `offset + limit` candidates, since in the
        // worst case every result on the requested page comes from a single
        // segment.
        let fetch_limit = offset + limit;

        // Search all segments concurrently, tagging each result with its
        // segment id.
        let futures: Vec<_> = self
            .segments
            .iter()
            .map(|segment| {
                let sid = segment.meta().id;
                async move {
                    let (results, segment_seen) = crate::query::search_segment_with_count(
                        segment.as_ref(),
                        query,
                        fetch_limit,
                    )
                    .await?;
                    Ok::<_, crate::error::Error>((
                        results
                            .into_iter()
                            .map(move |r| (sid, r))
                            .collect::<Vec<_>>(),
                        segment_seen,
                    ))
                }
            })
            .collect();

        let batches = futures::future::try_join_all(futures).await?;
        let mut all_results: Vec<(u128, crate::query::SearchResult)> = Vec::new();
        let mut total_seen: u32 = 0;
        for (batch, segment_seen) in batches {
            total_seen += segment_seen;
            all_results.extend(batch);
        }

        // Merge: sort by descending score, then apply pagination.
        all_results.sort_by(|a, b| {
            b.1.score
                .partial_cmp(&a.1.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        let results = all_results
            .into_iter()
            .skip(offset)
            .take(limit)
            .map(|(_, result)| result)
            .collect();

        Ok((results, total_seen))
    }

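    /// Two-stage retrieval: fetch `l1_limit` candidates with the query, then
    /// rerank them down to `final_limit` using the given reranker config.
    ///
    /// A minimal sketch (`config` is a `crate::query::RerankerConfig` built
    /// elsewhere; names are illustrative):
    ///
    /// ```ignore
    /// let (hits, total) = searcher
    ///     .search_and_rerank(&query, 100, 10, &config)
    ///     .await?;
    /// ```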
    pub async fn search_and_rerank(
        &self,
        query: &dyn crate::query::Query,
        l1_limit: usize,
        final_limit: usize,
        config: &crate::query::RerankerConfig,
    ) -> Result<(Vec<crate::query::SearchResult>, u32)> {
        let (candidates, total_seen) = self.search_with_count(query, l1_limit).await?;
        let reranked = crate::query::rerank(self, &candidates, config, final_limit).await?;
        Ok((reranked, total_seen))
    }

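    /// Parses and runs a query-language string, returning addressable hits.
    ///
    /// A minimal sketch (the query syntax shown is illustrative):
    ///
    /// ```ignore
    /// let response = searcher.query("title:rust", 10).await?;
    /// for hit in &response.hits {
    ///     if let Some(doc) = searcher.get_document(&hit.address).await? {
    ///         // `doc` is the stored document for this hit.
    ///     }
    /// }
    /// ```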
    pub async fn query(
        &self,
        query_str: &str,
        limit: usize,
    ) -> Result<crate::query::SearchResponse> {
        self.query_offset(query_str, limit, 0).await
    }

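    /// Like [`Searcher::query`], but skips the first `offset` hits. Note that
    /// `total_hits` counts the candidates gathered across segments (each
    /// segment contributes at most `offset + limit`), not every match in the
    /// index.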
    pub async fn query_offset(
        &self,
        query_str: &str,
        limit: usize,
        offset: usize,
    ) -> Result<crate::query::SearchResponse> {
        let parser = self.query_parser();
        let query = parser
            .parse(query_str)
            .map_err(crate::error::Error::Query)?;

        // As in `search_with_offset_and_count`, over-fetch so pagination can
        // be applied after merging.
        let fetch_limit = offset + limit;
        let query_ref = query.as_ref();

        let futures: Vec<_> = self
            .segments
            .iter()
            .map(|segment| {
                let sid = segment.meta().id;
                async move {
                    let results =
                        crate::query::search_segment(segment.as_ref(), query_ref, fetch_limit)
                            .await?;
                    Ok::<_, crate::error::Error>(
                        results
                            .into_iter()
                            .map(move |r| (sid, r))
                            .collect::<Vec<_>>(),
                    )
                }
            })
            .collect();

        let batches = futures::future::try_join_all(futures).await?;
        let mut all_results: Vec<(u128, crate::query::SearchResult)> =
            Vec::with_capacity(batches.iter().map(|b| b.len()).sum());
        for batch in batches {
            all_results.extend(batch);
        }

        all_results.sort_by(|a, b| {
            b.1.score
                .partial_cmp(&a.1.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        let total_hits = all_results.len() as u32;

        // Turn the paginated window into addressable hits.
        let hits: Vec<crate::query::SearchHit> = all_results
            .into_iter()
            .skip(offset)
            .take(limit)
            .map(|(segment_id, result)| crate::query::SearchHit {
                address: crate::query::DocAddress::new(segment_id, result.doc_id),
                score: result.score,
                matched_fields: result.extract_ordinals(),
            })
            .collect();

        Ok(crate::query::SearchResponse { hits, total_hits })
    }

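    /// Builds a query parser over this searcher's schema, default fields, and
    /// tokenizers, attaching a field router when the schema defines routing
    /// rules.
    ///
    /// A minimal sketch for parsing without running a search:
    ///
    /// ```ignore
    /// let parser = searcher.query_parser();
    /// let query = parser.parse("title:rust").map_err(crate::error::Error::Query)?;
    /// ```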
    pub fn query_parser(&self) -> crate::dsl::QueryLanguageParser {
        // Prefer a router-aware parser when the schema defines routing rules
        // and they compile cleanly; otherwise fall back to the plain parser.
        let query_routers = self.schema.query_routers();
        if !query_routers.is_empty()
            && let Ok(router) = crate::dsl::QueryFieldRouter::from_rules(query_routers)
        {
            return crate::dsl::QueryLanguageParser::with_router(
                Arc::clone(&self.schema),
                self.default_fields.clone(),
                Arc::clone(&self.tokenizers),
                router,
            );
        }

        crate::dsl::QueryLanguageParser::new(
            Arc::clone(&self.schema),
            self.default_fields.clone(),
            Arc::clone(&self.tokenizers),
        )
    }

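    /// Resolves a [`crate::query::DocAddress`] (segment id plus the doc id
    /// within that segment) back to its stored document, typically using an
    /// address taken from a [`Searcher::query`] hit.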
    pub async fn get_document(
        &self,
        address: &crate::query::DocAddress,
    ) -> Result<Option<crate::dsl::Document>> {
        let segment_id = address.segment_id_u128().ok_or_else(|| {
            crate::error::Error::Query(format!("Invalid segment ID: {}", address.segment_id))
        })?;

        // Linear scan over the (typically small) set of segments.
        for segment in &self.segments {
            if segment.meta().id == segment_id {
                return segment.doc(address.doc_id).await;
            }
        }

        Ok(None)
    }
}