use crate::search::cache::{CacheKey, SearchCache};
use crate::search::pipeline::SearchPipeline;
use crate::search::results::SearchOptions;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tracing::{debug, info, warn};
/// Pre-populates the search cache by executing representative queries ahead
/// of time, so that subsequent user searches hit warm entries.
pub struct CacheWarmer {
    // Shared handle to the cache that warmed results are written into.
    cache: Arc<SearchCache>,
}
impl CacheWarmer {
    /// Creates a warmer backed by the given shared cache.
    pub fn new(cache: Arc<SearchCache>) -> Self {
        Self { cache }
    }

    /// Runs every query in `queries` through `pipeline` and stores each
    /// successful result set in the cache.
    ///
    /// Warming is best-effort: a failed search is logged and skipped, and
    /// once the elapsed time reaches `timeout` (default: 30 seconds) the
    /// remaining queries are abandoned. In both cases the function still
    /// returns `Ok` with the number of queries that were actually warmed.
    pub async fn warm_with_queries(
        &self,
        queries: &[String],
        repo_id: i64,
        worktree_id: Option<i64>,
        pipeline: &SearchPipeline,
        timeout: Option<Duration>,
    ) -> Result<usize, WarmingError> {
        let started = Instant::now();
        let deadline = timeout.unwrap_or(Duration::from_secs(30));
        info!(
            "Starting cache warming with {} queries (timeout: {}s)",
            queries.len(),
            deadline.as_secs()
        );

        let mut warmed = 0;
        for (idx, query) in queries.iter().enumerate() {
            // Deadline is checked between queries, so an in-flight search may
            // overrun it slightly; we never cancel a search mid-flight.
            if started.elapsed() >= deadline {
                warn!(
                    "Cache warming timeout after {} queries ({:.2}s)",
                    warmed,
                    started.elapsed().as_secs_f64()
                );
                break;
            }

            let options = SearchOptions::new(repo_id, worktree_id, 10);
            let results = match pipeline.search(query, options).await {
                Ok(results) => results,
                Err(e) => {
                    // Best-effort: log and move on to the next query.
                    warn!("Failed to warm query '{}': {}", query, e);
                    continue;
                }
            };

            // Key must mirror the options used for the search (same limit of 10)
            // so that a later identical lookup hits this entry.
            self.cache
                .put(CacheKey::new(query, repo_id, worktree_id, 10), results);
            warmed += 1;
            debug!(
                "Warmed query {}/{}: '{}' ({:.2}s elapsed)",
                idx + 1,
                queries.len(),
                query,
                started.elapsed().as_secs_f64()
            );
        }

        info!(
            "Cache warming completed: {}/{} queries in {:.2}s",
            warmed,
            queries.len(),
            started.elapsed().as_secs_f64()
        );
        Ok(warmed)
    }

    /// Warms the cache with a built-in list of identifiers that are common
    /// in codebases ("main", "init", "config", …), using the default timeout.
    pub async fn warm_with_patterns(
        &self,
        repo_id: i64,
        worktree_id: Option<i64>,
        pipeline: &SearchPipeline,
    ) -> Result<usize, WarmingError> {
        const PATTERNS: [&str; 11] = [
            "main", "init", "config", "error", "handle", "process", "create", "update", "delete",
            "get", "set",
        ];
        let queries: Vec<String> = PATTERNS.iter().map(ToString::to_string).collect();
        self.warm_with_queries(&queries, repo_id, worktree_id, pipeline, None)
            .await
    }

    /// Returns a snapshot of the underlying cache's size, capacity, and hit rate.
    pub fn stats(&self) -> WarmingStats {
        let snapshot = self.cache.stats();
        WarmingStats {
            cache_size: snapshot.size,
            cache_capacity: snapshot.capacity,
            cache_hit_rate: snapshot.hit_rate(),
        }
    }
}
/// Snapshot of cache metrics as reported by [`CacheWarmer::stats`].
#[derive(Debug, Clone)]
pub struct WarmingStats {
    /// Number of entries currently stored in the cache.
    pub cache_size: usize,
    /// Maximum number of entries the cache can hold.
    pub cache_capacity: usize,
    /// Hit rate reported by the cache's `hit_rate()` — presumably a fraction
    /// in [0.0, 1.0]; confirm against `SearchCache`.
    pub cache_hit_rate: f64,
}
/// Errors related to cache warming.
///
/// NOTE(review): no variant is constructed within this file —
/// `warm_with_queries` reports timeouts and per-query failures via logs and
/// still returns `Ok`. Confirm whether other modules build these, or whether
/// `Timeout` was meant to be returned on the warming deadline.
#[derive(Debug, thiserror::Error)]
pub enum WarmingError {
    /// The underlying search pipeline failed; payload is its error message.
    #[error("Search pipeline error: {0}")]
    Pipeline(String),
    /// Warming hit its deadline; payload is the number of queries warmed.
    #[error("Warming timeout after {0} queries")]
    Timeout(usize),
    /// Invalid warming configuration; payload describes the problem.
    #[error("Configuration error: {0}")]
    Config(String),
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Fields of a hand-built `WarmingStats` read back exactly as written.
    #[test]
    fn test_warming_stats_structure() {
        let snapshot = WarmingStats {
            cache_size: 50,
            cache_capacity: 1000,
            cache_hit_rate: 0.75,
        };
        assert_eq!(
            (snapshot.cache_size, snapshot.cache_capacity),
            (50, 1000)
        );
        // Exact comparison is fine: the value was assigned, not computed.
        assert_eq!(snapshot.cache_hit_rate, 0.75);
    }

    /// A fresh warmer over an empty cache reports zero entries and the
    /// configured capacity.
    #[test]
    fn test_warmer_creation() {
        let warmer = CacheWarmer::new(Arc::new(SearchCache::new(100)));
        let snapshot = warmer.stats();
        assert_eq!(snapshot.cache_size, 0);
        assert_eq!(snapshot.cache_capacity, 100);
    }
}