use axum::{Json, extract::State};
use std::sync::Arc;
use crate::{
error::{AppError, Result},
models::{ApiResponse, SearchRequest, SearchResultResponse},
state::AppState,
};
/// HTTP handler: run a layered semantic search over stored memories.
///
/// Reads the optional `limit` / `min_score` knobs from the request body,
/// falling back to defaults (10 results, 0.6 minimum score) when omitted,
/// and delegates the actual work to `search_layered`.
pub async fn search(
    State(state): State<Arc<AppState>>,
    Json(req): Json<SearchRequest>,
) -> Result<Json<ApiResponse<Vec<SearchResultResponse>>>> {
    // Defaults applied when the client leaves these fields out of the JSON body.
    let max_results = req.limit.unwrap_or(10);
    let score_floor = req.min_score.unwrap_or(0.6);
    let hits = search_layered(
        &state,
        &req.query,
        req.thread.as_deref(),
        max_results,
        score_floor,
    )
    .await?;
    Ok(Json(ApiResponse::success(hits)))
}
/// Perform a layered semantic search via the configured vector engine.
///
/// * `query`     — free-text query string.
/// * `thread`    — optional session/thread id; when present, the search is
///                 scoped under `cortex://session/{thread}`.
/// * `limit`     — maximum number of results to return.
/// * `min_score` — similarity threshold forwarded to the engine.
///
/// # Errors
/// Returns `AppError::BadRequest` when no vector engine is configured, and
/// `AppError::Internal` when the engine's search call fails.
async fn search_layered(
    state: &AppState,
    query: &str,
    thread: Option<&str>,
    limit: usize,
    min_score: f32,
) -> Result<Vec<SearchResultResponse>> {
    use cortex_mem_core::SearchOptions;

    // The engine is optional at startup; reject the request if it was never built.
    let vector_engine_lock = state.vector_engine.read().await;
    let vector_engine = vector_engine_lock.as_ref().ok_or_else(|| {
        AppError::BadRequest(
            "Vector search not available. Qdrant and Embedding service must be configured."
                .to_string(),
        )
    })?;

    let options = SearchOptions {
        limit,
        threshold: min_score,
        // Scope the search to a single session when a thread id is supplied.
        root_uri: thread.map(|thread_id| format!("cortex://session/{}", thread_id)),
        recursive: true,
    };

    let search_results = vector_engine
        .layered_semantic_search(query, &options)
        .await
        .map_err(|e| AppError::Internal(format!("Layered search failed: {}", e)))?;

    Ok(search_results
        .into_iter()
        .map(|result| SearchResultResponse {
            uri: result.uri,
            score: result.score,
            snippet: truncate_snippet(result.snippet, 200),
            content: result.content,
            source: "layered_vector".to_string(),
        })
        .collect())
}

/// Truncate `snippet` to at most `max_chars` characters, appending "..." only
/// when something was actually cut.
///
/// The previous code compared the **byte** length (`len() > 200`) but cut by
/// **char** count, so a multi-byte snippet over 200 bytes yet at or under
/// 200 chars was suffixed with "..." without being shortened at all.
/// `char_indices().nth(max_chars)` finds the byte offset of the first char
/// past the limit, which is a valid char boundary, so slicing cannot panic —
/// and it avoids the intermediate `collect::<String>()` allocation.
fn truncate_snippet(snippet: String, max_chars: usize) -> String {
    match snippet.char_indices().nth(max_chars) {
        Some((byte_idx, _)) => format!("{}...", &snippet[..byte_idx]),
        None => snippet,
    }
}