use std::path::Path;
use anyhow::Result;
use crate::rank::FuzzyResult;
use crate::timeline::FrameKind;
/// Outcome of a successful semantic (vector-backed) search pass.
#[derive(Debug)]
pub struct SemanticSearchOutcome {
    /// Ranked matches produced by the semantic backend.
    pub results: Vec<FuzzyResult>,
    /// Number of candidate chunks examined to produce `results`.
    pub scanned: usize,
    /// Static label naming the backend that served the query
    /// (rendered as `backend=...` in the oracle status line).
    pub backend_label: &'static str,
    /// Identifier of the embedding model used
    /// (rendered as `model=...` in the oracle status line).
    pub model_id: String,
}
/// Which code path actually served a search request.
#[derive(Debug)]
pub enum SearchPath {
    /// The semantic (embedding-backed) path produced the results.
    Semantic(SemanticSearchOutcome),
    /// Semantic search was unavailable; `reason` explains why, so callers
    /// can surface it and degrade to fuzzy filesystem search.
    Fallback { reason: String },
}
/// Attempt a semantic search over the store rooted at `_store_root`.
///
/// When no embedder feature is compiled into the binary this returns
/// `Ok(SearchPath::Fallback { .. })` with an actionable reason — never an
/// `Err` — so callers can degrade gracefully to fuzzy filesystem search.
/// With an embedder feature enabled, the call is delegated to the
/// feature-gated implementation.
///
/// Parameters are currently unused on the fallback path (hence the `_`
/// prefixes); they are forwarded verbatim to the gated implementation.
pub fn try_semantic_search(
    _store_root: &Path,
    _query: &str,
    _limit: usize,
    _project_filter: Option<&str>,
    _frame_kind_filter: Option<FrameKind>,
) -> Result<SearchPath> {
    // Exactly one of the two cfg-gated blocks survives compilation and
    // becomes the function's tail expression.
    #[cfg(not(any(feature = "native-embedder", feature = "cloud-embedder")))]
    {
        // This branch fires when *neither* embedder feature is enabled, so
        // the reason names both features (the old text mentioned only
        // native-embedder, which was misleading for cloud-embedder builds).
        Ok(SearchPath::Fallback {
            reason: "no embedder feature (native-embedder or cloud-embedder) compiled in this binary"
                .to_string(),
        })
    }
    #[cfg(any(feature = "native-embedder", feature = "cloud-embedder"))]
    {
        try_semantic_search_native(
            _store_root,
            _query,
            _limit,
            _project_filter,
            _frame_kind_filter,
        )
    }
}
/// Feature-gated semantic search implementation, shared by the
/// native-embedder and cloud-embedder builds.
///
/// Currently this only initializes the embedding engine and then reports a
/// fallback, because the vector index is not built yet.
#[cfg(any(feature = "native-embedder", feature = "cloud-embedder"))]
fn try_semantic_search_native(
    _store_root: &Path,
    _query: &str,
    _limit: usize,
    _project_filter: Option<&str>,
    _frame_kind_filter: Option<FrameKind>,
) -> Result<SearchPath> {
    // A failed engine init is a recoverable condition: surface it as a
    // fallback reason instead of propagating an Err.
    let init = crate::embedder::EmbeddingEngine::new();
    let engine = match init {
        Err(err) => {
            return Ok(SearchPath::Fallback {
                reason: format!("embedder init failed: {err}"),
            });
        }
        Ok(engine) => engine,
    };
    // Engine info is queried (and discarded) until the index path lands;
    // the call is kept in case `info()` has side effects — TODO confirm.
    let _info = engine.info();
    let reason =
        "vector index not built yet (run `aicx index` after this iteration ships)".to_string();
    Ok(SearchPath::Fallback { reason })
}
/// Render the `oracle_status` summary line for a completed search.
///
/// `result_count` is how many results were shown; `scanned` is how many
/// chunks were examined to produce them. The line encodes which backend
/// served the query and, on the fallback path, the machine-readable reason.
pub fn render_oracle_status_line(path: &SearchPath, result_count: usize, scanned: usize) -> String {
    match path {
        SearchPath::Fallback { reason } => {
            format!(
                "{} result(s) from {} scanned chunks. oracle_status: backend=filesystem_fuzzy_fallback index=none fallback_reason=\"{}\" loctree_scope_safe=false",
                result_count, scanned, reason
            )
        }
        SearchPath::Semantic(outcome) => {
            let backend = outcome.backend_label;
            let model = &outcome.model_id;
            format!(
                "{} result(s) from {} candidate chunks. oracle_status: backend={} index=lance fallback=none model={} loctree_scope_safe=true",
                result_count, scanned, backend, model
            )
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    /// The no-index path must yield `Fallback` with a non-empty, actionable
    /// reason — and must never return `Err`.
    #[test]
    fn fallback_path_returns_actionable_reason() {
        let result = try_semantic_search(
            Path::new("/tmp/aicx-search-engine-test"),
            "any query",
            10,
            None,
            None,
        )
        .expect("try_semantic_search must not return Err in fallback path");
        match result {
            SearchPath::Fallback { reason } => {
                assert!(!reason.is_empty(), "fallback reason must not be empty");
            }
            SearchPath::Semantic(_) => {
                // Intentionally empty: when an embedder feature is compiled
                // in, a semantic result is also an acceptable outcome.
            }
        }
    }

    /// The fallback status line must carry the backend marker, the quoted
    /// reason, and the result/scanned counts verbatim.
    #[test]
    fn oracle_status_line_for_fallback_includes_reason() {
        let path = SearchPath::Fallback {
            reason: "embedder init failed: no GGUF model found".to_string(),
        };
        let line = render_oracle_status_line(&path, 5, 421);
        assert!(line.contains("backend=filesystem_fuzzy_fallback"));
        assert!(line.contains("fallback_reason=\"embedder init failed: no GGUF model found\""));
        assert!(line.contains("5 result"));
        assert!(line.contains("421 scanned chunks"));
    }

    /// The semantic status line must advertise the backend label, the lance
    /// index, the model id, and the scope-safety flag.
    #[test]
    fn oracle_status_line_for_semantic_marks_backend_and_index() {
        let path = SearchPath::Semantic(SemanticSearchOutcome {
            results: Vec::new(),
            scanned: 11_237,
            backend_label: "embedded_semantic",
            model_id: "F2LLM-v2-0.6B.Q4_K_M.gguf".to_string(),
        });
        let line = render_oracle_status_line(&path, 0, 11_237);
        assert!(line.contains("backend=embedded_semantic"));
        assert!(line.contains("index=lance"));
        assert!(line.contains("model=F2LLM-v2-0.6B.Q4_K_M.gguf"));
        assert!(line.contains("loctree_scope_safe=true"));
    }
}