use zeph_llm::any::AnyProvider;
use zeph_llm::provider::{LlmProvider as _, Message, Role};
/// Prompt prefix for the retrieval-strategy classifier.
///
/// Ends with `Query: "` — an *opening* double quote. The caller appends the
/// raw user query directly after this prefix (see `classify_retrieval_strategy`),
/// so the query appears quoted to the model; no closing quote is added.
/// The model is asked to answer with exactly one of the four strategy names.
const CLASSIFY_PROMPT: &str = r#"Classify this query into one retrieval strategy. Reply with exactly one word.
Strategies:
- astar: factual lookups ("who is X", "what does X do", "find X")
- watercircles: exploratory ("tell me about X", "what relates to X", "overview of X")
- beam_search: multi-hop reasoning ("how does X connect to Y", "path from X to Z")
- synapse: default/unclear
Query: "#;
/// Ask the LLM to classify `query` into one of four graph-retrieval strategies.
///
/// Returns one of `"astar"`, `"watercircles"`, `"beam_search"`, or `"synapse"`.
/// Falls back to `"synapse"` on any provider error or an unrecognized reply,
/// so this function never fails — the default strategy is always a safe answer.
pub async fn classify_retrieval_strategy(provider: &AnyProvider, query: &str) -> String {
    let _span = tracing::info_span!("memory.graph.classify_strategy").entered();

    // CLASSIFY_PROMPT ends with an opening quote; the query follows it verbatim.
    let prompt = format!("{CLASSIFY_PROMPT}{query}");
    let messages = [Message {
        role: Role::User,
        content: prompt,
        ..Default::default()
    }];

    let response = match provider.chat(&messages).await {
        Ok(r) => r,
        Err(e) => {
            tracing::warn!(
                error = %e,
                "strategy classifier: LLM error, falling back to synapse"
            );
            return "synapse".to_owned();
        }
    };

    // Models often decorate a "one word" answer with punctuation or quotes
    // (e.g. `astar.` or `"beam_search"`). Extract the first run of word
    // characters rather than demanding an exactly bare token, so light
    // decoration doesn't silently degrade every reply to the default.
    let word: String = response
        .trim()
        .chars()
        .skip_while(|c| !c.is_ascii_alphanumeric())
        .take_while(|c| c.is_ascii_alphanumeric() || *c == '_')
        .flat_map(char::to_lowercase)
        .collect();

    match word.as_str() {
        "astar" | "watercircles" | "beam_search" | "synapse" => word,
        _ => {
            // Keep the raw reply visible for debugging misbehaving models,
            // but still degrade gracefully to the default strategy.
            tracing::debug!(
                raw = %response,
                "strategy classifier: unrecognized reply, falling back to synapse"
            );
            "synapse".to_owned()
        }
    }
}