use crate::thinktool::{LlmClient, LlmRequest, UnifiedLlmClient};
use crate::{Error, Result};
/// Query expander implementing HyDE (Hypothetical Document Embeddings):
/// asks an LLM to write a hypothetical answer document for a query so the
/// generated text can be used for document retrieval instead of the raw query.
pub struct HyDEExpander {
// The LLM backend used to generate the hypothetical answer text.
llm_client: UnifiedLlmClient,
}
impl HyDEExpander {
pub fn new(llm_client: UnifiedLlmClient) -> Self {
Self { llm_client }
}
pub async fn expand_query(&self, query: &str) -> Result<String> {
let prompt = format!(
r#"Given the question below, write a detailed paragraph that would
answer this question. This paragraph will be used for document retrieval,
so include specific technical terms and concepts that would appear in
authoritative sources.
Question: {query}
Hypothetical Answer Document:"#
);
let request = LlmRequest::new(&prompt)
.with_max_tokens(500)
.with_temperature(0.7);
let response = self
.llm_client
.complete(request)
.await
.map_err(|e| Error::network(format!("LLM generation failed: {}", e)))?;
Ok(response.content)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::thinktool::MockLlmClient;

    /// Verifies that `expand_query` forwards the LLM's generated text.
    #[tokio::test]
    async fn test_hyde_expansion() {
        // FIX: `expect_complete()` takes `&mut self` in the mockall-generated
        // mock, so the binding must be mutable — the immutable binding did not
        // compile.
        let mut mock_client = MockLlmClient::new();
        mock_client.expect_complete().returning(|_| {
            Ok(crate::thinktool::LlmResponse {
                content: "Hypothetical answer about machine learning.".to_string(),
                usage: crate::thinktool::LlmUsage {
                    prompt_tokens: 10,
                    completion_tokens: 20,
                    total_tokens: 30,
                },
            })
        });

        let expander = HyDEExpander::new(UnifiedLlmClient::Mock(mock_client));
        let expanded = expander
            .expand_query("What is machine learning?")
            .await
            .unwrap();

        assert!(expanded.contains("Hypothetical answer"));
    }
}