// mentedb_extraction/cognitive_adapter.rs

1//! Adapter that wraps [`HttpExtractionProvider`] as a [`LlmJudge`] for the
2//! cognitive service. This bridges the extraction infrastructure (HTTP clients,
3//! provider configs, retry logic) with the cognitive judgment layer.
4
5use crate::ExtractionProvider;
6use crate::provider::HttpExtractionProvider;
7use mentedb_cognitive::LlmJudgeError;
8
9/// Wraps an [`HttpExtractionProvider`] so it can be used as a [`LlmJudge`]
10/// for the [`CognitiveLlmService`](mentedb_cognitive::CognitiveLlmService).
11///
12/// The extraction provider already handles OpenAI, Anthropic, Ollama, and
13/// Custom endpoints with retry logic — this adapter simply maps the interface.
14pub struct ExtractionLlmJudge {
15    provider: HttpExtractionProvider,
16}
17
18impl ExtractionLlmJudge {
19    /// Create a new adapter from an existing extraction provider.
20    pub fn new(provider: HttpExtractionProvider) -> Self {
21        Self { provider }
22    }
23}
24
25impl mentedb_cognitive::LlmJudge for ExtractionLlmJudge {
26    async fn complete(
27        &self,
28        system_prompt: &str,
29        user_prompt: &str,
30    ) -> Result<String, LlmJudgeError> {
31        self.provider
32            .extract(user_prompt, system_prompt)
33            .await
34            .map_err(|e| LlmJudgeError::ProviderError(e.to_string()))
35    }
36}
37
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::ExtractionConfig;

    /// Smoke test: a provider built from the Ollama config wraps into a judge.
    #[test]
    fn test_adapter_construction() {
        let provider =
            HttpExtractionProvider::new(ExtractionConfig::ollama()).unwrap();
        let _judge = ExtractionLlmJudge::new(provider);
    }
}
49}