// mentedb_extraction/config.rs
use serde::{Deserialize, Serialize};
2
/// The LLM backend used for extraction.
///
/// Serialized by variant name (serde's default externally-tagged
/// representation), so renaming a variant is a wire-format change.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum LlmProvider {
    /// OpenAI chat-completions API.
    OpenAI,
    /// Anthropic messages API.
    Anthropic,
    /// Local Ollama server.
    Ollama,
    /// User-supplied endpoint; presumably OpenAI-compatible, since the
    /// default URL uses the `/v1/chat/completions` path — confirm.
    Custom,
}
11
12impl LlmProvider {
13 pub fn default_url(&self) -> &str {
15 match self {
16 LlmProvider::OpenAI => "https://api.openai.com/v1/chat/completions",
17 LlmProvider::Anthropic => "https://api.anthropic.com/v1/messages",
18 LlmProvider::Ollama => "http://localhost:11434/api/chat",
19 LlmProvider::Custom => "http://localhost:8080/v1/chat/completions",
20 }
21 }
22
23 pub fn default_model(&self) -> &str {
25 match self {
26 LlmProvider::OpenAI => "gpt-4o-mini",
27 LlmProvider::Anthropic => "claude-haiku-4-5",
28 LlmProvider::Ollama => "llama3",
29 LlmProvider::Custom => "default",
30 }
31 }
32
33 pub fn default_reader_model(&self) -> &str {
35 match self {
36 LlmProvider::OpenAI => "gpt-4o",
37 LlmProvider::Anthropic => "claude-sonnet-4-20250514",
38 LlmProvider::Ollama => "llama3",
39 LlmProvider::Custom => "default",
40 }
41 }
42}
43
/// Tunable settings for the LLM extraction pipeline.
///
/// Construct via [`ExtractionConfig::openai`], [`ExtractionConfig::anthropic`],
/// [`ExtractionConfig::ollama`], or [`Default`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExtractionConfig {
    /// Which LLM backend to call.
    pub provider: LlmProvider,
    /// API key; `None` for backends that need no auth (e.g. local Ollama).
    pub api_key: Option<String>,
    /// Endpoint URL; defaults to the provider's `default_url()`.
    pub api_url: String,
    /// Model identifier; defaults to the provider's `default_model()`.
    pub model: String,
    /// Cap on extractions per conversation (default 50).
    pub max_extractions_per_conversation: usize,
    /// Quality threshold (default 0.6) — presumably the minimum score an
    /// extraction must reach to be kept; confirm against the extraction code.
    pub quality_threshold: f32,
    /// Similarity threshold (default 0.85) — presumably the score above
    /// which two extractions count as duplicates; confirm.
    pub deduplication_threshold: f32,
    /// Enable the contradiction check (default true).
    pub enable_contradiction_check: bool,
    /// Enable deduplication (default true).
    pub enable_deduplication: bool,
    /// Number of extraction passes over the input (default 1).
    pub extraction_passes: usize,
}
68
69impl ExtractionConfig {
70 pub fn openai(api_key: impl Into<String>) -> Self {
72 Self {
73 provider: LlmProvider::OpenAI,
74 api_key: Some(api_key.into()),
75 api_url: LlmProvider::OpenAI.default_url().to_string(),
76 model: LlmProvider::OpenAI.default_model().to_string(),
77 ..Self::default()
78 }
79 }
80
81 pub fn anthropic(api_key: impl Into<String>) -> Self {
83 Self {
84 provider: LlmProvider::Anthropic,
85 api_key: Some(api_key.into()),
86 api_url: LlmProvider::Anthropic.default_url().to_string(),
87 model: LlmProvider::Anthropic.default_model().to_string(),
88 ..Self::default()
89 }
90 }
91
92 pub fn ollama() -> Self {
94 Self {
95 provider: LlmProvider::Ollama,
96 api_key: None,
97 api_url: LlmProvider::Ollama.default_url().to_string(),
98 model: LlmProvider::Ollama.default_model().to_string(),
99 ..Self::default()
100 }
101 }
102}
103
104impl Default for ExtractionConfig {
105 fn default() -> Self {
106 Self {
107 provider: LlmProvider::OpenAI,
108 api_key: None,
109 api_url: LlmProvider::OpenAI.default_url().to_string(),
110 model: LlmProvider::OpenAI.default_model().to_string(),
111 max_extractions_per_conversation: 50,
112 quality_threshold: 0.6,
113 deduplication_threshold: 0.85,
114 enable_contradiction_check: true,
115 enable_deduplication: true,
116 extraction_passes: 1,
117 }
118 }
119}