use serde::{Deserialize, Serialize};
/// Identifies which LLM backend the extraction pipeline talks to.
///
/// Each variant has provider-specific defaults for the API endpoint and
/// model names (see the inherent `impl`). The enum is fieldless, so it
/// additionally derives `Copy` (free to pass by value) and `Hash` (usable
/// as a map key) — both backward-compatible additions.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum LlmProvider {
    /// OpenAI chat-completions API.
    OpenAI,
    /// Anthropic messages API.
    Anthropic,
    /// Locally hosted Ollama server.
    Ollama,
    /// User-supplied endpoint; defaults to an OpenAI-style local URL.
    Custom,
}
impl LlmProvider {
    /// Default chat endpoint for this provider.
    ///
    /// Every arm returns a compile-time string literal, so the result is
    /// `&'static str` rather than a borrow tied to `&self` — callers may
    /// keep the slice for as long as they like.
    pub fn default_url(&self) -> &'static str {
        match self {
            LlmProvider::OpenAI => "https://api.openai.com/v1/chat/completions",
            LlmProvider::Anthropic => "https://api.anthropic.com/v1/messages",
            LlmProvider::Ollama => "http://localhost:11434/api/chat",
            LlmProvider::Custom => "http://localhost:8080/v1/chat/completions",
        }
    }

    /// Default (cheaper) model used for extraction requests.
    pub fn default_model(&self) -> &'static str {
        match self {
            LlmProvider::OpenAI => "gpt-4o-mini",
            LlmProvider::Anthropic => "claude-haiku-4-5",
            LlmProvider::Ollama => "llama3",
            LlmProvider::Custom => "default",
        }
    }

    /// Default (stronger) model for reading/comprehension passes — same as
    /// `default_model` for Ollama and Custom, which expose a single model.
    pub fn default_reader_model(&self) -> &'static str {
        match self {
            LlmProvider::OpenAI => "gpt-4o",
            LlmProvider::Anthropic => "claude-sonnet-4-20250514",
            LlmProvider::Ollama => "llama3",
            LlmProvider::Custom => "default",
        }
    }
}
/// Tunable settings for the LLM-backed extraction pipeline.
///
/// Construct via [`ExtractionConfig::openai`], [`ExtractionConfig::anthropic`],
/// [`ExtractionConfig::ollama`], or [`Default`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExtractionConfig {
    /// Backend the extractor talks to (defaults to [`LlmProvider::OpenAI`]).
    pub provider: LlmProvider,
    /// API credential; `None` for key-less backends (the `ollama`
    /// constructor sets no key).
    pub api_key: Option<String>,
    /// Chat endpoint URL (defaults to the provider's `default_url`).
    pub api_url: String,
    /// Model identifier sent to the provider (defaults to the provider's
    /// `default_model`).
    pub model: String,
    /// Upper bound on extractions taken from one conversation (default 50).
    pub max_extractions_per_conversation: usize,
    /// Quality cutoff (default 0.6) — presumably a 0.0..=1.0 score below
    /// which an extraction is dropped; confirm against the scoring code.
    pub quality_threshold: f32,
    /// Similarity threshold for duplicate detection (default 0.85) —
    /// presumably 0.0..=1.0; confirm against the deduplication code.
    pub deduplication_threshold: f32,
    /// Whether to run the contradiction check (default `true`).
    pub enable_contradiction_check: bool,
    /// Whether to deduplicate extractions (default `true`).
    pub enable_deduplication: bool,
    /// Number of extraction passes over a conversation (default 1).
    pub extraction_passes: usize,
}
impl ExtractionConfig {
pub fn openai(api_key: impl Into<String>) -> Self {
Self {
provider: LlmProvider::OpenAI,
api_key: Some(api_key.into()),
api_url: LlmProvider::OpenAI.default_url().to_string(),
model: LlmProvider::OpenAI.default_model().to_string(),
..Self::default()
}
}
pub fn anthropic(api_key: impl Into<String>) -> Self {
Self {
provider: LlmProvider::Anthropic,
api_key: Some(api_key.into()),
api_url: LlmProvider::Anthropic.default_url().to_string(),
model: LlmProvider::Anthropic.default_model().to_string(),
..Self::default()
}
}
pub fn ollama() -> Self {
Self {
provider: LlmProvider::Ollama,
api_key: None,
api_url: LlmProvider::Ollama.default_url().to_string(),
model: LlmProvider::Ollama.default_model().to_string(),
..Self::default()
}
}
}
impl Default for ExtractionConfig {
    /// Baseline configuration: OpenAI backend with no API key, moderate
    /// quality/deduplication thresholds, both checks enabled, and a
    /// single extraction pass.
    fn default() -> Self {
        let provider = LlmProvider::OpenAI;
        let api_url = provider.default_url().to_string();
        let model = provider.default_model().to_string();
        Self {
            provider,
            api_key: None,
            api_url,
            model,
            max_extractions_per_conversation: 50,
            quality_threshold: 0.6,
            deduplication_threshold: 0.85,
            enable_contradiction_check: true,
            enable_deduplication: true,
            extraction_passes: 1,
        }
    }
}