// devops_models/llm/provider.rs
//! LLM provider configuration.
//!
//! Defines which LLM backend to use and how to reach it.  This module
//! contains only pure data types — no HTTP code, no async runtime.

use serde::{Deserialize, Serialize};

8/// Supported LLM providers.
9///
10/// Each variant maps to a different API schema and default endpoint.
11#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
12#[serde(rename_all = "lowercase")]
13pub enum LlmProvider {
14    /// Anthropic Claude — uses the Messages API (`/v1/messages`).
15    Anthropic,
16    /// OpenAI — uses the Chat Completions API (`/v1/chat/completions`).
17    OpenAI,
18    /// Ollama — self-hosted, OpenAI-compatible endpoint on `localhost:11434`.
19    Ollama,
20}
21
22/// Complete configuration for an LLM client.
23///
24/// Use [`endpoint`](LlmConfig::endpoint) to resolve the effective API URL,
25/// which respects any custom `base_url` override.
26///
27/// # Example
28///
29/// ```rust
30/// use devops_models::llm::provider::{LlmConfig, LlmProvider};
31///
32/// let config = LlmConfig {
33///     provider: LlmProvider::Anthropic,
34///     api_key: "sk-ant-...".to_string(),
35///     model: "claude-sonnet-4-20250514".to_string(),
36///     base_url: None,
37/// };
38///
39/// assert!(config.endpoint().contains("anthropic.com"));
40/// ```
41#[derive(Debug, Clone, Serialize, Deserialize)]
42pub struct LlmConfig {
43    /// Which LLM backend to target.
44    pub provider: LlmProvider,
45    /// API key (or empty string for Ollama / local setups).
46    pub api_key: String,
47    /// Model identifier (e.g. `"claude-sonnet-4-20250514"`, `"gpt-4o"`).
48    pub model: String,
49    /// Optional URL override.  When `None`, the provider's default endpoint is used.
50    #[serde(default)]
51    pub base_url: Option<String>,
52}
53
54impl LlmConfig {
55    /// Resolve the effective API endpoint URL.
56    ///
57    /// Returns `base_url` if set, otherwise the provider's default endpoint.
58    ///
59    /// # Example
60    ///
61    /// ```rust
62    /// use devops_models::llm::provider::{LlmConfig, LlmProvider};
63    ///
64    /// let config = LlmConfig {
65    ///     provider: LlmProvider::Ollama,
66    ///     api_key: String::new(),
67    ///     model: "qwen3:8b".to_string(),
68    ///     base_url: Some("http://my-ollama:11434/v1/chat/completions".to_string()),
69    /// };
70    /// assert_eq!(config.endpoint(), "http://my-ollama:11434/v1/chat/completions");
71    /// ```
72    pub fn endpoint(&self) -> String {
73        match &self.base_url {
74            Some(url) => url.clone(),
75            None => match self.provider {
76                LlmProvider::Anthropic => "https://api.anthropic.com/v1/messages".to_string(),
77                LlmProvider::OpenAI => "https://api.openai.com/v1/chat/completions".to_string(),
78                LlmProvider::Ollama => "http://localhost:11434/v1/chat/completions".to_string(),
79            },
80        }
81    }
82
83    /// Return the recommended default model identifier for this provider.
84    ///
85    /// This is a convenience helper for new configurations; callers should
86    /// always allow the user to override with a specific model string.
87    pub fn default_model(&self) -> &str {
88        match self.provider {
89            LlmProvider::Anthropic => "claude-sonnet-4-20250514",
90            LlmProvider::OpenAI => "gpt-4o",
91            LlmProvider::Ollama => "qwen3:8b",
92        }
93    }
94}
95
96impl Default for LlmConfig {
97    fn default() -> Self {
98        Self {
99            provider: LlmProvider::Ollama,
100            api_key: String::new(),
101            model: "qwen3:8b".to_string(),
102            base_url: None,
103        }
104    }
105}