// open_agent/config.rs
//! # Configuration Helpers for the Open Agent SDK
//!
//! This module provides convenience utilities for configuring connections to different
//! local LLM server providers. It simplifies the process of setting base URLs and model names
//! by providing well-known defaults and environment variable support.
//!
//! ## Supported Providers
//!
//! - **LM Studio**: Popular local model server with GUI
//! - **Ollama**: Command-line focused local model server
//! - **llama.cpp**: C++ inference engine with server mode
//! - **vLLM**: High-performance inference server
//!
//! ## Environment Variables
//!
//! - `OPEN_AGENT_BASE_URL`: Override base URL for any provider
//! - `OPEN_AGENT_MODEL`: Override model name (when `prefer_env` is true)
//!
//! ## Examples
//!
//! ```rust
//! use open_agent::{Provider, get_base_url, get_model, AgentOptions};
//!
//! // Quick setup with provider defaults
//! let url = get_base_url(Some(Provider::Ollama), None);
//! let model = get_model(Some("llama3:8b"), false);
//!
//! // Build options
//! let options = AgentOptions::builder()
//!     .system_prompt("You are a helpful assistant")
//!     .model(model.unwrap())
//!     .base_url(&url)
//!     .build()
//!     .unwrap();
//! ```

use std::env;
use std::str::FromStr;

// ============================================================================
// PROVIDER ENUM
// ============================================================================

/// Enum representing supported local LLM server providers.
///
/// Each provider ships with a well-known default base URL, so application
/// code never needs to hardcode host/port combinations.
///
/// ## Provider Details
///
/// | Provider | Default URL | Port | Description |
/// |----------|-------------|------|-------------|
/// | LMStudio | http://localhost:1234/v1 | 1234 | GUI-based local server |
/// | Ollama | http://localhost:11434/v1 | 11434 | CLI-focused server |
/// | LlamaCpp | http://localhost:8080/v1 | 8080 | C++ inference engine |
/// | VLLM | http://localhost:8000/v1 | 8000 | High-performance server |
///
/// All providers implement the OpenAI-compatible API standard, making them
/// interchangeable from the SDK's perspective.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Provider {
    /// LM Studio - Popular GUI-based local model server (default port 1234)
    LMStudio,
    /// Ollama - Command-line focused local model server (default port 11434)
    Ollama,
    /// llama.cpp - C++ inference engine with server mode (default port 8080)
    LlamaCpp,
    /// vLLM - High-performance inference server (default port 8000)
    VLLM,
}

impl Provider {
    /// Get the default base URL for this provider.
    ///
    /// Every URL points at localhost on the provider's conventional port and
    /// ends with the `/v1` path segment required by the OpenAI-compatible
    /// API standard.
    ///
    /// # Returns
    ///
    /// A `'static` string slice containing protocol, host, port, and API
    /// version path.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use open_agent::Provider;
    ///
    /// assert_eq!(Provider::Ollama.default_url(), "http://localhost:11434/v1");
    /// assert_eq!(Provider::LMStudio.default_url(), "http://localhost:1234/v1");
    /// ```
    pub fn default_url(&self) -> &'static str {
        // Ports follow each project's documented defaults.
        match self {
            Self::LMStudio => "http://localhost:1234/v1",
            Self::Ollama => "http://localhost:11434/v1",
            Self::LlamaCpp => "http://localhost:8080/v1",
            Self::VLLM => "http://localhost:8000/v1",
        }
    }
}

// ============================================================================
// FROMSTR IMPLEMENTATION FOR PARSING
// ============================================================================

116impl FromStr for Provider {
117 type Err = String;
118
119 /// Parse a provider name from a string.
120 ///
121 /// This implementation is case-insensitive and supports multiple naming
122 /// conventions (dashes, underscores, dots) for flexibility.
123 ///
124 /// # Supported Formats
125 ///
126 /// - **LMStudio**: "lmstudio", "lm-studio", "lm_studio" (case-insensitive)
127 /// - **Ollama**: "ollama" (case-insensitive)
128 /// - **LlamaCpp**: "llamacpp", "llama-cpp", "llama_cpp", "llama.cpp" (case-insensitive)
129 /// - **VLLM**: "vllm" (case-insensitive)
130 ///
131 /// # Errors
132 ///
133 /// Returns a `String` error message if the provider name is not recognized.
134 ///
135 /// # Examples
136 ///
137 /// ```rust
138 /// use open_agent::Provider;
139 /// use std::str::FromStr;
140 ///
141 /// let provider = "ollama".parse::<Provider>().unwrap();
142 /// assert_eq!(provider, Provider::Ollama);
143 ///
144 /// let provider = "LM-Studio".parse::<Provider>().unwrap();
145 /// assert_eq!(provider, Provider::LMStudio);
146 ///
147 /// assert!("unknown".parse::<Provider>().is_err());
148 /// ```
149 fn from_str(s: &str) -> Result<Self, Self::Err> {
150 // Convert to lowercase for case-insensitive matching
151 match s.to_lowercase().as_str() {
152 // LM Studio accepts multiple common variations
153 "lmstudio" | "lm-studio" | "lm_studio" => Ok(Provider::LMStudio),
154
155 // Ollama is simple
156 "ollama" => Ok(Provider::Ollama),
157
158 // llama.cpp has many variations in the wild
159 "llamacpp" | "llama-cpp" | "llama_cpp" | "llama.cpp" => Ok(Provider::LlamaCpp),
160
161 // vLLM is straightforward
162 "vllm" => Ok(Provider::VLLM),
163
164 // Unrecognized provider name
165 _ => Err(format!("Unknown provider: {}", s)),
166 }
167 }
168}

// ============================================================================
// CONFIGURATION HELPER FUNCTIONS
// ============================================================================

174/// Get the base URL for API requests with environment variable support.
175///
176/// This function implements a priority-based resolution strategy for determining
177/// the API base URL, making it easy to override defaults during development or deployment.
178///
179/// ## Resolution Priority
180///
181/// 1. **Environment Variable**: `OPEN_AGENT_BASE_URL` (highest priority)
182/// 2. **Provider Default**: The provider's default URL if specified
183/// 3. **Fallback Parameter**: Explicit fallback value
184/// 4. **Ultimate Default**: LM Studio's default URL (http://localhost:1234/v1)
185///
186/// ## Use Cases
187///
188/// - **Development**: Use provider defaults for quick setup
189/// - **Testing**: Override with environment variable to point to test server
190/// - **Production**: Specify explicit fallback for reliability
191///
192/// # Arguments
193///
194/// * `provider` - Optional provider enum to use its default URL
195/// * `fallback` - Optional explicit fallback URL string
196///
197/// # Returns
198///
199/// The resolved base URL as a `String`. Always returns a value (never None).
200///
201/// # Examples
202///
203/// ```rust,no_run
204/// use open_agent::{get_base_url, Provider};
205///
206/// // Use Ollama's default (http://localhost:11434/v1)
207/// let url = get_base_url(Some(Provider::Ollama), None);
208///
209/// // With explicit fallback
210/// let url = get_base_url(None, Some("http://localhost:1234/v1"));
211///
212/// // Override via environment (takes precedence over everything)
213/// // SAFETY: This is a doctest example showing how env vars work
214/// unsafe { std::env::set_var("OPEN_AGENT_BASE_URL", "http://custom-server:8080/v1"); }
215/// let url = get_base_url(Some(Provider::Ollama), None);
216/// // Returns "http://custom-server:8080/v1" despite provider being set
217/// ```
218pub fn get_base_url(provider: Option<Provider>, fallback: Option<&str>) -> String {
219 // Priority 1: Check environment variable first (allows runtime override)
220 if let Ok(url) = env::var("OPEN_AGENT_BASE_URL") {
221 return url;
222 }
223
224 // Priority 2: Use provider's default URL if specified
225 if let Some(p) = provider {
226 return p.default_url().to_string();
227 }
228
229 // Priority 3: Use explicit fallback, or default to LM Studio
230 fallback
231 .unwrap_or(Provider::LMStudio.default_url())
232 .to_string()
233}
/// Get the model name with optional environment variable override.
///
/// Unlike `get_base_url`, checking the `OPEN_AGENT_MODEL` environment
/// variable is opt-in, controlled by the `prefer_env` parameter.
///
/// ## Resolution Priority
///
/// If `prefer_env` is `true`:
/// 1. **Environment Variable**: `OPEN_AGENT_MODEL` (if set)
/// 2. **Fallback Parameter**: Explicit fallback value
///
/// If `prefer_env` is `false`:
/// 1. **Fallback Parameter**: Explicit fallback value only
///
/// ## Why Optional Environment Override?
///
/// Model names are often specified explicitly in code for consistency across
/// environments. The `prefer_env` flag gives you control over whether to
/// allow environment overrides.
///
/// # Arguments
///
/// * `fallback` - Optional explicit model name
/// * `prefer_env` - Whether to check environment variable first
///
/// # Returns
///
/// `Some(String)` if a model name was found, `None` if no model specified
///
/// # Examples
///
/// ```rust,no_run
/// use open_agent::get_model;
///
/// // Use explicit model name, allow environment override
/// let model = get_model(Some("llama3:8b"), true);
///
/// // Force specific model (ignore environment)
/// let model = get_model(Some("qwen2.5-32b"), false);
///
/// // Try environment only
/// let model = get_model(None, true);
/// // Returns Some(model) if OPEN_AGENT_MODEL is set, None otherwise
/// ```
pub fn get_model(fallback: Option<&str>, prefer_env: bool) -> Option<String> {
    // Only consult the environment when the caller opted in.
    let env_model = if prefer_env {
        env::var("OPEN_AGENT_MODEL").ok()
    } else {
        None
    };

    // Environment value wins when present; otherwise use the explicit
    // fallback (which may itself be absent).
    env_model.or_else(|| fallback.map(str::to_owned))
}

// ============================================================================
// TESTS
// ============================================================================

#[cfg(test)]
mod tests {
    use super::*;

    /// Clear the base-URL override so provider/fallback resolution is what
    /// gets exercised.
    ///
    /// NOTE(review): Rust tests in one binary share a single process, so
    /// environment mutations are process-global. This is currently harmless
    /// only because no test *sets* `OPEN_AGENT_BASE_URL` concurrently.
    fn clear_base_url_env() {
        // SAFETY: no other thread in this test binary writes this variable
        // while these tests run.
        unsafe {
            env::remove_var("OPEN_AGENT_BASE_URL");
        }
    }

    #[test]
    fn test_provider_default_urls() {
        // Table-driven check of every variant's documented default.
        let cases = [
            (Provider::LMStudio, "http://localhost:1234/v1"),
            (Provider::Ollama, "http://localhost:11434/v1"),
            (Provider::LlamaCpp, "http://localhost:8080/v1"),
            (Provider::VLLM, "http://localhost:8000/v1"),
        ];
        for (provider, expected) in cases {
            assert_eq!(provider.default_url(), expected);
        }
    }

    #[test]
    fn test_provider_from_str() {
        // Accepted spellings, including case and separator variations.
        let cases = [
            ("lmstudio", Provider::LMStudio),
            ("LM-Studio", Provider::LMStudio),
            ("ollama", Provider::Ollama),
            ("llamacpp", Provider::LlamaCpp),
            ("llama.cpp", Provider::LlamaCpp),
            ("vllm", Provider::VLLM),
        ];
        for (input, expected) in cases {
            assert_eq!(input.parse::<Provider>(), Ok(expected));
        }
        assert!("unknown".parse::<Provider>().is_err());
    }

    #[test]
    fn test_get_base_url_with_provider() {
        clear_base_url_env();
        assert_eq!(
            get_base_url(Some(Provider::Ollama), None),
            "http://localhost:11434/v1"
        );
    }

    #[test]
    fn test_get_base_url_with_fallback() {
        clear_base_url_env();
        assert_eq!(
            get_base_url(None, Some("http://custom:8080/v1")),
            "http://custom:8080/v1"
        );
    }
}
346}