// ai_lib/provider/config.rs

use crate::types::AiLibError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Provider configuration template defining API access parameters
///
/// This struct contains all necessary configuration for connecting to an AI provider,
/// including base URL, API endpoints, authentication, and model specifications.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// Base URL for the provider's API
    pub base_url: String,
    /// Environment variable name for the API key
    pub api_key_env: String,
    /// Chat completion endpoint path
    pub chat_endpoint: String,
    /// Default chat model for this provider
    pub chat_model: String,
    /// Optional multimodal model for this provider (if supported)
    pub multimodal_model: Option<String>,
    /// Optional file upload endpoint path (e.g. OpenAI: "/v1/files")
    pub upload_endpoint: Option<String>,
    /// Optional file size limit (bytes) above which files should be uploaded instead of inlined
    pub upload_size_limit: Option<u64>,
    /// Model list endpoint path
    pub models_endpoint: Option<String>,
    /// Request headers template
    pub headers: HashMap<String, String>,
    /// Field mapping configuration
    pub field_mapping: FieldMapping,
}

/// Field name mappings for different API formats
///
/// This struct maps the standard ai-lib field names to provider-specific field names,
/// allowing the library to work with different API formats seamlessly.
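///
/// # Example
///
/// A minimal sketch of an OpenAI-style mapping, mirroring the values used by
/// `ProviderConfig::openai_compatible`; the module path in the import is
/// assumed from this file's location:
///
/// ```no_run
/// use std::collections::HashMap;
/// use ai_lib::provider::config::FieldMapping;
///
/// let mut role_mapping = HashMap::new();
/// role_mapping.insert("System".to_string(), "system".to_string());
/// role_mapping.insert("User".to_string(), "user".to_string());
/// role_mapping.insert("Assistant".to_string(), "assistant".to_string());
///
/// let mapping = FieldMapping {
///     messages_field: "messages".to_string(),
///     model_field: "model".to_string(),
///     role_mapping,
///     response_content_path: "choices.0.message.content".to_string(),
/// };
/// assert!(mapping.validate().is_ok());
/// ```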
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldMapping {
    /// Messages array field name (OpenAI: "messages", Gemini: "contents")
    pub messages_field: String,
    /// Model field name
    pub model_field: String,
    /// Role field mapping from ai-lib roles to provider roles
    pub role_mapping: HashMap<String, String>,
    /// Response content path (e.g. "choices.0.message.content")
    pub response_content_path: String,
}

impl ProviderConfig {
    /// OpenAI-compatible configuration template
    ///
    /// Creates an OpenAI-compatible configuration with the given models.
    /// For the stock defaults ("gpt-3.5-turbo" and "gpt-4o"), see
    /// `openai_compatible_default`.
    ///
    /// # Arguments
    /// * `base_url` - The base URL for the provider's API
    /// * `api_key_env` - Environment variable name for the API key
    /// * `chat_model` - Default chat model name
    /// * `multimodal_model` - Optional multimodal model name
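    ///
    /// # Example
    ///
    /// A minimal sketch; the base URL, environment variable name, and model
    /// names are illustrative:
    ///
    /// ```no_run
    /// # use ai_lib::provider::config::ProviderConfig;
    /// let config = ProviderConfig::openai_compatible(
    ///     "https://api.example.com/v1", // hypothetical endpoint
    ///     "EXAMPLE_API_KEY",            // hypothetical env var name
    ///     "gpt-4o-mini",
    ///     Some("gpt-4o"),
    /// );
    /// assert!(config.validate().is_ok());
    /// assert_eq!(config.default_chat_model(), "gpt-4o-mini");
    /// ```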
    pub fn openai_compatible(
        base_url: &str,
        api_key_env: &str,
        chat_model: &str,
        multimodal_model: Option<&str>,
    ) -> Self {
        let mut headers = HashMap::new();
        headers.insert("Content-Type".to_string(), "application/json".to_string());

        // Map ai-lib's capitalized role names to the lowercase names used by
        // OpenAI-compatible APIs.
        let mut role_mapping = HashMap::new();
        role_mapping.insert("System".to_string(), "system".to_string());
        role_mapping.insert("User".to_string(), "user".to_string());
        role_mapping.insert("Assistant".to_string(), "assistant".to_string());

        Self {
            base_url: base_url.to_string(),
            api_key_env: api_key_env.to_string(),
            chat_endpoint: "/chat/completions".to_string(),
            chat_model: chat_model.to_string(),
            multimodal_model: multimodal_model.map(|s| s.to_string()),
            upload_endpoint: Some("/v1/files".to_string()),
            // Inline files up to 64 KiB; upload anything larger.
            upload_size_limit: Some(1024 * 64),
            models_endpoint: Some("/models".to_string()),
            headers,
            field_mapping: FieldMapping {
                messages_field: "messages".to_string(),
                model_field: "model".to_string(),
                role_mapping,
                response_content_path: "choices.0.message.content".to_string(),
            },
        }
    }

    /// OpenAI-compatible configuration template with default models
    ///
    /// This is a convenience method that uses standard default models.
    /// For custom models, use `openai_compatible()` with explicit model names.
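    ///
    /// # Example
    ///
    /// A minimal sketch showing the stock defaults (the URL and env var name
    /// are illustrative):
    ///
    /// ```no_run
    /// # use ai_lib::provider::config::ProviderConfig;
    /// let config = ProviderConfig::openai_compatible_default(
    ///     "https://api.example.com", // hypothetical endpoint
    ///     "EXAMPLE_API_KEY",         // hypothetical env var name
    /// );
    /// assert_eq!(config.default_chat_model(), "gpt-3.5-turbo");
    /// assert_eq!(config.multimodal_model(), Some("gpt-4o"));
    /// ```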
    pub fn openai_compatible_default(base_url: &str, api_key_env: &str) -> Self {
        Self::openai_compatible(base_url, api_key_env, "gpt-3.5-turbo", Some("gpt-4o"))
    }

    /// Validate the configuration for completeness and correctness
    ///
    /// # Returns
    /// * `Result<(), AiLibError>` - Ok on success, error information on failure
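    ///
    /// # Example
    ///
    /// A sketch of a failing check (trailing slash on the base URL); the URL
    /// and env var name are illustrative:
    ///
    /// ```no_run
    /// # use ai_lib::provider::config::ProviderConfig;
    /// let mut config = ProviderConfig::openai_compatible_default(
    ///     "https://api.example.com",
    ///     "EXAMPLE_API_KEY",
    /// );
    /// assert!(config.validate().is_ok());
    ///
    /// config.base_url.push('/'); // now ends with '/', which validate() rejects
    /// assert!(config.validate().is_err());
    /// ```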
    pub fn validate(&self) -> Result<(), AiLibError> {
        // Validate base_url
        if self.base_url.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "base_url cannot be empty".to_string(),
            ));
        }

        if !self.base_url.starts_with("http://") && !self.base_url.starts_with("https://") {
            return Err(AiLibError::ConfigurationError(
                "base_url must be a valid HTTP/HTTPS URL".to_string(),
            ));
        }

        // base_url should not end with a trailing slash
        if self.base_url.ends_with('/') {
            return Err(AiLibError::ConfigurationError(
                "base_url must not end with a trailing slash".to_string(),
            ));
        }

        // Validate api_key_env
        if self.api_key_env.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "api_key_env cannot be empty".to_string(),
            ));
        }

        // Validate chat_endpoint
        if self.chat_endpoint.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "chat_endpoint cannot be empty".to_string(),
            ));
        }
        Self::validate_endpoint_path(&self.chat_endpoint, "chat_endpoint")?;

        // Validate chat_model
        if self.chat_model.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "chat_model cannot be empty".to_string(),
            ));
        }

        if let Some(endpoint) = &self.upload_endpoint {
            Self::validate_endpoint_path(endpoint, "upload_endpoint")?;
        }

        if let Some(endpoint) = &self.models_endpoint {
            Self::validate_endpoint_path(endpoint, "models_endpoint")?;
        }

        // Validate field_mapping
        self.field_mapping.validate()?;

        // Validate headers Content-Type
        if let Some(content_type) = self.headers.get("Content-Type") {
            if content_type != "application/json" && content_type != "multipart/form-data" {
                return Err(AiLibError::ConfigurationError(
                    "Content-Type header must be 'application/json' or 'multipart/form-data'"
                        .to_string(),
                ));
            }
        }

        Ok(())
    }

    /// Check that an endpoint path is relative to `base_url`, i.e. starts with '/'.
    fn validate_endpoint_path(path: &str, field: &str) -> Result<(), AiLibError> {
        if !path.starts_with('/') {
            return Err(AiLibError::ConfigurationError(format!(
                "{field} must start with /"
            )));
        }
        Ok(())
    }

    /// Get the complete chat completion URL
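    ///
    /// # Example
    ///
    /// A sketch of the concatenation (URL illustrative):
    ///
    /// ```no_run
    /// # use ai_lib::provider::config::ProviderConfig;
    /// let config = ProviderConfig::openai_compatible_default("https://api.example.com", "EXAMPLE_API_KEY");
    /// assert_eq!(config.chat_url(), "https://api.example.com/chat/completions");
    /// ```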
    pub fn chat_url(&self) -> String {
        format!("{}{}", self.base_url, self.chat_endpoint)
    }

    /// Get the complete models list URL
    pub fn models_url(&self) -> Option<String> {
        self.models_endpoint
            .as_ref()
            .map(|endpoint| format!("{}{}", self.base_url, endpoint))
    }

    /// Get the complete file upload URL
    pub fn upload_url(&self) -> Option<String> {
        self.upload_endpoint
            .as_ref()
            .map(|endpoint| format!("{}{}", self.base_url, endpoint))
    }

    /// Get the default chat model for this provider
    pub fn default_chat_model(&self) -> &str {
        &self.chat_model
    }

    /// Get the multimodal model if available
    pub fn multimodal_model(&self) -> Option<&str> {
        self.multimodal_model.as_deref()
    }
}

impl FieldMapping {
    /// Validate the field mapping configuration
    pub fn validate(&self) -> Result<(), AiLibError> {
        if self.messages_field.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "messages_field cannot be empty".to_string(),
            ));
        }

        if self.model_field.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "model_field cannot be empty".to_string(),
            ));
        }

        if self.response_content_path.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "response_content_path cannot be empty".to_string(),
            ));
        }

        // Validate role_mapping is not empty
        if self.role_mapping.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "role_mapping cannot be empty".to_string(),
            ));
        }

        // Validate required role mappings
        let required_roles = ["System", "User", "Assistant"];
        for role in &required_roles {
            if !self.role_mapping.contains_key(*role) {
                return Err(AiLibError::ConfigurationError(format!(
                    "role_mapping must contain '{role}' role"
                )));
            }
        }

        Ok(())
    }
}
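
// A small test module sketching the validation behavior above; the endpoint
// URL and environment variable name are illustrative, not a real provider.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn openai_compatible_default_validates() {
        let config =
            ProviderConfig::openai_compatible_default("https://api.example.com", "EXAMPLE_API_KEY");
        assert!(config.validate().is_ok());
        assert_eq!(
            config.models_url().as_deref(),
            Some("https://api.example.com/models")
        );
        assert_eq!(
            config.upload_url().as_deref(),
            Some("https://api.example.com/v1/files")
        );
    }

    #[test]
    fn rejects_malformed_base_url() {
        let mut config =
            ProviderConfig::openai_compatible_default("https://api.example.com", "EXAMPLE_API_KEY");

        config.base_url = "api.example.com".to_string(); // missing scheme
        assert!(config.validate().is_err());

        config.base_url = "https://api.example.com/".to_string(); // trailing slash
        assert!(config.validate().is_err());
    }

    #[test]
    fn rejects_incomplete_role_mapping() {
        let mut config =
            ProviderConfig::openai_compatible_default("https://api.example.com", "EXAMPLE_API_KEY");
        config.field_mapping.role_mapping.remove("Assistant");
        assert!(config.validate().is_err());
    }
}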