ai_lib/provider/config.rs

use crate::types::AiLibError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Configuration for an OpenAI-compatible chat provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// Base URL of the provider API, without a trailing slash.
    pub base_url: String,
    /// Name of the environment variable that holds the API key.
    pub api_key_env: String,
    /// Path of the chat completions endpoint, e.g. "/chat/completions".
    pub chat_endpoint: String,
    /// Default chat model identifier.
    pub chat_model: String,
    /// Optional model identifier used for multimodal requests.
    pub multimodal_model: Option<String>,
    /// Optional path of the file upload endpoint.
    pub upload_endpoint: Option<String>,
    /// Optional upload size limit.
    pub upload_size_limit: Option<u64>,
    /// Optional path of the model listing endpoint.
    pub models_endpoint: Option<String>,
    /// Extra HTTP headers sent with every request.
    pub headers: HashMap<String, String>,
    /// Mapping between internal field/role names and the provider wire format.
    pub field_mapping: FieldMapping,
}

/// Field and role name mapping for the provider's request/response format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldMapping {
    /// JSON field that carries the message list, e.g. "messages".
    pub messages_field: String,
    /// JSON field that carries the model name, e.g. "model".
    pub model_field: String,
    /// Mapping from internal role names ("System", "User", "Assistant") to wire values.
    pub role_mapping: HashMap<String, String>,
    /// Dot-separated path to the response content, e.g. "choices.0.message.content".
    pub response_content_path: String,
}

impl ProviderConfig {
    /// Build a configuration for an OpenAI-compatible provider with default
    /// endpoints, headers, and role mapping.
    pub fn openai_compatible(
        base_url: &str,
        api_key_env: &str,
        chat_model: &str,
        multimodal_model: Option<&str>,
    ) -> Self {
        let mut headers = HashMap::new();
        headers.insert("Content-Type".to_string(), "application/json".to_string());

        let mut role_mapping = HashMap::new();
        role_mapping.insert("System".to_string(), "system".to_string());
        role_mapping.insert("User".to_string(), "user".to_string());
        role_mapping.insert("Assistant".to_string(), "assistant".to_string());

        Self {
            base_url: base_url.to_string(),
            api_key_env: api_key_env.to_string(),
            chat_endpoint: "/chat/completions".to_string(),
            chat_model: chat_model.to_string(),
            multimodal_model: multimodal_model.map(|s| s.to_string()),
            upload_endpoint: Some("/v1/files".to_string()),
            upload_size_limit: Some(1024 * 64),
            models_endpoint: Some("/models".to_string()),
            headers,
            field_mapping: FieldMapping {
                messages_field: "messages".to_string(),
                model_field: "model".to_string(),
                role_mapping,
                response_content_path: "choices.0.message.content".to_string(),
            },
        }
    }

    /// Convenience constructor that uses default chat and multimodal models.
    pub fn openai_compatible_default(base_url: &str, api_key_env: &str) -> Self {
        Self::openai_compatible(base_url, api_key_env, "gpt-3.5-turbo", Some("gpt-4o"))
    }

    /// Validate the configuration, returning a `ConfigurationError` for the first problem found.
    pub fn validate(&self) -> Result<(), AiLibError> {
        if self.base_url.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "base_url cannot be empty".to_string(),
            ));
        }

        if !self.base_url.starts_with("http://") && !self.base_url.starts_with("https://") {
            return Err(AiLibError::ConfigurationError(
                "base_url must be a valid HTTP/HTTPS URL".to_string(),
            ));
        }

        if self.base_url.ends_with('/') {
            return Err(AiLibError::ConfigurationError(
                "base_url must not end with a trailing slash".to_string(),
            ));
        }

        if self.api_key_env.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "api_key_env cannot be empty".to_string(),
            ));
        }

        if self.chat_endpoint.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "chat_endpoint cannot be empty".to_string(),
            ));
        }
        Self::validate_endpoint_path(&self.chat_endpoint, "chat_endpoint")?;

        if self.chat_model.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "chat_model cannot be empty".to_string(),
            ));
        }

        if let Some(endpoint) = &self.upload_endpoint {
            Self::validate_endpoint_path(endpoint, "upload_endpoint")?;
        }

        if let Some(endpoint) = &self.models_endpoint {
            Self::validate_endpoint_path(endpoint, "models_endpoint")?;
        }

        self.field_mapping.validate()?;

        if let Some(content_type) = self.headers.get("Content-Type") {
            if content_type != "application/json" && content_type != "multipart/form-data" {
                return Err(AiLibError::ConfigurationError(
                    "Content-Type header must be 'application/json' or 'multipart/form-data'"
                        .to_string(),
                ));
            }
        }

        Ok(())
    }

    fn validate_endpoint_path(path: &str, field: &str) -> Result<(), AiLibError> {
        if !path.starts_with('/') {
            return Err(AiLibError::ConfigurationError(format!(
                "{field} must start with /"
            )));
        }
        Ok(())
    }

    /// Full URL of the chat completions endpoint.
    pub fn chat_url(&self) -> String {
        format!("{}{}", self.base_url, self.chat_endpoint)
    }

    /// Full URL of the model listing endpoint, if configured.
    pub fn models_url(&self) -> Option<String> {
        self.models_endpoint
            .as_ref()
            .map(|endpoint| format!("{}{}", self.base_url, endpoint))
    }

    /// Full URL of the upload endpoint, if configured.
    pub fn upload_url(&self) -> Option<String> {
        self.upload_endpoint
            .as_ref()
            .map(|endpoint| format!("{}{}", self.base_url, endpoint))
    }

    /// Default chat model identifier.
    pub fn default_chat_model(&self) -> &str {
        &self.chat_model
    }

    /// Multimodal model identifier, if configured.
    pub fn multimodal_model(&self) -> Option<&str> {
        self.multimodal_model.as_deref()
    }
}

impl FieldMapping {
    /// Validate the mapping: fields must be non-empty and role_mapping must
    /// cover the "System", "User", and "Assistant" roles.
    pub fn validate(&self) -> Result<(), AiLibError> {
        if self.messages_field.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "messages_field cannot be empty".to_string(),
            ));
        }

        if self.model_field.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "model_field cannot be empty".to_string(),
            ));
        }

        if self.response_content_path.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "response_content_path cannot be empty".to_string(),
            ));
        }

        if self.role_mapping.is_empty() {
            return Err(AiLibError::ConfigurationError(
                "role_mapping cannot be empty".to_string(),
            ));
        }

        let required_roles = ["System", "User", "Assistant"];
        for role in &required_roles {
            if !self.role_mapping.contains_key(*role) {
                return Err(AiLibError::ConfigurationError(format!(
                    "role_mapping must contain '{}' role",
                    role
                )));
            }
        }

        Ok(())
    }
}
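
// A minimal usage sketch of the constructors and validation above. The provider
// URL and environment variable name ("https://api.example.com", "EXAMPLE_API_KEY")
// are hypothetical placeholders, not values defined elsewhere in this crate.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn openai_compatible_default_config_validates() {
        // Construct a config with the library's default endpoints and models.
        let config =
            ProviderConfig::openai_compatible_default("https://api.example.com", "EXAMPLE_API_KEY");
        assert!(config.validate().is_ok());
        // URLs are composed by simple concatenation of base_url and endpoint path.
        assert_eq!(config.chat_url(), "https://api.example.com/chat/completions");
        assert_eq!(
            config.models_url().as_deref(),
            Some("https://api.example.com/models")
        );
        assert_eq!(config.default_chat_model(), "gpt-3.5-turbo");
    }

    #[test]
    fn trailing_slash_in_base_url_is_rejected() {
        // validate() requires base_url without a trailing slash, since endpoints
        // already begin with '/'.
        let config =
            ProviderConfig::openai_compatible_default("https://api.example.com/", "EXAMPLE_API_KEY");
        assert!(config.validate().is_err());
    }
}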