// ai_lib/provider/config.rs
use crate::types::AiLibError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Declarative description of an AI provider's HTTP API: where to reach it,
/// how to authenticate, which models to use, and how its wire format maps onto
/// this library's generic request/response fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// Base URL of the provider API; `validate` requires an `http://` or `https://` prefix.
    pub base_url: String,
    /// Name of the environment variable that holds the API key (never the key itself).
    pub api_key_env: String,
    /// Path of the chat-completions endpoint, appended to `base_url`.
    pub chat_endpoint: String,
    /// Default model identifier used for chat requests.
    pub chat_model: String,
    /// Optional model identifier used for multimodal requests, if the provider has one.
    pub multimodal_model: Option<String>,
    /// Optional path of the file-upload endpoint (None when uploads are unsupported).
    pub upload_endpoint: Option<String>,
    /// Optional upload size limit — presumably bytes (64 KiB in the OpenAI-compatible
    /// default); TODO confirm the unit against the upload implementation.
    pub upload_size_limit: Option<u64>,
    /// Optional path of the model-listing endpoint.
    pub models_endpoint: Option<String>,
    /// Extra HTTP headers sent with every request (e.g. `Content-Type`).
    pub headers: HashMap<String, String>,
    /// Mapping between this library's generic field/role names and the provider's wire format.
    pub field_mapping: FieldMapping,
}
32
/// Describes how this library's generic chat request/response shape maps onto a
/// specific provider's JSON wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldMapping {
    /// JSON field name that carries the message list (e.g. "messages").
    pub messages_field: String,
    /// JSON field name that carries the model identifier (e.g. "model").
    pub model_field: String,
    /// Maps this library's role names ("System", "User", "Assistant") to the
    /// provider's role strings; `validate` requires all three keys to be present.
    pub role_mapping: HashMap<String, String>,
    /// Dot-separated path to the reply text inside the response JSON
    /// (e.g. "choices.0.message.content").
    pub response_content_path: String,
}
48
49impl ProviderConfig {
50 pub fn openai_compatible(
61 base_url: &str,
62 api_key_env: &str,
63 chat_model: &str,
64 multimodal_model: Option<&str>,
65 ) -> Self {
66 let mut headers = HashMap::new();
67 headers.insert("Content-Type".to_string(), "application/json".to_string());
68
69 let mut role_mapping = HashMap::new();
70 role_mapping.insert("System".to_string(), "system".to_string());
71 role_mapping.insert("User".to_string(), "user".to_string());
72 role_mapping.insert("Assistant".to_string(), "assistant".to_string());
73
74 Self {
75 base_url: base_url.to_string(),
76 api_key_env: api_key_env.to_string(),
77 chat_endpoint: "/chat/completions".to_string(),
78 chat_model: chat_model.to_string(),
79 multimodal_model: multimodal_model.map(|s| s.to_string()),
80 upload_endpoint: Some("/v1/files".to_string()),
81 upload_size_limit: Some(1024 * 64),
82 models_endpoint: Some("/models".to_string()),
83 headers,
84 field_mapping: FieldMapping {
85 messages_field: "messages".to_string(),
86 model_field: "model".to_string(),
87 role_mapping,
88 response_content_path: "choices.0.message.content".to_string(),
89 },
90 }
91 }
92
93 pub fn openai_compatible_default(base_url: &str, api_key_env: &str) -> Self {
98 Self::openai_compatible(base_url, api_key_env, "gpt-3.5-turbo", Some("gpt-4o"))
99 }
100
101 pub fn validate(&self) -> Result<(), AiLibError> {
106 if self.base_url.is_empty() {
108 return Err(AiLibError::ConfigurationError(
109 "base_url cannot be empty".to_string(),
110 ));
111 }
112
113 if !self.base_url.starts_with("http://") && !self.base_url.starts_with("https://") {
114 return Err(AiLibError::ConfigurationError(
115 "base_url must be a valid HTTP/HTTPS URL".to_string(),
116 ));
117 }
118
119 if self.api_key_env.is_empty() {
121 return Err(AiLibError::ConfigurationError(
122 "api_key_env cannot be empty".to_string(),
123 ));
124 }
125
126 if self.chat_endpoint.is_empty() {
128 return Err(AiLibError::ConfigurationError(
129 "chat_endpoint cannot be empty".to_string(),
130 ));
131 }
132
133 if self.chat_model.is_empty() {
135 return Err(AiLibError::ConfigurationError(
136 "chat_model cannot be empty".to_string(),
137 ));
138 }
139
140 self.field_mapping.validate()?;
142
143 if let Some(content_type) = self.headers.get("Content-Type") {
145 if content_type != "application/json" && content_type != "multipart/form-data" {
146 return Err(AiLibError::ConfigurationError(
147 "Content-Type header must be 'application/json' or 'multipart/form-data'"
148 .to_string(),
149 ));
150 }
151 }
152
153 Ok(())
154 }
155
156 pub fn chat_url(&self) -> String {
158 format!("{}{}", self.base_url, self.chat_endpoint)
159 }
160
161 pub fn models_url(&self) -> Option<String> {
163 self.models_endpoint
164 .as_ref()
165 .map(|endpoint| format!("{}{}", self.base_url, endpoint))
166 }
167
168 pub fn upload_url(&self) -> Option<String> {
170 self.upload_endpoint
171 .as_ref()
172 .map(|endpoint| format!("{}{}", self.base_url, endpoint))
173 }
174
175 pub fn default_chat_model(&self) -> &str {
177 &self.chat_model
178 }
179
180 pub fn multimodal_model(&self) -> Option<&str> {
182 self.multimodal_model.as_deref()
183 }
184}
185
186impl FieldMapping {
187 pub fn validate(&self) -> Result<(), AiLibError> {
189 if self.messages_field.is_empty() {
190 return Err(AiLibError::ConfigurationError(
191 "messages_field cannot be empty".to_string(),
192 ));
193 }
194
195 if self.model_field.is_empty() {
196 return Err(AiLibError::ConfigurationError(
197 "model_field cannot be empty".to_string(),
198 ));
199 }
200
201 if self.response_content_path.is_empty() {
202 return Err(AiLibError::ConfigurationError(
203 "response_content_path cannot be empty".to_string(),
204 ));
205 }
206
207 if self.role_mapping.is_empty() {
209 return Err(AiLibError::ConfigurationError(
210 "role_mapping cannot be empty".to_string(),
211 ));
212 }
213
214 let required_roles = ["System", "User", "Assistant"];
216 for role in &required_roles {
217 if !self.role_mapping.contains_key(*role) {
218 return Err(AiLibError::ConfigurationError(format!(
219 "role_mapping must contain '{}' role",
220 role
221 )));
222 }
223 }
224
225 Ok(())
226 }
227}