1use serde::Deserialize;
4use serde::Serialize;
5use std::collections::HashMap;
6
/// Top-level configuration for the embeddings subsystem.
///
/// Deserialized from the `[embeddings]` table of the user's config file
/// (see `load_embeddings_config`). All fields are optional in the TOML;
/// missing values fall back to the serde defaults declared below.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct EmbeddingsConfig {
    /// Master switch for the embeddings feature; defaults to `false` (opt-in).
    #[serde(default)]
    pub enabled: bool,

    /// Which provider backs embeddings; defaults to automatic selection.
    #[serde(default = "default_provider")]
    pub provider: ProviderSelection,

    /// OpenAI-specific settings; omitted from serialized output when unset.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub openai: Option<OpenAIConfig>,

    /// Gemini-specific settings; omitted from serialized output when unset.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub gemini: Option<GeminiConfig>,

    /// Voyage-specific settings; omitted from serialized output when unset.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub voyage: Option<VoyageConfig>,

    /// Embedding cache tuning; missing table uses `CacheConfig::default()`.
    #[serde(default)]
    pub cache: CacheConfig,
}
34
35impl Default for EmbeddingsConfig {
36 fn default() -> Self {
37 Self {
38 enabled: false, provider: ProviderSelection::Auto,
40 openai: None,
41 gemini: None,
42 voyage: None,
43 cache: CacheConfig::default(),
44 }
45 }
46}
47
48#[derive(Debug, Clone, Deserialize, Serialize)]
50#[serde(rename_all = "lowercase")]
51pub enum ProviderSelection {
52 Auto,
54 OpenAI,
56 Gemini,
58 Voyage,
60}
61
/// Serde default for `EmbeddingsConfig::provider`: automatic selection.
const fn default_provider() -> ProviderSelection {
    ProviderSelection::Auto
}
65
/// Settings for the OpenAI embeddings provider.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct OpenAIConfig {
    /// Embedding model identifier; defaults to `text-embedding-3-small`.
    #[serde(default = "default_openai_model")]
    pub model: String,

    /// Requested output dimensionality; `None` uses the model's default.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dimensions: Option<usize>,

    /// Override for the API base URL; `None` uses the standard endpoint.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub api_endpoint: Option<String>,
}
81
/// Serde default for `OpenAIConfig::model`.
fn default_openai_model() -> String {
    String::from("text-embedding-3-small")
}
85
/// Settings for the Google Gemini embeddings provider.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct GeminiConfig {
    /// Embedding model identifier; defaults to `gemini-embedding-001`.
    #[serde(default = "default_gemini_model")]
    pub model: String,

    /// Gemini task-type hint sent with requests; defaults to
    /// `retrieval_document`.
    #[serde(default = "default_task_type")]
    pub task_type: String,
}
97
/// Serde default for `GeminiConfig::model`.
fn default_gemini_model() -> String {
    String::from("gemini-embedding-001")
}
101
/// Serde default for `GeminiConfig::task_type`.
fn default_task_type() -> String {
    String::from("retrieval_document")
}
105
/// Settings for the Voyage AI embeddings provider.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct VoyageConfig {
    /// Embedding model identifier; defaults to `voyage-3.5`.
    #[serde(default = "default_voyage_model")]
    pub model: String,

    /// Voyage input-type hint sent with requests; defaults to `document`.
    #[serde(default = "default_input_type")]
    pub input_type: String,
}
117
/// Serde default for `VoyageConfig::model`.
fn default_voyage_model() -> String {
    String::from("voyage-3.5")
}
121
/// Serde default for `VoyageConfig::input_type`.
fn default_input_type() -> String {
    String::from("document")
}
125
/// Tuning for the embedding result cache.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CacheConfig {
    /// Whether caching is active; defaults to `true`.
    #[serde(default = "default_true")]
    pub enabled: bool,

    /// Upper bound on cache size, in megabytes; defaults to 500.
    #[serde(default = "default_cache_size")]
    pub max_size_mb: usize,

    /// How long entries stay valid, in seconds; defaults to 3600 (1 hour).
    #[serde(default = "default_cache_ttl")]
    pub ttl_seconds: u64,
}
141
142impl Default for CacheConfig {
143 fn default() -> Self {
144 Self {
145 enabled: true,
146 max_size_mb: 500,
147 ttl_seconds: 3600, }
149 }
150}
151
/// Serde default for `CacheConfig::enabled`: caching on by default.
const fn default_true() -> bool {
    true
}
155
/// Serde default for `CacheConfig::max_size_mb`: 500 MB.
const fn default_cache_size() -> usize {
    500
}
159
/// Serde default for `CacheConfig::ttl_seconds`: 3600 s (one hour).
const fn default_cache_ttl() -> u64 {
    3600
}
163
/// Embedding effort tier; each tier maps to a concrete model and dimension
/// count per provider via [`IntelligenceMode::model_config`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum IntelligenceMode {
    /// Smallest/cheapest models, reduced output dimensions.
    Light,
    /// Balanced default models.
    Medium,
    /// Largest models for the highest-quality embeddings.
    Hard,
}

impl IntelligenceMode {
    /// Resolves this mode into model settings for the given provider name
    /// (`"openai"`, `"gemini"`, or `"voyage"`).
    ///
    /// Unknown provider strings fall back to the OpenAI medium-tier settings
    /// (`text-embedding-3-small`, 1536 dims). Voyage entries carry no
    /// explicit dimension count (`None`).
    pub fn model_config(&self, provider: &str) -> ProviderConfig {
        // Tiny constructor to keep the lookup table below readable.
        fn cfg(model: &str, dimensions: Option<usize>) -> ProviderConfig {
            ProviderConfig {
                model: model.to_string(),
                dimensions,
            }
        }

        match (self, provider) {
            (Self::Light, "openai") => cfg("text-embedding-3-small", Some(256)),
            (Self::Medium, "openai") => cfg("text-embedding-3-small", Some(1536)),
            (Self::Hard, "openai") => cfg("text-embedding-3-large", Some(3072)),
            (Self::Light, "gemini") => cfg("gemini-embedding-001", Some(256)),
            (Self::Medium, "gemini") => cfg("gemini-embedding-001", Some(768)),
            (Self::Hard, "gemini") => cfg("gemini-embedding-exp-03-07", Some(1536)),
            (Self::Light, "voyage") => cfg("voyage-3.5-lite", None),
            (Self::Medium, "voyage") => cfg("voyage-3.5", None),
            (Self::Hard, "voyage") => cfg("voyage-3-large", None),
            // Unrecognized provider: conservative OpenAI default.
            _ => cfg("text-embedding-3-small", Some(1536)),
        }
    }
}

/// Resolved embedding model settings for a single provider.
#[derive(Debug, Clone)]
pub struct ProviderConfig {
    /// Provider-specific model identifier.
    pub model: String,
    /// Requested output dimensionality; `None` lets the provider decide.
    pub dimensions: Option<usize>,
}
229
230pub fn load_embeddings_config() -> EmbeddingsConfig {
232 if let Ok(config_str) = std::fs::read_to_string(
234 dirs::home_dir()
235 .unwrap_or_default()
236 .join(".agcodex")
237 .join("config.toml"),
238 ) && let Ok(config) = toml::from_str::<HashMap<String, toml::Value>>(&config_str)
239 && let Some(embeddings) = config.get("embeddings")
240 && let Ok(config) = embeddings.clone().try_into::<EmbeddingsConfig>()
241 {
242 return config;
243 }
244
245 EmbeddingsConfig::default()
247}
248
249pub fn embeddings_available() -> bool {
251 let config = load_embeddings_config();
252 config.enabled && has_any_embedding_key()
253}
254
/// Returns true when at least one supported provider's API key variable is
/// set in the process environment.
pub fn has_any_embedding_key() -> bool {
    const KEY_VARS: [&str; 3] = [
        "OPENAI_EMBEDDING_KEY",
        "GEMINI_API_KEY",
        "VOYAGE_API_KEY",
    ];
    KEY_VARS.iter().any(|name| std::env::var(name).is_ok())
}
261
/// Looks up the API key for the named provider from the environment.
///
/// Returns `None` for unrecognized provider names, or when the matching
/// environment variable is unset (or not valid Unicode).
pub fn get_embedding_api_key(provider: &str) -> Option<String> {
    let var_name = match provider {
        "openai" => "OPENAI_EMBEDDING_KEY",
        "gemini" => "GEMINI_API_KEY",
        "voyage" => "VOYAGE_API_KEY",
        _ => return None,
    };
    std::env::var(var_name).ok()
}
271
#[cfg(test)]
mod tests {
    use super::*;

    // Embeddings must be opt-in: a default config starts disabled.
    #[test]
    fn test_default_config_is_disabled() {
        let config = EmbeddingsConfig::default();
        assert!(!config.enabled);
    }

    // Spot-check the mode -> (model, dimensions) lookup table for a few
    // provider/tier combinations.
    #[test]
    fn test_intelligence_mode_mapping() {
        let light_openai = IntelligenceMode::Light.model_config("openai");
        assert_eq!(light_openai.model, "text-embedding-3-small");
        assert_eq!(light_openai.dimensions, Some(256));

        let hard_openai = IntelligenceMode::Hard.model_config("openai");
        assert_eq!(hard_openai.model, "text-embedding-3-large");
        assert_eq!(hard_openai.dimensions, Some(3072));

        let medium_gemini = IntelligenceMode::Medium.model_config("gemini");
        assert_eq!(medium_gemini.model, "gemini-embedding-001");
        assert_eq!(medium_gemini.dimensions, Some(768));
    }
}