1use anyhow::{anyhow, bail, Result};
17use memvid_core::{EmbeddingConfig, EmbeddingProvider, VecEmbedder};
18use reqwest::blocking::Client;
19use serde::{Deserialize, Serialize};
20use std::sync::atomic::{AtomicBool, Ordering};
21use std::time::Duration;
22use tracing::{debug, info, warn};
23
/// OpenAI embeddings REST endpoint.
const OPENAI_EMBEDDINGS_URL: &str = "https://api.openai.com/v1/embeddings";

/// Maximum number of input texts sent in a single API request.
const MAX_BATCH_SIZE: usize = 100;

/// HTTP timeout applied to each embeddings request.
const REQUEST_TIMEOUT: Duration = Duration::from_secs(60);

/// Per-text byte cap applied before embedding (see `truncate_for_embedding`);
/// guards against exceeding the model's token limit.
const MAX_EMBEDDING_TEXT_LEN: usize = 20_000;

37fn truncate_for_embedding(text: &str) -> std::borrow::Cow<'_, str> {
39 if text.len() <= MAX_EMBEDDING_TEXT_LEN {
40 std::borrow::Cow::Borrowed(text)
41 } else {
42 let end = text[..MAX_EMBEDDING_TEXT_LEN]
44 .char_indices()
45 .rev()
46 .next()
47 .map(|(i, c)| i + c.len_utf8())
48 .unwrap_or(MAX_EMBEDDING_TEXT_LEN);
49 warn!(
50 "Truncating embedding text from {} to {} chars to avoid token limit",
51 text.len(),
52 end
53 );
54 std::borrow::Cow::Owned(text[..end].to_string())
55 }
56}
57
/// Request body for the OpenAI `/v1/embeddings` endpoint.
///
/// Borrows the model name and input texts so no per-request copies are made.
#[derive(Debug, Serialize)]
struct OpenAIEmbeddingRequest<'a> {
    /// Model identifier, e.g. "text-embedding-3-large".
    model: &'a str,
    /// Texts to embed in this request (callers cap this at `MAX_BATCH_SIZE`).
    input: Vec<&'a str>,
    /// Optional reduced output dimension; omitted from the JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    dimensions: Option<usize>,
}
66
/// Top-level success response from the embeddings endpoint.
#[derive(Debug, Deserialize)]
struct OpenAIEmbeddingResponse {
    /// One entry per input text; callers re-sort by `index` rather than
    /// relying on response order.
    data: Vec<OpenAIEmbeddingData>,
    /// Model that actually served the request (logged for diagnostics).
    model: String,
    /// Token accounting reported by the API.
    usage: OpenAIUsage,
}
74
/// A single embedding vector plus its position in the request's input list.
#[derive(Debug, Deserialize)]
struct OpenAIEmbeddingData {
    embedding: Vec<f32>,
    /// Index of the corresponding input text; used to restore input order.
    index: usize,
}
80
/// Token-usage block of the response.
#[derive(Debug, Deserialize)]
struct OpenAIUsage {
    /// Input token count; deserialized for completeness but currently unread.
    #[allow(dead_code)]
    prompt_tokens: usize,
    /// Total billed tokens (logged at debug level after each call).
    total_tokens: usize,
}
87
/// Error envelope OpenAI returns on non-2xx responses.
#[derive(Debug, Deserialize)]
struct OpenAIErrorResponse {
    error: OpenAIError,
}
93
/// Error details inside an `OpenAIErrorResponse`.
#[derive(Debug, Deserialize)]
struct OpenAIError {
    /// Human-readable error description from the API.
    message: String,
    /// OpenAI's error category; the JSON field is literally named "type".
    #[serde(rename = "type")]
    error_type: String,
}
100
/// Embedding provider backed by the OpenAI HTTP API (blocking client).
///
/// Clones share the same `ready` flag via `Arc`, so readiness established
/// by one clone's `init()` is visible to all.
#[derive(Clone)]
pub struct OpenAIEmbeddingProvider {
    /// Secret API key; non-empty (enforced in `new`) and deliberately
    /// excluded from the manual `Debug` impl below.
    api_key: String,
    /// Model name, expected dimension, and optional batch size.
    config: EmbeddingConfig,
    /// Blocking HTTP client built with `REQUEST_TIMEOUT`.
    client: Client,
    /// Set to `true` once `init()` successfully embeds a probe text.
    ready: std::sync::Arc<AtomicBool>,
}
111
112impl std::fmt::Debug for OpenAIEmbeddingProvider {
113 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
114 f.debug_struct("OpenAIEmbeddingProvider")
115 .field("model", &self.config.model)
116 .field("dimension", &self.config.dimension)
117 .field("ready", &self.ready.load(Ordering::Relaxed))
118 .finish()
119 }
120}
121
122impl OpenAIEmbeddingProvider {
123 pub fn new(api_key: String, config: EmbeddingConfig) -> Result<Self> {
137 if api_key.is_empty() {
138 bail!("OpenAI API key cannot be empty");
139 }
140
141 let client = crate::http::blocking_client(REQUEST_TIMEOUT)
142 .map_err(|e| anyhow!("Failed to create HTTP client: {}", e))?;
143
144 Ok(Self {
145 api_key,
146 config,
147 client,
148 ready: std::sync::Arc::new(AtomicBool::new(false)),
149 })
150 }
151
152 pub fn from_env() -> Result<Self> {
157 let api_key = std::env::var("OPENAI_API_KEY")
158 .map_err(|_| anyhow!("OPENAI_API_KEY environment variable not set"))?;
159
160 let config = match std::env::var("OPENAI_EMBEDDING_MODEL") {
161 Ok(model) => match model.as_str() {
162 "text-embedding-3-small" => EmbeddingConfig::openai_small(),
163 "text-embedding-ada-002" => EmbeddingConfig::openai_ada(),
164 "text-embedding-3-large" | _ => EmbeddingConfig::openai_large(),
165 },
166 Err(_) => EmbeddingConfig::openai_large(),
167 };
168
169 Self::new(api_key, config)
170 }
171
172 pub fn large(api_key: String) -> Result<Self> {
174 Self::new(api_key, EmbeddingConfig::openai_large())
175 }
176
177 pub fn small(api_key: String) -> Result<Self> {
179 Self::new(api_key, EmbeddingConfig::openai_small())
180 }
181
182 pub fn ada(api_key: String) -> Result<Self> {
184 Self::new(api_key, EmbeddingConfig::openai_ada())
185 }
186
187 fn call_openai(&self, texts: &[&str]) -> Result<Vec<Vec<f32>>> {
189 if texts.is_empty() {
190 return Ok(Vec::new());
191 }
192
193 let request = OpenAIEmbeddingRequest {
194 model: &self.config.model,
195 input: texts.to_vec(),
196 dimensions: None, };
198
199 let response = self
200 .client
201 .post(OPENAI_EMBEDDINGS_URL)
202 .header("Authorization", format!("Bearer {}", self.api_key))
203 .header("Content-Type", "application/json")
204 .json(&request)
205 .send()
206 .map_err(|e| anyhow!("OpenAI API request failed: {}", e))?;
207
208 let status = response.status();
209 let body = response
210 .text()
211 .map_err(|e| anyhow!("Failed to read response body: {}", e))?;
212
213 if !status.is_success() {
214 if let Ok(error_response) = serde_json::from_str::<OpenAIErrorResponse>(&body) {
216 bail!(
217 "OpenAI API error ({}): {}",
218 error_response.error.error_type,
219 error_response.error.message
220 );
221 }
222 bail!("OpenAI API request failed with status {}: {}", status, body);
223 }
224
225 let embedding_response: OpenAIEmbeddingResponse = serde_json::from_str(&body)
226 .map_err(|e| anyhow!("Failed to parse OpenAI response: {}", e))?;
227
228 debug!(
229 "OpenAI embeddings: {} texts, {} tokens, model={}",
230 texts.len(),
231 embedding_response.usage.total_tokens,
232 embedding_response.model
233 );
234
235 let mut data = embedding_response.data;
237 data.sort_by_key(|d| d.index);
238
239 let embeddings: Vec<Vec<f32>> = data.into_iter().map(|d| d.embedding).collect();
240
241 if let Some(first) = embeddings.first() {
243 if first.len() != self.config.dimension {
244 warn!(
245 "OpenAI returned dimension {} but expected {}",
246 first.len(),
247 self.config.dimension
248 );
249 }
250 }
251
252 Ok(embeddings)
253 }
254
255 fn embed_with_retry(&self, texts: &[&str], max_retries: usize) -> Result<Vec<Vec<f32>>> {
257 let mut last_error = None;
258
259 for attempt in 0..max_retries {
260 match self.call_openai(texts) {
261 Ok(embeddings) => return Ok(embeddings),
262 Err(e) => {
263 let error_str = e.to_string();
264 if error_str.contains("rate_limit") || error_str.contains("429") {
265 let backoff = Duration::from_millis(500 * (1 << attempt));
266 warn!(
267 "Rate limited by OpenAI, retrying in {:?} (attempt {}/{})",
268 backoff,
269 attempt + 1,
270 max_retries
271 );
272 std::thread::sleep(backoff);
273 last_error = Some(e);
274 continue;
275 }
276 return Err(e);
277 }
278 }
279 }
280
281 Err(last_error.unwrap_or_else(|| anyhow!("Failed to embed after {} retries", max_retries)))
282 }
283}
284
285impl EmbeddingProvider for OpenAIEmbeddingProvider {
286 fn kind(&self) -> &str {
287 "openai"
288 }
289
290 fn model(&self) -> &str {
291 &self.config.model
292 }
293
294 fn dimension(&self) -> usize {
295 self.config.dimension
296 }
297
298 fn embed_text(&self, text: &str) -> memvid_core::Result<Vec<f32>> {
299 let text = truncate_for_embedding(text);
300 self.embed_with_retry(&[&text], 3)
301 .map(|mut v| v.pop().unwrap_or_default())
302 .map_err(|e| memvid_core::MemvidError::EmbeddingFailed {
303 reason: e.to_string().into_boxed_str(),
304 })
305 }
306
307 fn embed_batch(&self, texts: &[&str]) -> memvid_core::Result<Vec<Vec<f32>>> {
308 if texts.is_empty() {
309 return Ok(Vec::new());
310 }
311
312 let truncated: Vec<std::borrow::Cow<'_, str>> =
314 texts.iter().map(|t| truncate_for_embedding(t)).collect();
315 let truncated_refs: Vec<&str> = truncated.iter().map(|c| c.as_ref()).collect();
316
317 let batch_size = self
319 .config
320 .batch_size
321 .unwrap_or(MAX_BATCH_SIZE)
322 .min(MAX_BATCH_SIZE);
323 let mut all_embeddings = Vec::with_capacity(texts.len());
324
325 for chunk in truncated_refs.chunks(batch_size) {
326 let embeddings = self.embed_with_retry(chunk, 3).map_err(|e| {
327 memvid_core::MemvidError::EmbeddingFailed {
328 reason: e.to_string().into_boxed_str(),
329 }
330 })?;
331 all_embeddings.extend(embeddings);
332 }
333
334 Ok(all_embeddings)
335 }
336
337 fn is_ready(&self) -> bool {
338 self.ready.load(Ordering::Relaxed)
339 }
340
341 fn init(&mut self) -> memvid_core::Result<()> {
342 info!(
344 "Initializing OpenAI embedding provider with model: {}",
345 self.config.model
346 );
347
348 let test_embedding = self.embed_with_retry(&["test"], 1).map_err(|e| {
349 memvid_core::MemvidError::EmbeddingFailed {
350 reason: format!("Failed to initialize OpenAI provider: {}", e).into_boxed_str(),
351 }
352 })?;
353
354 if let Some(emb) = test_embedding.first() {
355 info!(
356 "OpenAI provider initialized: model={}, dimension={}",
357 self.config.model,
358 emb.len()
359 );
360 if emb.len() != self.config.dimension {
362 warn!(
363 "Updating dimension from {} to {}",
364 self.config.dimension,
365 emb.len()
366 );
367 }
368 }
369
370 self.ready.store(true, Ordering::Relaxed);
371 Ok(())
372 }
373}
374
375impl VecEmbedder for OpenAIEmbeddingProvider {
377 fn embed_query(&self, text: &str) -> memvid_core::Result<Vec<f32>> {
378 self.embed_text(text)
379 }
380
381 fn embed_chunks(&self, texts: &[&str]) -> memvid_core::Result<Vec<Vec<f32>>> {
382 self.embed_batch(texts)
383 }
384
385 fn embedding_dimension(&self) -> usize {
386 self.dimension()
387 }
388}
389
390pub fn try_openai_provider() -> Option<OpenAIEmbeddingProvider> {
392 match OpenAIEmbeddingProvider::from_env() {
393 Ok(provider) => {
394 info!("OpenAI embedding provider available");
395 Some(provider)
396 }
397 Err(e) => {
398 debug!("OpenAI provider not available: {}", e);
399 None
400 }
401 }
402}
403
#[cfg(test)]
mod tests {
    use super::*;

    // Sanity-check the well-known output dimensions of each OpenAI
    // embedding model configuration.
    #[test]
    fn test_config_dimensions() {
        assert_eq!(EmbeddingConfig::openai_large().dimension, 3072);
        assert_eq!(EmbeddingConfig::openai_small().dimension, 1536);
        assert_eq!(EmbeddingConfig::openai_ada().dimension, 1536);
    }

    // An empty API key must be rejected at construction time.
    #[test]
    fn test_empty_api_key() {
        let result = OpenAIEmbeddingProvider::new(String::new(), EmbeddingConfig::openai_large());
        assert!(result.is_err());
    }

    // Hits the live OpenAI API — run explicitly via `cargo test -- --ignored`
    // with OPENAI_API_KEY set. The 3072 assertion assumes the default
    // (large) model, i.e. OPENAI_EMBEDDING_MODEL unset or set to large.
    #[test]
    #[ignore]
    fn test_real_embedding() {
        let provider = OpenAIEmbeddingProvider::from_env().expect("OPENAI_API_KEY must be set");
        let embedding = provider.embed_text("Hello, world!").expect("embed");
        assert!(!embedding.is_empty());
        assert_eq!(embedding.len(), 3072);
    }
}