1use crate::config::{EnvironmentProvider, SystemEnvironmentProvider};
8use crate::{Result, config::Config, error::SubXError};
9use config::{Config as ConfigCrate, ConfigBuilder, Environment, File, builder::DefaultState};
10use log::debug;
11use std::path::{Path, PathBuf};
12use std::sync::{Arc, RwLock};
13
/// Abstraction over configuration access so callers can depend on a trait
/// object rather than a concrete loader. Implementations must be
/// `Send + Sync` because the service is shared across threads.
pub trait ConfigService: Send + Sync {
    /// Returns the current application configuration (cached or freshly loaded).
    fn get_config(&self) -> Result<Config>;

    /// Discards any cached configuration and reloads from all sources.
    fn reload(&self) -> Result<()>;

    /// Persists the current configuration to the default config file path.
    fn save_config(&self) -> Result<()>;

    /// Persists the current configuration to an explicit file path.
    fn save_config_to_file(&self, path: &Path) -> Result<()>;

    /// Resolves the path of the configuration file used for persistence.
    fn get_config_file_path(&self) -> Result<PathBuf>;

    /// Looks up a single configuration value by dotted key (e.g. `"ai.model"`).
    fn get_config_value(&self, key: &str) -> Result<String>;

    /// Overwrites the persisted configuration file with defaults and reloads.
    fn reset_to_defaults(&self) -> Result<()>;
}
103
/// Production implementation of `ConfigService`.
///
/// Layers configuration sources (a `config/default` file, the user config
/// file, and `SUBX_*` environment variables) and caches the merged, validated
/// result behind an `RwLock`.
pub struct ProductionConfigService {
    // Pre-assembled source stack; fresh config instances are produced from it
    // via `build_cloned` on every (re)load.
    config_builder: ConfigBuilder<DefaultState>,
    // Lazily-populated cache of the last successfully validated config.
    cached_config: Arc<RwLock<Option<Config>>>,
    // Abstraction over process-environment lookups (injectable for tests).
    env_provider: Arc<dyn EnvironmentProvider>,
}
117
impl ProductionConfigService {
    /// Creates a service backed by the real process environment.
    pub fn new() -> Result<Self> {
        Self::with_env_provider(Arc::new(SystemEnvironmentProvider::new()))
    }

    /// Creates a service with an injected [`EnvironmentProvider`] (used by tests).
    ///
    /// Source precedence (later overrides earlier): `config/default` file,
    /// user config file, then `SUBX_`-prefixed environment variables.
    pub fn with_env_provider(env_provider: Arc<dyn EnvironmentProvider>) -> Result<Self> {
        let config_builder = ConfigCrate::builder()
            .add_source(File::with_name("config/default").required(false))
            .add_source(File::from(Self::user_config_path()).required(false))
            // NOTE: separator "_" means e.g. SUBX_AI_APIKEY maps to the key
            // `ai.apikey` (not `ai.api_key`); the partial-load fallback in
            // `load_and_validate` below compensates for that.
            .add_source(Environment::with_prefix("SUBX").separator("_"));

        Ok(Self {
            config_builder,
            cached_config: Arc::new(RwLock::new(None)),
            env_provider,
        })
    }

    /// Adds an extra config file source on top of the defaults.
    /// Unlike the built-in sources, this one is required: a missing file
    /// surfaces as a build error on the next load.
    pub fn with_custom_file(mut self, file_path: PathBuf) -> Result<Self> {
        self.config_builder = self.config_builder.add_source(File::from(file_path));
        Ok(self)
    }

    /// Platform config location: `<config_dir>/subx/config.toml`, falling
    /// back to the current directory when no config dir can be determined.
    fn user_config_path() -> PathBuf {
        dirs::config_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join("subx")
            .join("config.toml")
    }

    /// Builds a fresh `Config` from all sources, applies OPENAI_* environment
    /// fallbacks, and validates the result.
    ///
    /// Full deserialization is attempted first; if it fails (e.g. because env
    /// vars only provide a partial, oddly-keyed `ai` section — see the
    /// separator note in `with_env_provider`), a best-effort partial load
    /// extracts the known AI fields from a raw JSON map instead.
    fn load_and_validate(&self) -> Result<Config> {
        debug!("ProductionConfigService: Loading configuration from sources");

        // `build_cloned` leaves the builder reusable for subsequent reloads.
        let config_crate = self.config_builder.build_cloned().map_err(|e| {
            debug!("ProductionConfigService: Config build failed: {}", e);
            SubXError::config(format!("Failed to build configuration: {}", e))
        })?;

        // Start from defaults so a partial load still yields a usable config.
        let mut app_config = Config::default();

        if let Ok(config) = config_crate.clone().try_deserialize::<Config>() {
            app_config = config;
            debug!("ProductionConfigService: Full configuration loaded successfully");
        } else {
            debug!("ProductionConfigService: Full deserialization failed, attempting partial load");

            // Fallback: read the merged sources as a loose JSON map and pull
            // out only the AI fields we know how to interpret.
            if let Ok(raw_map) = config_crate
                .try_deserialize::<std::collections::HashMap<String, serde_json::Value>>()
            {
                if let Some(ai_section) = raw_map.get("ai") {
                    if let Some(ai_obj) = ai_section.as_object() {
                        // "apikey" (no underscore) is how SUBX_AI_APIKEY lands
                        // given the "_" separator; map it onto `api_key`.
                        if let Some(api_key) = ai_obj.get("apikey").and_then(|v| v.as_str()) {
                            app_config.ai.api_key = Some(api_key.to_string());
                            debug!(
                                "ProductionConfigService: AI API key loaded from SUBX_AI_APIKEY"
                            );
                        }
                        if let Some(provider) = ai_obj.get("provider").and_then(|v| v.as_str()) {
                            app_config.ai.provider = provider.to_string();
                            debug!(
                                "ProductionConfigService: AI provider loaded from SUBX_AI_PROVIDER"
                            );
                        }
                        if let Some(model) = ai_obj.get("model").and_then(|v| v.as_str()) {
                            app_config.ai.model = model.to_string();
                            debug!("ProductionConfigService: AI model loaded from SUBX_AI_MODEL");
                        }
                        if let Some(base_url) = ai_obj.get("base_url").and_then(|v| v.as_str()) {
                            app_config.ai.base_url = base_url.to_string();
                            debug!(
                                "ProductionConfigService: AI base URL loaded from SUBX_AI_BASE_URL"
                            );
                        }
                    }
                }
            }
        }

        // OPENAI_API_KEY is a fallback only: config-sourced keys win.
        if app_config.ai.api_key.is_none() {
            if let Some(api_key) = self.env_provider.get_var("OPENAI_API_KEY") {
                debug!("ProductionConfigService: Found OPENAI_API_KEY environment variable");
                app_config.ai.api_key = Some(api_key);
            }
        }

        // OPENAI_BASE_URL, by contrast, unconditionally overrides the config.
        if let Some(base_url) = self.env_provider.get_var("OPENAI_BASE_URL") {
            debug!("ProductionConfigService: Found OPENAI_BASE_URL environment variable");
            app_config.ai.base_url = base_url;
        }

        crate::config::validator::validate_config(&app_config).map_err(|e| {
            debug!("ProductionConfigService: Config validation failed: {}", e);
            SubXError::config(format!("Configuration validation failed: {}", e))
        })?;

        debug!("ProductionConfigService: Configuration loaded and validated successfully");
        Ok(app_config)
    }
}
257
258impl ConfigService for ProductionConfigService {
259 fn get_config(&self) -> Result<Config> {
260 {
262 let cache = self.cached_config.read().unwrap();
263 if let Some(config) = cache.as_ref() {
264 debug!("ProductionConfigService: Returning cached configuration");
265 return Ok(config.clone());
266 }
267 }
268
269 let app_config = self.load_and_validate()?;
271
272 {
274 let mut cache = self.cached_config.write().unwrap();
275 *cache = Some(app_config.clone());
276 }
277
278 Ok(app_config)
279 }
280
281 fn reload(&self) -> Result<()> {
282 debug!("ProductionConfigService: Reloading configuration");
283
284 {
286 let mut cache = self.cached_config.write().unwrap();
287 *cache = None;
288 }
289
290 self.get_config()?;
292
293 debug!("ProductionConfigService: Configuration reloaded successfully");
294 Ok(())
295 }
296
297 fn save_config(&self) -> Result<()> {
298 let _config = self.get_config()?;
299 let path = self.get_config_file_path()?;
300 self.save_config_to_file(&path)
301 }
302
303 fn save_config_to_file(&self, path: &Path) -> Result<()> {
304 let config = self.get_config()?;
305 let toml_content = toml::to_string_pretty(&config)
306 .map_err(|e| SubXError::config(format!("TOML serialization error: {}", e)))?;
307
308 if let Some(parent) = path.parent() {
309 std::fs::create_dir_all(parent).map_err(|e| {
310 SubXError::config(format!("Failed to create config directory: {}", e))
311 })?;
312 }
313
314 std::fs::write(path, toml_content)
315 .map_err(|e| SubXError::config(format!("Failed to write config file: {}", e)))?;
316
317 Ok(())
318 }
319
320 fn get_config_file_path(&self) -> Result<PathBuf> {
321 if let Some(custom) = self.env_provider.get_var("SUBX_CONFIG_PATH") {
323 return Ok(PathBuf::from(custom));
324 }
325
326 let config_dir = dirs::config_dir()
327 .ok_or_else(|| SubXError::config("Unable to determine config directory"))?;
328 Ok(config_dir.join("subx").join("config.toml"))
329 }
330
331 fn get_config_value(&self, key: &str) -> Result<String> {
332 let config = self.get_config()?;
333 let parts: Vec<&str> = key.split('.').collect();
334 match parts.as_slice() {
335 ["ai", "provider"] => Ok(config.ai.provider.clone()),
336 ["ai", "model"] => Ok(config.ai.model.clone()),
337 ["ai", "api_key"] => Ok(config.ai.api_key.clone().unwrap_or_default()),
338 ["ai", "base_url"] => Ok(config.ai.base_url.clone()),
339 ["ai", "temperature"] => Ok(config.ai.temperature.to_string()),
340 ["formats", "default_output"] => Ok(config.formats.default_output.clone()),
341 ["formats", "default_encoding"] => Ok(config.formats.default_encoding.clone()),
342 ["formats", "preserve_styling"] => Ok(config.formats.preserve_styling.to_string()),
343 ["sync", "max_offset_seconds"] => Ok(config.sync.max_offset_seconds.to_string()),
344 ["sync", "correlation_threshold"] => Ok(config.sync.correlation_threshold.to_string()),
345 ["sync", "audio_sample_rate"] => Ok(config.sync.audio_sample_rate.to_string()),
346 ["general", "backup_enabled"] => Ok(config.general.backup_enabled.to_string()),
347 ["general", "max_concurrent_jobs"] => {
348 Ok(config.general.max_concurrent_jobs.to_string())
349 }
350 ["general", "log_level"] => Ok(config.general.log_level.clone()),
351 ["parallel", "max_workers"] => Ok(config.parallel.max_workers.to_string()),
352 ["parallel", "chunk_size"] => Ok(config.parallel.chunk_size.to_string()),
353 _ => Err(SubXError::config(format!(
354 "Unknown configuration key: {}",
355 key
356 ))),
357 }
358 }
359
360 fn reset_to_defaults(&self) -> Result<()> {
361 let default_config = Config::default();
362 let path = self.get_config_file_path()?;
363
364 let toml_content = toml::to_string_pretty(&default_config)
365 .map_err(|e| SubXError::config(format!("TOML serialization error: {}", e)))?;
366
367 if let Some(parent) = path.parent() {
368 std::fs::create_dir_all(parent).map_err(|e| {
369 SubXError::config(format!("Failed to create config directory: {}", e))
370 })?;
371 }
372
373 std::fs::write(&path, toml_content)
374 .map_err(|e| SubXError::config(format!("Failed to write config file: {}", e)))?;
375
376 self.reload()
377 }
378}
379
380impl Default for ProductionConfigService {
381 fn default() -> Self {
382 Self::new().expect("Failed to create default ProductionConfigService")
383 }
384}
385
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::TestConfigService;
    use crate::config::TestEnvironmentProvider;
    use std::sync::Arc;

    // Smoke test: the production service constructs against real sources.
    #[test]
    fn test_production_config_service_creation() {
        let service = ProductionConfigService::new();
        assert!(service.is_ok());
    }

    // Adding a custom file source succeeds at build time (the file is only
    // read on load, not when the source is registered).
    #[test]
    fn test_production_config_service_with_custom_file() {
        let service = ProductionConfigService::new()
            .unwrap()
            .with_custom_file(PathBuf::from("test.toml"));
        assert!(service.is_ok());
    }

    // Exercises the full trait surface: load, reload, load again.
    #[test]
    fn test_production_service_implements_config_service_trait() {
        let service = ProductionConfigService::new().unwrap();

        let config1 = service.get_config();
        assert!(config1.is_ok());

        let reload_result = service.reload();
        assert!(reload_result.is_ok());

        let config2 = service.get_config();
        assert!(config2.is_ok());
    }

    // Explicit AI settings round-trip through the test service unchanged.
    #[test]
    fn test_config_service_with_openai_api_key() {
        let test_service = TestConfigService::with_ai_settings_and_key(
            "openai",
            "gpt-4.1-mini",
            "sk-test-openai-key-123",
        );

        let config = test_service.get_config().unwrap();
        assert_eq!(
            config.ai.api_key,
            Some("sk-test-openai-key-123".to_string())
        );
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4.1-mini");
    }

    // A custom base URL set on the config is preserved on read-back.
    #[test]
    fn test_config_service_with_custom_base_url() {
        let mut config = Config::default();
        config.ai.base_url = "https://custom.openai.endpoint".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(loaded_config.ai.base_url, "https://custom.openai.endpoint");
    }

    // API key and base URL set together are both preserved.
    #[test]
    fn test_config_service_with_both_openai_settings() {
        let mut config = Config::default();
        config.ai.api_key = Some("sk-test-api-key-combined".to_string());
        config.ai.base_url = "https://api.custom-openai.com".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(
            loaded_config.ai.api_key,
            Some("sk-test-api-key-combined".to_string())
        );
        assert_eq!(loaded_config.ai.base_url, "https://api.custom-openai.com");
    }

    // Explicitly provided settings take precedence (no defaults leak through).
    #[test]
    fn test_config_service_provider_precedence() {
        let test_service =
            TestConfigService::with_ai_settings_and_key("openai", "gpt-4.1", "sk-explicit-key");

        let config = test_service.get_config().unwrap();
        assert_eq!(config.ai.api_key, Some("sk-explicit-key".to_string()));
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4.1");
    }

    // Pins the documented default values of the AI section.
    #[test]
    fn test_config_service_fallback_behavior() {
        let test_service = TestConfigService::with_defaults();
        let config = test_service.get_config().unwrap();

        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4.1-mini");
        assert_eq!(config.ai.base_url, "https://api.openai.com/v1");
        assert_eq!(config.ai.api_key, None); }

    // Reloading the test service is a no-op that must not fail or mutate.
    #[test]
    fn test_config_service_reload_functionality() {
        let test_service = TestConfigService::with_defaults();

        let config1 = test_service.get_config().unwrap();
        assert_eq!(config1.ai.provider, "openai");

        let reload_result = test_service.reload();
        assert!(reload_result.is_ok());

        let config2 = test_service.get_config().unwrap();
        assert_eq!(config2.ai.provider, "openai");
    }

    // A proxy-style base URL override survives a round trip.
    #[test]
    fn test_config_service_custom_base_url_override() {
        let mut config = Config::default();
        config.ai.base_url = "https://my-proxy.openai.com/v1".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(loaded_config.ai.base_url, "https://my-proxy.openai.com/v1");
    }

    // Sync-section constructor parameters land in the right fields.
    #[test]
    fn test_config_service_sync_settings() {
        let test_service = TestConfigService::with_sync_settings(0.8, 45.0);
        let config = test_service.get_config().unwrap();

        assert_eq!(config.sync.correlation_threshold, 0.8);
        assert_eq!(config.sync.max_offset_seconds, 45.0);
    }

    // Parallel-section constructor parameters land in the right fields
    // (note: first arg maps to general.max_concurrent_jobs).
    #[test]
    fn test_config_service_parallel_settings() {
        let test_service = TestConfigService::with_parallel_settings(8, 200);
        let config = test_service.get_config().unwrap();

        assert_eq!(config.general.max_concurrent_jobs, 8);
        assert_eq!(config.parallel.task_queue_size, 200);
    }

    // Direct mutable access on the test double is reflected by get_config.
    #[test]
    fn test_config_service_direct_access() {
        let mut test_service = TestConfigService::with_defaults();

        assert_eq!(test_service.config().ai.provider, "openai");

        test_service.config_mut().ai.provider = "modified".to_string();
        assert_eq!(test_service.config().ai.provider, "modified");

        let config = test_service.get_config().unwrap();
        assert_eq!(config.ai.provider, "modified");
    }

    // OPENAI_API_KEY env fallback populates ai.api_key when config has none.
    #[test]
    fn test_production_config_service_openai_api_key_loading() {
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-test-openai-key-env");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(
            config.ai.api_key,
            Some("sk-test-openai-key-env".to_string())
        );
    }

    // OPENAI_BASE_URL env var unconditionally overrides ai.base_url.
    #[test]
    fn test_production_config_service_openai_base_url_loading() {
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_BASE_URL", "https://test.openai.com/v1");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(config.ai.base_url, "https://test.openai.com/v1");
    }

    // Both OPENAI_* env vars applied together.
    #[test]
    fn test_production_config_service_both_openai_env_vars() {
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-test-key-both");
        env_provider.set_var("OPENAI_BASE_URL", "https://both.openai.com/v1");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(config.ai.api_key, Some("sk-test-key-both".to_string()));
        assert_eq!(config.ai.base_url, "https://both.openai.com/v1");
    }

    // With an empty environment, defaults remain untouched.
    #[test]
    fn test_production_config_service_no_openai_env_vars() {
        let env_provider = TestEnvironmentProvider::new(); let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(config.ai.api_key, None);
        assert_eq!(config.ai.base_url, "https://api.openai.com/v1"); }

    // When both SUBX_AI_APIKEY and OPENAI_API_KEY are present, some key is
    // loaded. NOTE(review): only presence is asserted here, not which source
    // wins — a precedence assertion would strengthen this test.
    #[test]
    fn test_production_config_service_api_key_priority() {
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-env-key");
        env_provider.set_var("SUBX_AI_APIKEY", "sk-config-key");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert!(config.ai.api_key.is_some());
    }
}