subx_cli/config/
service.rs

1//! Configuration service system for dependency injection and test isolation.
2//!
3//! This module provides a clean abstraction for configuration management
4//! that enables dependency injection and complete test isolation without
5//! requiring unsafe code or global state resets.
6
7use crate::config::{EnvironmentProvider, SystemEnvironmentProvider};
8use crate::{Result, config::Config, error::SubXError};
9use config::{Config as ConfigCrate, ConfigBuilder, Environment, File, builder::DefaultState};
10use log::debug;
11use std::path::PathBuf;
12use std::sync::{Arc, RwLock};
13
/// Configuration service trait for dependency injection.
///
/// This trait abstracts configuration loading and reloading operations,
/// allowing different implementations for production and testing environments.
///
/// Implementors must be thread-safe (`Send + Sync`) so a single service
/// instance can be shared across threads (e.g. behind an `Arc`).
pub trait ConfigService: Send + Sync {
    /// Get the current configuration.
    ///
    /// Returns a clone of the current configuration state. This method
    /// may use internal caching for performance.
    ///
    /// # Errors
    ///
    /// Returns an error if configuration loading or validation fails.
    fn get_config(&self) -> Result<Config>;

    /// Reload configuration from sources.
    ///
    /// Forces a reload of configuration from all configured sources.
    /// This is useful for dynamic configuration updates.
    ///
    /// # Errors
    ///
    /// Returns an error if configuration reloading fails.
    fn reload(&self) -> Result<()>;
}
39
/// Production configuration service implementation.
///
/// This service loads configuration from multiple sources in order of priority:
/// 1. Environment variables (highest priority)
/// 2. User configuration file
/// 3. Default configuration file (lowest priority)
///
/// Configuration is cached after first load for performance.
pub struct ProductionConfigService {
    // Pre-assembled builder holding all file and `SUBX_*` environment sources.
    config_builder: ConfigBuilder<DefaultState>,
    // Lazily populated cache; `None` until the first successful load or after `reload`.
    cached_config: Arc<RwLock<Option<Config>>>,
    // Injected environment lookup, used for the OPENAI_* compatibility variables
    // read in `load_and_validate`.
    env_provider: Arc<dyn EnvironmentProvider>,
}
53
impl ProductionConfigService {
    /// Create a new production configuration service.
    ///
    /// Uses the default system environment variable provider
    /// ([`SystemEnvironmentProvider`]), which keeps this constructor
    /// compatible with existing call sites.
    ///
    /// # Errors
    ///
    /// Returns an error if the configuration builder cannot be initialized.
    pub fn new() -> Result<Self> {
        Self::with_env_provider(Arc::new(SystemEnvironmentProvider::new()))
    }

    /// Create a configuration service using the specified environment variable provider.
    ///
    /// Sources are registered in ascending priority order: the bundled
    /// `config/default` file, then the user configuration file, then
    /// `SUBX_*` environment variables (later sources override earlier ones).
    ///
    /// # Arguments
    /// * `env_provider` - Environment variable provider; used only for the
    ///   `OPENAI_*` compatibility lookups in `load_and_validate`. The
    ///   `SUBX_*` source below reads the real process environment, not this
    ///   provider.
    ///
    /// # Errors
    ///
    /// Returns an error if the configuration builder cannot be initialized.
    pub fn with_env_provider(env_provider: Arc<dyn EnvironmentProvider>) -> Result<Self> {
        // NOTE(review): with separator "_", variables nest on every
        // underscore, so only single-word field spellings such as
        // SUBX_AI_APIKEY map onto `ai.apikey` — see the partial-load fallback
        // in `load_and_validate`.
        let config_builder = ConfigCrate::builder()
            .add_source(File::with_name("config/default").required(false))
            .add_source(File::from(Self::user_config_path()).required(false))
            .add_source(Environment::with_prefix("SUBX").separator("_"));

        Ok(Self {
            config_builder,
            cached_config: Arc::new(RwLock::new(None)),
            env_provider,
        })
    }

    /// Add a custom configuration file source.
    ///
    /// The file is appended after the standard sources, so its values take
    /// precedence over them (the config crate applies later sources on top).
    ///
    /// # Arguments
    ///
    /// * `file_path` - Path to the additional configuration file
    ///
    /// # Errors
    ///
    /// Returns an error if the configuration builder cannot be updated.
    pub fn with_custom_file(mut self, file_path: PathBuf) -> Result<Self> {
        // NOTE(review): unlike the built-in file sources this one is not
        // marked `.required(false)`; a missing file would surface as an error
        // when the builder is built in `load_and_validate` — confirm intended.
        self.config_builder = self.config_builder.add_source(File::from(file_path));
        Ok(self)
    }

    /// Get the user configuration file path.
    ///
    /// Returns `<platform config dir>/subx/config.toml`, falling back to the
    /// current directory when no platform config directory is available.
    fn user_config_path() -> PathBuf {
        dirs::config_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join("subx")
            .join("config.toml")
    }

    /// Load and validate configuration from all sources.
    ///
    /// Tries a full deserialization into [`Config`] first; if that fails
    /// (e.g. because environment-variable sources produce a shape that does
    /// not match the struct), it falls back to extracting individual AI
    /// fields from the raw value map, then applies `OPENAI_*` compatibility
    /// overrides and validates the result.
    ///
    /// # Errors
    ///
    /// Returns an error if configuration loading or validation fails.
    fn load_and_validate(&self) -> Result<Config> {
        debug!("ProductionConfigService: Loading configuration from sources");

        // Build configuration from all sources
        let config_crate = self.config_builder.build_cloned().map_err(|e| {
            debug!("ProductionConfigService: Config build failed: {}", e);
            SubXError::config(format!("Failed to build configuration: {}", e))
        })?;

        // Start with default configuration
        let mut app_config = Config::default();

        // Try to deserialize from config crate, but fall back to defaults if needed.
        // Deserialization errors are intentionally swallowed here: partial env-only
        // configuration is expected to fail full deserialization.
        if let Ok(config) = config_crate.clone().try_deserialize::<Config>() {
            app_config = config;
            debug!("ProductionConfigService: Full configuration loaded successfully");
        } else {
            debug!("ProductionConfigService: Full deserialization failed, attempting partial load");

            // Try to load partial configurations from environment
            if let Ok(raw_map) = config_crate
                .try_deserialize::<std::collections::HashMap<String, serde_json::Value>>()
            {
                // Extract AI configuration if available
                if let Some(ai_section) = raw_map.get("ai") {
                    if let Some(ai_obj) = ai_section.as_object() {
                        // Extract individual AI fields that are available.
                        // Key "apikey" corresponds to the SUBX_AI_APIKEY variable
                        // (single word, because "_" is the nesting separator).
                        if let Some(api_key) = ai_obj.get("apikey").and_then(|v| v.as_str()) {
                            app_config.ai.api_key = Some(api_key.to_string());
                            debug!(
                                "ProductionConfigService: AI API key loaded from SUBX_AI_APIKEY"
                            );
                        }
                        if let Some(provider) = ai_obj.get("provider").and_then(|v| v.as_str()) {
                            app_config.ai.provider = provider.to_string();
                            debug!(
                                "ProductionConfigService: AI provider loaded from SUBX_AI_PROVIDER"
                            );
                        }
                        if let Some(model) = ai_obj.get("model").and_then(|v| v.as_str()) {
                            app_config.ai.model = model.to_string();
                            debug!("ProductionConfigService: AI model loaded from SUBX_AI_MODEL");
                        }
                        if let Some(base_url) = ai_obj.get("base_url").and_then(|v| v.as_str()) {
                            app_config.ai.base_url = base_url.to_string();
                            debug!(
                                "ProductionConfigService: AI base URL loaded from SUBX_AI_BASE_URL"
                            );
                        }
                    }
                }
            }
        }

        // Special handling for OPENAI_API_KEY environment variable.
        // This provides backward compatibility with direct OPENAI_API_KEY usage;
        // it only fills in a key when no other source supplied one.
        if app_config.ai.api_key.is_none() {
            if let Some(api_key) = self.env_provider.get_var("OPENAI_API_KEY") {
                debug!("ProductionConfigService: Found OPENAI_API_KEY environment variable");
                app_config.ai.api_key = Some(api_key);
            }
        }

        // Special handling for OPENAI_BASE_URL environment variable.
        // NOTE(review): unlike the API key above, this overrides unconditionally,
        // clobbering any base URL loaded from files or SUBX_* vars — confirm
        // this asymmetry is intentional.
        if let Some(base_url) = self.env_provider.get_var("OPENAI_BASE_URL") {
            debug!("ProductionConfigService: Found OPENAI_BASE_URL environment variable");
            app_config.ai.base_url = base_url;
        }

        // Validate the configuration
        crate::config::validator::validate_config(&app_config).map_err(|e| {
            debug!("ProductionConfigService: Config validation failed: {}", e);
            SubXError::config(format!("Configuration validation failed: {}", e))
        })?;

        debug!("ProductionConfigService: Configuration loaded and validated successfully");
        Ok(app_config)
    }
}
193
194impl ConfigService for ProductionConfigService {
195    fn get_config(&self) -> Result<Config> {
196        // Check cache first
197        {
198            let cache = self.cached_config.read().unwrap();
199            if let Some(config) = cache.as_ref() {
200                debug!("ProductionConfigService: Returning cached configuration");
201                return Ok(config.clone());
202            }
203        }
204
205        // Load configuration
206        let app_config = self.load_and_validate()?;
207
208        // Update cache
209        {
210            let mut cache = self.cached_config.write().unwrap();
211            *cache = Some(app_config.clone());
212        }
213
214        Ok(app_config)
215    }
216
217    fn reload(&self) -> Result<()> {
218        debug!("ProductionConfigService: Reloading configuration");
219
220        // Clear cache to force reload
221        {
222            let mut cache = self.cached_config.write().unwrap();
223            *cache = None;
224        }
225
226        // Trigger reload by calling get_config
227        self.get_config()?;
228
229        debug!("ProductionConfigService: Configuration reloaded successfully");
230        Ok(())
231    }
232}
233
impl Default for ProductionConfigService {
    /// Build a service via [`ProductionConfigService::new`].
    ///
    /// # Panics
    ///
    /// Panics if the underlying constructor fails, because `Default::default`
    /// cannot return an error.
    fn default() -> Self {
        Self::new().expect("Failed to create default ProductionConfigService")
    }
}
239
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::TestConfigService;
    use crate::config::TestEnvironmentProvider;
    use std::sync::Arc;

    // NOTE(review): tests that build `ProductionConfigService::new()` read the
    // real process environment and filesystem config sources, so results can
    // vary with the host environment.
    #[test]
    fn test_production_config_service_creation() {
        let service = ProductionConfigService::new();
        assert!(service.is_ok());
    }

    #[test]
    fn test_production_config_service_with_custom_file() {
        // Adding the source never touches the file; only a later build would.
        let service = ProductionConfigService::new()
            .unwrap()
            .with_custom_file(PathBuf::from("test.toml"));
        assert!(service.is_ok());
    }

    #[test]
    fn test_production_service_implements_config_service_trait() {
        let service = ProductionConfigService::new().unwrap();

        // Test trait methods
        let config1 = service.get_config();
        assert!(config1.is_ok());

        let reload_result = service.reload();
        assert!(reload_result.is_ok());

        let config2 = service.get_config();
        assert!(config2.is_ok());
    }

    #[test]
    fn test_config_service_with_openai_api_key() {
        // Test configuration with OpenAI API key using TestConfigService
        let test_service = TestConfigService::with_ai_settings_and_key(
            "openai",
            "gpt-4o-mini",
            "sk-test-openai-key-123",
        );

        let config = test_service.get_config().unwrap();
        assert_eq!(
            config.ai.api_key,
            Some("sk-test-openai-key-123".to_string())
        );
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4o-mini");
    }

    #[test]
    fn test_config_service_with_custom_base_url() {
        // Test configuration with custom base URL
        let mut config = Config::default();
        config.ai.base_url = "https://custom.openai.endpoint".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(loaded_config.ai.base_url, "https://custom.openai.endpoint");
    }

    #[test]
    fn test_config_service_with_both_openai_settings() {
        // Test configuration with both API key and base URL
        let mut config = Config::default();
        config.ai.api_key = Some("sk-test-api-key-combined".to_string());
        config.ai.base_url = "https://api.custom-openai.com".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(
            loaded_config.ai.api_key,
            Some("sk-test-api-key-combined".to_string())
        );
        assert_eq!(loaded_config.ai.base_url, "https://api.custom-openai.com");
    }

    #[test]
    fn test_config_service_provider_precedence() {
        // Test that manually configured values take precedence
        let test_service =
            TestConfigService::with_ai_settings_and_key("openai", "gpt-4", "sk-explicit-key");

        let config = test_service.get_config().unwrap();
        assert_eq!(config.ai.api_key, Some("sk-explicit-key".to_string()));
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4");
    }

    #[test]
    fn test_config_service_fallback_behavior() {
        // Test fallback to default values when no specific configuration provided
        let test_service = TestConfigService::with_defaults();
        let config = test_service.get_config().unwrap();

        // Should use default values
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4o-mini");
        assert_eq!(config.ai.base_url, "https://api.openai.com/v1");
        assert_eq!(config.ai.api_key, None); // No API key by default
    }

    #[test]
    fn test_config_service_reload_functionality() {
        // Test configuration reload capability
        let test_service = TestConfigService::with_defaults();

        // First load
        let config1 = test_service.get_config().unwrap();
        assert_eq!(config1.ai.provider, "openai");

        // Reload should always succeed for test service
        let reload_result = test_service.reload();
        assert!(reload_result.is_ok());

        // Second load should still work
        let config2 = test_service.get_config().unwrap();
        assert_eq!(config2.ai.provider, "openai");
    }

    #[test]
    fn test_config_service_custom_base_url_override() {
        // Test that custom base URL properly overrides default
        let mut config = Config::default();
        config.ai.base_url = "https://my-proxy.openai.com/v1".to_string();

        let test_service = TestConfigService::new(config);
        let loaded_config = test_service.get_config().unwrap();

        assert_eq!(loaded_config.ai.base_url, "https://my-proxy.openai.com/v1");
    }

    #[test]
    fn test_config_service_sync_settings() {
        // Test sync configuration settings
        let test_service = TestConfigService::with_sync_settings(0.8, 45.0);
        let config = test_service.get_config().unwrap();

        assert_eq!(config.sync.correlation_threshold, 0.8);
        assert_eq!(config.sync.max_offset_seconds, 45.0);
    }

    #[test]
    fn test_config_service_parallel_settings() {
        // Test parallel processing configuration
        let test_service = TestConfigService::with_parallel_settings(8, 200);
        let config = test_service.get_config().unwrap();

        assert_eq!(config.general.max_concurrent_jobs, 8);
        assert_eq!(config.parallel.task_queue_size, 200);
    }

    #[test]
    fn test_config_service_direct_access() {
        // Test direct configuration access and mutation
        let mut test_service = TestConfigService::with_defaults();

        // Test direct read access
        assert_eq!(test_service.config().ai.provider, "openai");

        // Test mutable access
        test_service.config_mut().ai.provider = "modified".to_string();
        assert_eq!(test_service.config().ai.provider, "modified");

        // Test that get_config reflects the changes
        let config = test_service.get_config().unwrap();
        assert_eq!(config.ai.provider, "modified");
    }

    #[test]
    fn test_production_config_service_openai_api_key_loading() {
        // Test OPENAI_API_KEY environment variable loading
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-test-openai-key-env");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(
            config.ai.api_key,
            Some("sk-test-openai-key-env".to_string())
        );
    }

    #[test]
    fn test_production_config_service_openai_base_url_loading() {
        // Test OPENAI_BASE_URL environment variable loading
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_BASE_URL", "https://test.openai.com/v1");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(config.ai.base_url, "https://test.openai.com/v1");
    }

    #[test]
    fn test_production_config_service_both_openai_env_vars() {
        // Test setting both OPENAI environment variables simultaneously
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-test-key-both");
        env_provider.set_var("OPENAI_BASE_URL", "https://both.openai.com/v1");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        assert_eq!(config.ai.api_key, Some("sk-test-key-both".to_string()));
        assert_eq!(config.ai.base_url, "https://both.openai.com/v1");
    }

    #[test]
    fn test_production_config_service_no_openai_env_vars() {
        // Test the case with no OPENAI environment variables
        let env_provider = TestEnvironmentProvider::new(); // Empty provider

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        // Should use default values
        assert_eq!(config.ai.api_key, None);
        assert_eq!(config.ai.base_url, "https://api.openai.com/v1"); // Default value
    }

    #[test]
    fn test_production_config_service_api_key_priority() {
        // Test API key priority: existing API key should not be overwritten
        let mut env_provider = TestEnvironmentProvider::new();
        env_provider.set_var("OPENAI_API_KEY", "sk-env-key");
        // NOTE(review): SUBX_AI_APIKEY set on the TestEnvironmentProvider is
        // never consulted — the SUBX_* source built in `with_env_provider`
        // reads the real process environment, not this provider. Only the
        // OPENAI_API_KEY fallback path is actually exercised here, hence the
        // weak assertion below.
        env_provider.set_var("SUBX_AI_APIKEY", "sk-config-key");

        let service = ProductionConfigService::with_env_provider(Arc::new(env_provider))
            .expect("Failed to create config service");

        let config = service.get_config().expect("Failed to get config");

        // The service should end up with some API key (at minimum from the
        // OPENAI_API_KEY compatibility path).
        assert!(config.ai.api_key.is_some());
    }
}
494}