Skip to main content

subx_cli/core/
factory.rs

1//! Component factory for creating configured instances of core components.
2//!
3//! This module provides a centralized factory for creating instances of core
4//! components with proper configuration injection, eliminating the need for
5//! global configuration access within individual components.
6
7use crate::services::ai::openai::OpenAIClient;
8use crate::services::ai::openrouter::OpenRouterClient;
9use crate::services::vad::{LocalVadDetector, VadAudioProcessor, VadSyncDetector};
10use crate::{
11    Result,
12    config::{Config, ConfigService},
13    core::{file_manager::FileManager, matcher::engine::MatchEngine},
14    error::SubXError,
15    services::ai::AIProvider,
16};
17
18/// Component factory for creating configured instances.
19///
20/// This factory provides a centralized way to create core components
21/// with proper configuration injection, ensuring consistent component
22/// initialization across the application.
23///
24/// # Examples
25///
26/// ```rust
27/// use subx_cli::core::ComponentFactory;
28/// use subx_cli::config::ProductionConfigService;
29/// use std::sync::Arc;
30///
31/// # async fn example() -> subx_cli::Result<()> {
32/// let config_service = Arc::new(ProductionConfigService::new()?);
33/// let factory = ComponentFactory::new(config_service.as_ref())?;
34///
35/// // Create components with proper configuration
36/// let match_engine = factory.create_match_engine()?;
37/// let file_manager = factory.create_file_manager();
38/// let ai_provider = factory.create_ai_provider()?;
39/// # Ok(())
40/// # }
41/// ```
42pub struct ComponentFactory {
43    config: Config,
44}
45
46impl ComponentFactory {
47    /// Create a new component factory with the given configuration service.
48    ///
49    /// # Arguments
50    ///
51    /// * `config_service` - Configuration service to load configuration from
52    ///
53    /// # Errors
54    ///
55    /// Returns an error if configuration loading fails.
56    pub fn new(config_service: &dyn ConfigService) -> Result<Self> {
57        let config = config_service.get_config()?;
58        Ok(Self { config })
59    }
60
61    /// Create a match engine with AI configuration.
62    ///
63    /// Returns a properly configured MatchEngine instance using
64    /// the AI configuration section.
65    ///
66    /// # Errors
67    ///
68    /// Returns an error if AI provider creation fails.
69    pub fn create_match_engine(&self) -> Result<MatchEngine> {
70        let ai_provider = self.create_ai_provider()?;
71        let match_config = crate::core::matcher::MatchConfig {
72            confidence_threshold: 0.8, // Default value, can be configurable
73            max_sample_length: self.config.ai.max_sample_length,
74            enable_content_analysis: true,
75            backup_enabled: self.config.general.backup_enabled,
76            relocation_mode: crate::core::matcher::engine::FileRelocationMode::None,
77            conflict_resolution: crate::core::matcher::engine::ConflictResolution::AutoRename,
78            ai_model: self.config.ai.model.clone(),
79            max_subtitle_bytes: self.config.general.max_subtitle_bytes,
80        };
81        Ok(MatchEngine::new(ai_provider, match_config))
82    }
83
84    /// Create a file manager with general configuration.
85    ///
86    /// Returns a properly configured FileManager instance using
87    /// the general configuration section.
88    pub fn create_file_manager(&self) -> FileManager {
89        // For now, FileManager doesn't take configuration in its constructor
90        // This will be updated when FileManager is refactored to accept config
91        FileManager::new()
92    }
93
94    /// Create an AI provider with AI configuration.
95    ///
96    /// Returns a properly configured AI provider instance based on
97    /// the provider type specified in the AI configuration.
98    ///
99    /// # Errors
100    ///
101    /// Returns an error if the provider type is unsupported or
102    /// provider creation fails.
103    pub fn create_ai_provider(&self) -> Result<Box<dyn AIProvider>> {
104        create_ai_provider(&self.config.ai)
105    }
106
107    /// Get a reference to the current configuration.
108    ///
109    /// Returns a reference to the configuration used by this factory.
110    pub fn config(&self) -> &Config {
111        &self.config
112    }
113
114    /// Create a VAD sync detector with VAD configuration.
115    ///
116    /// Returns a properly configured VadSyncDetector instance using the VAD settings.
117    ///
118    /// # Errors
119    ///
120    /// Returns an error if VAD sync detector creation fails.
121    pub fn create_vad_sync_detector(&self) -> Result<VadSyncDetector> {
122        VadSyncDetector::new(self.config.sync.vad.clone())
123    }
124
125    /// Create a local VAD detector for audio processing.
126    ///
127    /// Returns a properly configured LocalVadDetector instance.
128    ///
129    /// # Errors
130    ///
131    /// Returns an error if local VAD detector initialization fails.
132    pub fn create_vad_detector(&self) -> Result<LocalVadDetector> {
133        LocalVadDetector::new(self.config.sync.vad.clone())
134    }
135
136    /// Create an audio processor for VAD operations.
137    ///
138    /// Returns a properly configured VadAudioProcessor instance.
139    ///
140    /// # Errors
141    ///
142    /// Returns an error if audio processor initialization fails.
143    pub fn create_audio_processor(&self) -> Result<VadAudioProcessor> {
144        VadAudioProcessor::new()
145    }
146
147    /// Create a translation engine using the configured AI provider and
148    /// translation settings.
149    ///
150    /// # Errors
151    ///
152    /// Returns an error when AI provider creation fails or the configured
153    /// translation batch size is invalid.
154    pub fn create_translation_engine(&self) -> Result<crate::core::translation::TranslationEngine> {
155        let ai_provider: std::sync::Arc<dyn AIProvider> =
156            std::sync::Arc::from(self.create_ai_provider()?);
157        crate::core::translation::TranslationEngine::new(
158            ai_provider,
159            self.config.translation.batch_size,
160        )
161    }
162}
163
164/// Create an AI provider from AI configuration.
165///
166/// This function creates the appropriate AI provider based on the
167/// provider type specified in the configuration.
168///
169/// # Arguments
170///
171/// * `ai_config` - AI configuration containing provider settings
172///
173/// # Errors
174///
175/// Returns an error if the provider type is unsupported or creation fails.
176/// Validate AI configuration parameters.
177fn validate_ai_config(ai_config: &crate::config::AIConfig) -> Result<()> {
178    let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
179    let is_local = canonical == "local";
180
181    // The `local` provider treats `api_key` as optional because most local
182    // OpenAI-compatible runtimes (Ollama, LM Studio, llama.cpp `llama-server`)
183    // accept unauthenticated requests. All hosted providers still require
184    // an api_key.
185    if !is_local && ai_config.api_key.as_deref().unwrap_or("").trim().is_empty() {
186        return Err(SubXError::config(
187            "AI API key is required. Set ai.api_key in configuration or use environment variable."
188                .to_string(),
189        ));
190    }
191    if ai_config.model.trim().is_empty() {
192        return Err(SubXError::config(
193            "AI model is required. Set ai.model in configuration.".to_string(),
194        ));
195    }
196    if ai_config.temperature < 0.0 || ai_config.temperature > 2.0 {
197        return Err(SubXError::config(
198            "AI temperature must be between 0.0 and 2.0.".to_string(),
199        ));
200    }
201    if ai_config.max_tokens == 0 {
202        return Err(SubXError::config(
203            "AI max_tokens must be greater than 0.".to_string(),
204        ));
205    }
206    Ok(())
207}
208
209/// Create an AI provider from AI configuration.
210///
211/// This function creates the appropriate AI provider based on the
212/// provider type specified in the configuration.
213pub fn create_ai_provider(ai_config: &crate::config::AIConfig) -> Result<Box<dyn AIProvider>> {
214    let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
215    match canonical.as_str() {
216        "openai" => {
217            validate_ai_config(ai_config)?;
218            let client = OpenAIClient::from_config(ai_config)?;
219            Ok(Box::new(client))
220        }
221        "openrouter" => {
222            validate_ai_config(ai_config)?;
223            let client = OpenRouterClient::from_config(ai_config)?;
224            Ok(Box::new(client))
225        }
226        "azure-openai" => {
227            validate_ai_config(ai_config)?;
228            let client =
229                crate::services::ai::azure_openai::AzureOpenAIClient::from_config(ai_config)?;
230            Ok(Box::new(client))
231        }
232        "local" => {
233            validate_ai_config(ai_config)?;
234            let client = crate::services::ai::local::LocalLLMClient::from_config(ai_config)?;
235            Ok(Box::new(client))
236        }
237        other => Err(SubXError::config(format!(
238            "Unsupported AI provider: {}. Supported providers: openai, openrouter, anthropic, azure-openai, local",
239            other
240        ))),
241    }
242}
243
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::test_service::TestConfigService;

    // NOTE: the previous `test_factory_creation` was a byte-for-byte
    // duplicate of this test and has been removed.
    #[test]
    fn test_component_factory_creation() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service);
        assert!(factory.is_ok());
    }

    #[test]
    fn test_create_file_manager() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();

        let _file_manager = factory.create_file_manager();
        // Basic validation that file manager was created
        // FileManager doesn't expose config yet, so just verify creation succeeds
    }

    #[test]
    fn test_unsupported_ai_provider() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "unsupported".to_string();

        let result: Result<Box<dyn AIProvider>> = create_ai_provider(&config.ai);
        let error_msg = result
            .expect_err("Expected error for unsupported provider")
            .to_string();
        assert!(error_msg.contains("Unsupported AI provider"));
        // The error message must enumerate all supported providers,
        // including the new `local` provider added by the
        // add-local-llm-provider change.
        assert!(error_msg.contains("openai"), "missing openai: {error_msg}");
        assert!(
            error_msg.contains("openrouter"),
            "missing openrouter: {error_msg}"
        );
        assert!(
            error_msg.contains("azure-openai"),
            "missing azure-openai: {error_msg}"
        );
        assert!(error_msg.contains("local"), "missing local: {error_msg}");
    }

    #[test]
    fn test_create_vad_sync_detector() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_vad_sync_detector();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_vad_detector() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_vad_detector();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_audio_processor() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_audio_processor();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_openai_success() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_missing_api_key() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("API key is required"));
    }

    #[test]
    fn test_create_ai_provider_unsupported_provider() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("unsupported-provider", "model", "key");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Unsupported AI provider"));
    }

    #[test]
    fn test_create_ai_provider_with_custom_base_url() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        config_service.config_mut().ai.base_url = "https://custom-api.com/v1".to_string();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_openrouter_success() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key(
            "openrouter",
            "deepseek/deepseek-r1-0528:free",
            "test-openrouter-key",
        );
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_azure_openai_success() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "azure-openai".to_string();
        config.ai.api_key = Some("azure-key-123".to_string());
        config.ai.model = "dep123".to_string();
        config.ai.api_version = Some("2025-04-01-preview".to_string());
        config.ai.base_url = "https://example.openai.azure.com".to_string();
        let result = create_ai_provider(&config.ai);
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_local_success() {
        // Mirror `test_create_ai_provider_openai_success`: build a
        // `TestConfigService` via `TestConfigBuilder` configured for the
        // local provider, with NO api_key (intentionally absent) and an
        // OpenAI-compatible Ollama-style base URL. The factory must accept
        // this and return a working AI provider.
        use crate::config::builder::TestConfigBuilder;
        let config_service = TestConfigBuilder::new()
            .with_ai_provider("local")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(
            result.is_ok(),
            "local provider must construct without api_key: {:?}",
            result.err()
        );

        // The `ollama` alias normalizes to `local` and must take the same
        // factory path.
        let alias_service = TestConfigBuilder::new()
            .with_ai_provider("ollama")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let alias_factory = ComponentFactory::new(&alias_service).unwrap();
        assert!(
            alias_factory.create_ai_provider().is_ok(),
            "`ollama` alias must reach the local arm"
        );
    }
}