1use crate::services::ai::openai::OpenAIClient;
8use crate::services::ai::openrouter::OpenRouterClient;
9use crate::services::vad::{LocalVadDetector, VadAudioProcessor, VadSyncDetector};
10use crate::{
11 Result,
12 config::{Config, ConfigService},
13 core::{file_manager::FileManager, matcher::engine::MatchEngine},
14 error::SubXError,
15 services::ai::AIProvider,
16};
17
/// Factory that builds SubX runtime components from a resolved configuration.
///
/// Constructed once via [`ComponentFactory::new`] with a `ConfigService`; all
/// `create_*` methods read from the captured [`Config`] snapshot.
pub struct ComponentFactory {
    // Configuration snapshot taken at construction time; later changes in the
    // originating ConfigService are NOT reflected here.
    config: Config,
}
45
46impl ComponentFactory {
47 pub fn new(config_service: &dyn ConfigService) -> Result<Self> {
57 let config = config_service.get_config()?;
58 Ok(Self { config })
59 }
60
61 pub fn create_match_engine(&self) -> Result<MatchEngine> {
70 let ai_provider = self.create_ai_provider()?;
71 let match_config = crate::core::matcher::MatchConfig {
72 confidence_threshold: 0.8, max_sample_length: self.config.ai.max_sample_length,
74 enable_content_analysis: true,
75 backup_enabled: self.config.general.backup_enabled,
76 relocation_mode: crate::core::matcher::engine::FileRelocationMode::None,
77 conflict_resolution: crate::core::matcher::engine::ConflictResolution::AutoRename,
78 ai_model: self.config.ai.model.clone(),
79 max_subtitle_bytes: self.config.general.max_subtitle_bytes,
80 };
81 Ok(MatchEngine::new(ai_provider, match_config))
82 }
83
84 pub fn create_file_manager(&self) -> FileManager {
89 FileManager::new()
92 }
93
94 pub fn create_ai_provider(&self) -> Result<Box<dyn AIProvider>> {
104 create_ai_provider(&self.config.ai)
105 }
106
107 pub fn config(&self) -> &Config {
111 &self.config
112 }
113
114 pub fn create_vad_sync_detector(&self) -> Result<VadSyncDetector> {
122 VadSyncDetector::new(self.config.sync.vad.clone())
123 }
124
125 pub fn create_vad_detector(&self) -> Result<LocalVadDetector> {
133 LocalVadDetector::new(self.config.sync.vad.clone())
134 }
135
136 pub fn create_audio_processor(&self) -> Result<VadAudioProcessor> {
144 VadAudioProcessor::new()
145 }
146
147 pub fn create_translation_engine(&self) -> Result<crate::core::translation::TranslationEngine> {
155 let ai_provider: std::sync::Arc<dyn AIProvider> =
156 std::sync::Arc::from(self.create_ai_provider()?);
157 crate::core::translation::TranslationEngine::new(
158 ai_provider,
159 self.config.translation.batch_size,
160 )
161 }
162}
163
164fn validate_ai_config(ai_config: &crate::config::AIConfig) -> Result<()> {
178 let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
179 let is_local = canonical == "local";
180
181 if !is_local && ai_config.api_key.as_deref().unwrap_or("").trim().is_empty() {
186 return Err(SubXError::config(
187 "AI API key is required. Set ai.api_key in configuration or use environment variable."
188 .to_string(),
189 ));
190 }
191 if ai_config.model.trim().is_empty() {
192 return Err(SubXError::config(
193 "AI model is required. Set ai.model in configuration.".to_string(),
194 ));
195 }
196 if ai_config.temperature < 0.0 || ai_config.temperature > 2.0 {
197 return Err(SubXError::config(
198 "AI temperature must be between 0.0 and 2.0.".to_string(),
199 ));
200 }
201 if ai_config.max_tokens == 0 {
202 return Err(SubXError::config(
203 "AI max_tokens must be greater than 0.".to_string(),
204 ));
205 }
206 Ok(())
207}
208
209pub fn create_ai_provider(ai_config: &crate::config::AIConfig) -> Result<Box<dyn AIProvider>> {
214 let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
215 match canonical.as_str() {
216 "openai" => {
217 validate_ai_config(ai_config)?;
218 let client = OpenAIClient::from_config(ai_config)?;
219 Ok(Box::new(client))
220 }
221 "openrouter" => {
222 validate_ai_config(ai_config)?;
223 let client = OpenRouterClient::from_config(ai_config)?;
224 Ok(Box::new(client))
225 }
226 "azure-openai" => {
227 validate_ai_config(ai_config)?;
228 let client =
229 crate::services::ai::azure_openai::AzureOpenAIClient::from_config(ai_config)?;
230 Ok(Box::new(client))
231 }
232 "local" => {
233 validate_ai_config(ai_config)?;
234 let client = crate::services::ai::local::LocalLLMClient::from_config(ai_config)?;
235 Ok(Box::new(client))
236 }
237 other => Err(SubXError::config(format!(
238 "Unsupported AI provider: {}. Supported providers: openai, openrouter, anthropic, azure-openai, local",
239 other
240 ))),
241 }
242}
243
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::test_service::TestConfigService;

    #[test]
    fn test_component_factory_creation() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service);
        assert!(factory.is_ok());
    }

    // NOTE: a byte-identical duplicate of the test above (`test_factory_creation`)
    // was removed; it exercised exactly the same code path.

    #[test]
    fn test_create_file_manager() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();

        let _file_manager = factory.create_file_manager();
    }

    #[test]
    fn test_unsupported_ai_provider() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "unsupported".to_string();

        let result: Result<Box<dyn AIProvider>> = create_ai_provider(&config.ai);
        assert!(result.is_err());

        match result {
            Err(e) => {
                // The error message must enumerate every provider that has a
                // dispatch arm in `create_ai_provider`.
                let error_msg = e.to_string();
                assert!(error_msg.contains("Unsupported AI provider"));
                assert!(error_msg.contains("openai"), "missing openai: {error_msg}");
                assert!(
                    error_msg.contains("openrouter"),
                    "missing openrouter: {error_msg}"
                );
                assert!(
                    error_msg.contains("azure-openai"),
                    "missing azure-openai: {error_msg}"
                );
                assert!(error_msg.contains("local"), "missing local: {error_msg}");
            }
            Ok(_) => panic!("Expected error for unsupported provider"),
        }
    }

    #[test]
    fn test_create_vad_sync_detector() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_vad_sync_detector();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_vad_detector() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_vad_detector();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_audio_processor() {
        let config_service = TestConfigService::default();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_audio_processor();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_openai_success() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_missing_api_key() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_err());
        let error_msg = result.err().unwrap().to_string();
        assert!(error_msg.contains("API key is required"));
    }

    #[test]
    fn test_create_ai_provider_unsupported_provider() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("unsupported-provider", "model", "key");
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_err());
        let error_msg = result.err().unwrap().to_string();
        assert!(error_msg.contains("Unsupported AI provider"));
    }

    #[test]
    fn test_create_ai_provider_with_custom_base_url() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        config_service.config_mut().ai.base_url = "https://custom-api.com/v1".to_string();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_openrouter_success() {
        let config_service = TestConfigService::default();
        config_service.set_ai_settings_and_key(
            "openrouter",
            "deepseek/deepseek-r1-0528:free",
            "test-openrouter-key",
        );
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_azure_openai_success() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "azure-openai".to_string();
        config.ai.api_key = Some("azure-key-123".to_string());
        config.ai.model = "dep123".to_string();
        config.ai.api_version = Some("2025-04-01-preview".to_string());
        config.ai.base_url = "https://example.openai.azure.com".to_string();
        let result = create_ai_provider(&config.ai);
        assert!(result.is_ok());
    }

    #[test]
    fn test_create_ai_provider_local_success() {
        use crate::config::builder::TestConfigBuilder;
        // Local providers must construct without an API key.
        let config_service = TestConfigBuilder::new()
            .with_ai_provider("local")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let factory = ComponentFactory::new(&config_service).unwrap();
        let result = factory.create_ai_provider();
        assert!(
            result.is_ok(),
            "local provider must construct without api_key: {:?}",
            result.err()
        );

        // The `ollama` alias must normalize to `local` and succeed identically.
        let alias_service = TestConfigBuilder::new()
            .with_ai_provider("ollama")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let alias_factory = ComponentFactory::new(&alias_service).unwrap();
        assert!(
            alias_factory.create_ai_provider().is_ok(),
            "`ollama` alias must reach the local arm"
        );
    }
}