use crate::services::ai::openai::OpenAIClient;
use crate::services::ai::openrouter::OpenRouterClient;
use crate::services::vad::{LocalVadDetector, VadAudioProcessor, VadSyncDetector};
use crate::{
Result,
config::{Config, ConfigService},
core::{file_manager::FileManager, matcher::engine::MatchEngine},
error::SubXError,
services::ai::AIProvider,
};
/// Factory that builds the application's core runtime components
/// (match engine, AI providers, VAD detectors, translation engine)
/// from a single configuration snapshot taken at construction time.
pub struct ComponentFactory {
/// Configuration snapshot obtained from the `ConfigService` in `new`;
/// all `create_*` methods read from this copy, not the live service.
config: Config,
}
impl ComponentFactory {
    /// Captures the current configuration from `config_service`.
    ///
    /// # Errors
    /// Propagates any failure from `ConfigService::get_config`.
    pub fn new(config_service: &dyn ConfigService) -> Result<Self> {
        Ok(Self {
            config: config_service.get_config()?,
        })
    }

    /// Assembles a `MatchEngine` backed by the configured AI provider.
    ///
    /// # Errors
    /// Fails when the AI provider cannot be constructed from the config.
    pub fn create_match_engine(&self) -> Result<MatchEngine> {
        use crate::core::matcher::engine::{ConflictResolution, FileRelocationMode};

        let provider = self.create_ai_provider()?;
        let match_config = crate::core::matcher::MatchConfig {
            // NOTE(review): threshold and content analysis are fixed here
            // rather than read from config — confirm that is intentional.
            confidence_threshold: 0.8,
            max_sample_length: self.config.ai.max_sample_length,
            enable_content_analysis: true,
            backup_enabled: self.config.general.backup_enabled,
            relocation_mode: FileRelocationMode::None,
            conflict_resolution: ConflictResolution::AutoRename,
            ai_model: self.config.ai.model.clone(),
            max_subtitle_bytes: self.config.general.max_subtitle_bytes,
        };
        Ok(MatchEngine::new(provider, match_config))
    }

    /// Returns a fresh, configuration-independent `FileManager`.
    pub fn create_file_manager(&self) -> FileManager {
        FileManager::new()
    }

    /// Instantiates the AI provider selected by `config.ai` (see the free
    /// function `create_ai_provider` for the selection rules).
    ///
    /// # Errors
    /// Fails on unsupported providers or invalid AI configuration.
    pub fn create_ai_provider(&self) -> Result<Box<dyn AIProvider>> {
        create_ai_provider(&self.config.ai)
    }

    /// Read-only access to the captured configuration snapshot.
    pub fn config(&self) -> &Config {
        &self.config
    }

    /// Builds a VAD-based sync detector from the `sync.vad` settings.
    pub fn create_vad_sync_detector(&self) -> Result<VadSyncDetector> {
        let vad_settings = self.config.sync.vad.clone();
        VadSyncDetector::new(vad_settings)
    }

    /// Builds a local voice-activity detector from the `sync.vad` settings.
    pub fn create_vad_detector(&self) -> Result<LocalVadDetector> {
        let vad_settings = self.config.sync.vad.clone();
        LocalVadDetector::new(vad_settings)
    }

    /// Builds the audio pre-processor used by the VAD pipeline.
    pub fn create_audio_processor(&self) -> Result<VadAudioProcessor> {
        VadAudioProcessor::new()
    }

    /// Builds a `TranslationEngine` that shares the configured AI provider.
    ///
    /// # Errors
    /// Fails when the AI provider cannot be constructed.
    pub fn create_translation_engine(&self) -> Result<crate::core::translation::TranslationEngine> {
        let boxed_provider = self.create_ai_provider()?;
        let shared_provider: std::sync::Arc<dyn AIProvider> = std::sync::Arc::from(boxed_provider);
        crate::core::translation::TranslationEngine::new(
            shared_provider,
            self.config.translation.batch_size,
        )
    }
}
/// Validates the parts of `AIConfig` shared by every provider.
///
/// Rules:
/// - an API key is mandatory for every provider except `local`;
/// - `model` must be non-empty;
/// - `temperature` must lie within `[0.0, 2.0]` (NaN is rejected);
/// - `max_tokens` must be positive.
///
/// # Errors
/// Returns a configuration error describing the first violated rule.
fn validate_ai_config(ai_config: &crate::config::AIConfig) -> Result<()> {
    let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
    let is_local = canonical == "local";
    if !is_local && ai_config.api_key.as_deref().unwrap_or("").trim().is_empty() {
        return Err(SubXError::config(
            "AI API key is required. Set ai.api_key in configuration or use environment variable."
                .to_string(),
        ));
    }
    if ai_config.model.trim().is_empty() {
        return Err(SubXError::config(
            "AI model is required. Set ai.model in configuration.".to_string(),
        ));
    }
    // BUG FIX: the previous `t < 0.0 || t > 2.0` check let NaN slip through
    // (both comparisons are false for NaN); `contains` rejects it.
    if !(0.0..=2.0).contains(&ai_config.temperature) {
        return Err(SubXError::config(
            "AI temperature must be between 0.0 and 2.0.".to_string(),
        ));
    }
    if ai_config.max_tokens == 0 {
        return Err(SubXError::config(
            "AI max_tokens must be greater than 0.".to_string(),
        ));
    }
    Ok(())
}
/// Constructs the AI provider selected by `ai_config.provider`.
///
/// The provider name is normalized first, so aliases (e.g. `ollama`) map
/// onto their canonical arm. Supported providers: `openai`, `openrouter`,
/// `azure-openai`, `local`.
///
/// # Errors
/// Returns a configuration error for unsupported providers, and propagates
/// validation or client-construction failures.
pub fn create_ai_provider(ai_config: &crate::config::AIConfig) -> Result<Box<dyn AIProvider>> {
    let canonical = crate::config::field_validator::normalize_ai_provider(&ai_config.provider);
    match canonical.as_str() {
        "openai" => {
            validate_ai_config(ai_config)?;
            let client = OpenAIClient::from_config(ai_config)?;
            Ok(Box::new(client))
        }
        "openrouter" => {
            validate_ai_config(ai_config)?;
            let client = OpenRouterClient::from_config(ai_config)?;
            Ok(Box::new(client))
        }
        "azure-openai" => {
            validate_ai_config(ai_config)?;
            let client =
                crate::services::ai::azure_openai::AzureOpenAIClient::from_config(ai_config)?;
            Ok(Box::new(client))
        }
        "local" => {
            validate_ai_config(ai_config)?;
            let client = crate::services::ai::local::LocalLLMClient::from_config(ai_config)?;
            Ok(Box::new(client))
        }
        // BUG FIX: the old message advertised `anthropic` as supported,
        // but no `anthropic` arm exists in this match.
        other => Err(SubXError::config(format!(
            "Unsupported AI provider: {other}. Supported providers: openai, openrouter, azure-openai, local"
        ))),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::test_service::TestConfigService;

    /// The factory must build cleanly from a default test configuration.
    #[test]
    fn test_component_factory_creation() {
        let service = TestConfigService::default();
        assert!(ComponentFactory::new(&service).is_ok());
    }

    #[test]
    fn test_factory_creation() {
        let service = TestConfigService::default();
        assert!(ComponentFactory::new(&service).is_ok());
    }

    #[test]
    fn test_create_file_manager() {
        let service = TestConfigService::default();
        let factory = ComponentFactory::new(&service).unwrap();
        let _file_manager = factory.create_file_manager();
    }

    /// An unknown provider must yield an error listing every supported one.
    #[test]
    fn test_unsupported_ai_provider() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "unsupported".to_string();
        let error_msg = match create_ai_provider(&config.ai) {
            Ok(_) => panic!("Expected error for unsupported provider"),
            Err(e) => e.to_string(),
        };
        assert!(error_msg.contains("Unsupported AI provider"));
        for provider in ["openai", "openrouter", "azure-openai", "local"] {
            assert!(
                error_msg.contains(provider),
                "missing {provider}: {error_msg}"
            );
        }
    }

    #[test]
    fn test_create_vad_sync_detector() {
        let service = TestConfigService::default();
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_vad_sync_detector().is_ok());
    }

    #[test]
    fn test_create_vad_detector() {
        let service = TestConfigService::default();
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_vad_detector().is_ok());
    }

    #[test]
    fn test_create_audio_processor() {
        let service = TestConfigService::default();
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_audio_processor().is_ok());
    }

    #[test]
    fn test_create_ai_provider_openai_success() {
        let service = TestConfigService::default();
        service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_ai_provider().is_ok());
    }

    /// A non-local provider with an empty key must fail validation.
    #[test]
    fn test_create_ai_provider_missing_api_key() {
        let service = TestConfigService::default();
        service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "");
        let factory = ComponentFactory::new(&service).unwrap();
        let message = match factory.create_ai_provider() {
            Ok(_) => panic!("Expected missing-key error"),
            Err(e) => e.to_string(),
        };
        assert!(message.contains("API key is required"));
    }

    #[test]
    fn test_create_ai_provider_unsupported_provider() {
        let service = TestConfigService::default();
        service.set_ai_settings_and_key("unsupported-provider", "model", "key");
        let factory = ComponentFactory::new(&service).unwrap();
        let message = match factory.create_ai_provider() {
            Ok(_) => panic!("Expected unsupported-provider error"),
            Err(e) => e.to_string(),
        };
        assert!(message.contains("Unsupported AI provider"));
    }

    #[test]
    fn test_create_ai_provider_with_custom_base_url() {
        let service = TestConfigService::default();
        service.set_ai_settings_and_key("openai", "gpt-4.1-mini", "test-api-key");
        service.config_mut().ai.base_url = "https://custom-api.com/v1".to_string();
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_ai_provider().is_ok());
    }

    #[test]
    fn test_create_ai_provider_openrouter_success() {
        let service = TestConfigService::default();
        service.set_ai_settings_and_key(
            "openrouter",
            "deepseek/deepseek-r1-0528:free",
            "test-openrouter-key",
        );
        let factory = ComponentFactory::new(&service).unwrap();
        assert!(factory.create_ai_provider().is_ok());
    }

    #[test]
    fn test_create_ai_provider_azure_openai_success() {
        let mut config = crate::config::Config::default();
        config.ai.provider = "azure-openai".to_string();
        config.ai.api_key = Some("azure-key-123".to_string());
        config.ai.model = "dep123".to_string();
        config.ai.api_version = Some("2025-04-01-preview".to_string());
        config.ai.base_url = "https://example.openai.azure.com".to_string();
        assert!(create_ai_provider(&config.ai).is_ok());
    }

    /// `local` (and its `ollama` alias) must construct without an API key.
    #[test]
    fn test_create_ai_provider_local_success() {
        use crate::config::builder::TestConfigBuilder;
        let local_service = TestConfigBuilder::new()
            .with_ai_provider("local")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let local_factory = ComponentFactory::new(&local_service).unwrap();
        let built = local_factory.create_ai_provider();
        assert!(
            built.is_ok(),
            "local provider must construct without api_key: {:?}",
            built.err()
        );

        let alias_service = TestConfigBuilder::new()
            .with_ai_provider("ollama")
            .with_ai_model("llama3.1")
            .with_ai_base_url("http://localhost:11434/v1")
            .build_service();
        let alias_factory = ComponentFactory::new(&alias_service).unwrap();
        assert!(
            alias_factory.create_ai_provider().is_ok(),
            "`ollama` alias must reach the local arm"
        );
    }
}