1use serde::{Deserialize, Serialize};
5use std::path::PathBuf;
6
7use crate::{Result, error::SubXError};
8use log::debug;
9
10pub mod cache;
12pub mod manager;
13pub mod partial;
14pub mod source;
15pub mod validator;
16
17use crate::config::manager::ConfigManager;
18use crate::config::source::{CliSource, EnvSource, FileSource};
19use std::sync::{Mutex, OnceLock};
20
/// Process-wide configuration manager, lazily installed by `init_config_manager()`
/// and read by `load_config()`.
static GLOBAL_CONFIG_MANAGER: OnceLock<Mutex<ConfigManager>> = OnceLock::new();
22
23pub fn init_config_manager() -> Result<()> {
25 let lock = GLOBAL_CONFIG_MANAGER.get_or_init(|| Mutex::new(ConfigManager::new()));
26
27 let config_path = Config::config_file_path()?;
29 debug!("init_config_manager: Using config path: {:?}", config_path);
30 debug!(
31 "init_config_manager: Config path exists: {}",
32 config_path.exists()
33 );
34
35 let manager = ConfigManager::new()
36 .add_source(Box::new(FileSource::new(config_path)))
37 .add_source(Box::new(EnvSource::new()))
38 .add_source(Box::new(CliSource::new()));
39 debug!("init_config_manager: Created manager with 3 sources");
40
41 manager.load().map_err(|e| {
42 debug!("init_config_manager: Manager load failed: {}", e);
43 SubXError::config(e.to_string())
44 })?;
45 debug!("init_config_manager: Manager loaded successfully");
46
47 let mut guard = lock.lock().unwrap();
48 *guard = manager;
49 debug!("init_config_manager: Updated global manager");
50 Ok(())
51}
52
53pub fn load_config() -> Result<Config> {
55 debug!("load_config: Getting global config manager");
56 let lock = GLOBAL_CONFIG_MANAGER.get().ok_or_else(|| {
57 debug!("load_config: Global config manager not initialized");
58 SubXError::config("配置管理器尚未初始化,請先呼叫 init_config_manager()".to_string())
59 })?;
60 debug!("load_config: Locking manager");
61 let manager = lock.lock().unwrap();
62 let config_lock = manager.config();
63 debug!("load_config: Getting partial config");
64 let partial_config = config_lock.read().unwrap();
65 debug!(
66 "load_config: partial_config.ai.max_sample_length = {:?}",
67 partial_config.ai.max_sample_length
68 );
69 debug!("load_config: Converting to complete config");
70 let config = partial_config.to_complete_config().map_err(|e| {
71 debug!("load_config: to_complete_config failed: {}", e);
72 SubXError::config(e.to_string())
73 })?;
74 debug!(
75 "load_config: Final config.ai.max_sample_length = {}",
76 config.ai.max_sample_length
77 );
78 Ok(config)
79}
80
/// Top-level application configuration, aggregating all sub-config sections.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Config {
    /// AI provider settings.
    pub ai: AIConfig,
    /// Subtitle format settings.
    pub formats: FormatsConfig,
    /// Audio/subtitle synchronization settings.
    pub sync: SyncConfig,
    /// General application settings.
    pub general: GeneralConfig,
    /// Parallel task execution settings.
    pub parallel: ParallelConfig,
    /// Path this configuration was loaded from, if any.
    /// Skipped during (de)serialization.
    #[serde(skip)]
    pub loaded_from: Option<PathBuf>,
}
92
#[cfg(test)]
// NOTE(review): `#[serial_test::serial]` at module level is presumably intended
// to serialize every test in this module (several mutate process-global env
// vars and the global config manager) — confirm the serial_test version in use
// supports module-level application.
#[serial_test::serial]
mod tests {
    use super::*;
    use serial_test::serial;
    use std::env;
    use tempfile::TempDir;

    // The built-in defaults should match the values in `Config::default()`.
    #[test]
    fn test_default_config_creation() {
        let config = Config::default();
        assert_eq!(config.ai.provider, "openai");
        assert_eq!(config.ai.model, "gpt-4o-mini");
        assert_eq!(config.formats.default_output, "srt");
        assert!(!config.general.backup_enabled);
        assert_eq!(config.general.max_concurrent_jobs, 4);
    }

    // A default config must serialize to TOML containing each section header.
    #[test]
    fn test_config_serialization() {
        let config = Config::default();
        let toml_str = toml::to_string(&config).unwrap();
        assert!(toml_str.contains("[ai]"));
        assert!(toml_str.contains("[formats]"));
        assert!(toml_str.contains("[sync]"));
        assert!(toml_str.contains("[general]"));
    }

    // `apply_env_vars` should pick up OPENAI_API_KEY and SUBX_AI_MODEL.
    #[test]
    #[serial]
    fn test_env_var_override() {
        // Env mutation is process-global; `unsafe` reflects the Rust 2024
        // `set_var`/`remove_var` signatures, and `#[serial]` guards against
        // concurrent tests racing on the same variables.
        unsafe {
            env::remove_var("OPENAI_API_KEY");
            env::remove_var("SUBX_AI_MODEL");
            env::set_var("OPENAI_API_KEY", "test-key-123");
            env::set_var("SUBX_AI_MODEL", "gpt-3.5-turbo");
        }

        let mut config = Config::default();
        config.apply_env_vars();
        assert!(config.ai.api_key.is_some());
        assert_eq!(config.ai.model, "gpt-3.5-turbo");

        // Clean up so later tests see an unpolluted environment.
        unsafe {
            env::remove_var("OPENAI_API_KEY");
            env::remove_var("SUBX_AI_MODEL");
        }
    }

    // A missing API key is not a validation error (validation only checks the
    // provider name).
    #[test]
    #[serial]
    fn test_config_validation_missing_api_key() {
        unsafe {
            env::remove_var("OPENAI_API_KEY");
        }
        let config = Config::default();
        assert!(config.validate().is_ok());
    }

    // Any provider other than "openai" must fail validation.
    #[test]
    fn test_config_validation_invalid_provider() {
        let mut config = Config::default();
        config.ai.provider = "invalid-provider".to_string();
        assert!(config.validate().is_err());
    }

    // Round-trip: write a default config to a temp TOML file, read it back,
    // and compare representative fields.
    #[test]
    fn test_config_file_save_and_load() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let original_config = Config::default();
        let toml_content = toml::to_string_pretty(&original_config).unwrap();
        std::fs::write(&config_path, toml_content).unwrap();

        let file_content = std::fs::read_to_string(&config_path).unwrap();
        let loaded_config: Config = toml::from_str(&file_content).unwrap();

        assert_eq!(original_config.ai.model, loaded_config.ai.model);
        assert_eq!(
            original_config.formats.default_output,
            loaded_config.formats.default_output
        );
    }

    // `merge` takes the override's values (note: `merge` currently replaces
    // the whole config wholesale — see the NOTE on `Config::merge`).
    #[test]
    fn test_config_merge() {
        let mut base_config = Config::default();
        let mut override_config = Config::default();
        override_config.ai.model = "gpt-4".to_string();
        override_config.general.backup_enabled = true;

        base_config.merge(override_config);

        assert_eq!(base_config.ai.model, "gpt-4");
        assert!(base_config.general.backup_enabled);
    }

    // End-to-end: point SUBX_CONFIG_PATH at a temp config file, initialize the
    // global manager, and load the config back through it.
    #[test]
    fn test_global_config_manager_initialization() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        let test_config = Config::default();
        let toml_content = toml::to_string_pretty(&test_config).unwrap();
        std::fs::write(&config_path, toml_content).unwrap();

        unsafe {
            std::env::set_var("SUBX_CONFIG_PATH", config_path.to_str().unwrap());
        }

        assert!(init_config_manager().is_ok());

        let loaded_config = load_config().unwrap();
        assert_eq!(loaded_config.ai.model, test_config.ai.model);

        unsafe {
            std::env::remove_var("SUBX_CONFIG_PATH");
        }
    }

    // Env vars should flow through the source-based manager as well.
    // NOTE(review): this test mutates env vars but has no per-test `#[serial]`;
    // it relies on the module-level serial attribute above — confirm that holds.
    #[test]
    fn test_env_var_override_with_new_system() {
        unsafe {
            std::env::set_var("OPENAI_API_KEY", "test-key-from-env");
            std::env::set_var("SUBX_AI_MODEL", "gpt-4-from-env");
        }

        // Result deliberately ignored: the global manager may already have
        // been initialized by another test in this (serialized) module.
        let _ = init_config_manager();
        let config = load_config().unwrap();

        assert_eq!(config.ai.api_key, Some("test-key-from-env".to_string()));
        assert_eq!(config.ai.model, "gpt-4-from-env");

        unsafe {
            std::env::remove_var("OPENAI_API_KEY");
            std::env::remove_var("SUBX_AI_MODEL");
        }
    }
}
236
/// AI provider settings.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct AIConfig {
    /// Provider identifier; only `"openai"` passes `Config::validate`.
    pub provider: String,
    /// API key; may be populated from the `OPENAI_API_KEY` env var.
    pub api_key: Option<String>,
    /// Model name; may be overridden via the `SUBX_AI_MODEL` env var.
    pub model: String,
    /// Base URL of the provider's API endpoint.
    pub base_url: String,
    /// Maximum sample length (units presumably characters — TODO confirm).
    pub max_sample_length: usize,
    /// Sampling temperature for AI requests.
    pub temperature: f32,
    /// Number of retry attempts on failure.
    pub retry_attempts: u32,
    /// Delay between retries, in milliseconds.
    pub retry_delay_ms: u64,
}
249
/// Subtitle format settings.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FormatsConfig {
    /// Default output subtitle format (e.g. `"srt"`).
    pub default_output: String,
    /// Whether styling information is preserved during conversion.
    pub preserve_styling: bool,
    /// Default text encoding (e.g. `"utf-8"`).
    pub default_encoding: String,
    /// Minimum confidence threshold for encoding detection (0.0–1.0 presumed).
    pub encoding_detection_confidence: f32,
}
259
/// Audio/subtitle synchronization settings.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SyncConfig {
    /// Maximum subtitle offset to consider, in seconds.
    pub max_offset_seconds: f32,
    /// Audio sample rate in Hz used for analysis.
    pub audio_sample_rate: u32,
    /// Correlation threshold for accepting a sync match.
    pub correlation_threshold: f32,
    /// Threshold for detecting dialogue activity.
    pub dialogue_detection_threshold: f32,
    /// Minimum duration for a detected dialogue segment, in milliseconds.
    pub min_dialogue_duration_ms: u64,
    /// Maximum gap between dialogue segments to merge them, in milliseconds.
    pub dialogue_merge_gap_ms: u64,
    /// Whether dialogue detection is enabled.
    pub enable_dialogue_detection: bool,
    /// Whether the audio sample rate should be auto-detected.
    pub auto_detect_sample_rate: bool,
}
275
impl SyncConfig {
    /// Returns whether the audio sample rate should be auto-detected.
    pub fn auto_detect_sample_rate(&self) -> bool {
        self.auto_detect_sample_rate
    }
}
282
/// General application settings.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GeneralConfig {
    /// Whether backups are created before modifying files.
    pub backup_enabled: bool,
    /// Maximum number of concurrently running jobs.
    pub max_concurrent_jobs: usize,
    /// Per-task timeout, in seconds.
    pub task_timeout_seconds: u64,
    /// Whether a progress bar is shown.
    pub enable_progress_bar: bool,
    /// How long an idle worker is kept alive, in seconds.
    pub worker_idle_timeout_seconds: u64,
}
292
/// Parallel task execution settings.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ParallelConfig {
    /// Capacity of the task queue.
    pub task_queue_size: usize,
    /// Whether task priorities are honored.
    pub enable_task_priorities: bool,
    /// Whether workers are automatically rebalanced.
    pub auto_balance_workers: bool,
    /// Behavior when the task queue is full.
    pub queue_overflow_strategy: OverflowStrategy,
}
302
303impl Default for ParallelConfig {
304 fn default() -> Self {
305 ParallelConfig {
306 task_queue_size: 100,
307 enable_task_priorities: true,
308 auto_balance_workers: true,
309 queue_overflow_strategy: OverflowStrategy::Block,
310 }
311 }
312}
313
/// Strategy applied when the parallel task queue is full.
///
/// Serialized in lowercase (e.g. `block`, `dropoldest`, `reject`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum OverflowStrategy {
    /// Block until queue space becomes available.
    Block,
    /// Drop the oldest queued task to make room.
    DropOldest,
    /// Reject the new task.
    Reject,
}
325
326impl Default for Config {
327 fn default() -> Self {
328 Config {
329 ai: AIConfig {
330 provider: "openai".to_string(),
331 api_key: None,
332 model: "gpt-4o-mini".to_string(),
333 base_url: "https://api.openai.com/v1".to_string(),
334 max_sample_length: 2000,
335 temperature: 0.3,
336 retry_attempts: 3,
337 retry_delay_ms: 1000,
338 },
339 formats: FormatsConfig {
340 default_output: "srt".to_string(),
341 preserve_styling: true,
342 default_encoding: "utf-8".to_string(),
343 encoding_detection_confidence: 0.7,
344 },
345 sync: SyncConfig {
346 max_offset_seconds: 30.0,
347 audio_sample_rate: 16000,
348 correlation_threshold: 0.7,
349 dialogue_detection_threshold: 0.01,
350 min_dialogue_duration_ms: 500,
351 dialogue_merge_gap_ms: 500,
352 enable_dialogue_detection: true,
353 auto_detect_sample_rate: true,
354 },
355 general: GeneralConfig {
356 backup_enabled: false,
357 max_concurrent_jobs: 4,
358 task_timeout_seconds: 3600,
359 enable_progress_bar: true,
360 worker_idle_timeout_seconds: 300,
361 },
362 parallel: ParallelConfig::default(),
363 loaded_from: None,
364 }
365 }
366}
367
impl Config {
    /// Serializes this configuration as pretty-printed TOML and writes it to
    /// the default config file path, creating parent directories as needed.
    ///
    /// # Errors
    /// Fails if the path cannot be determined, serialization fails, or the
    /// file cannot be written.
    pub fn save(&self) -> Result<()> {
        let path = Config::config_file_path()?;
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent)?;
        }
        let toml = toml::to_string_pretty(self)
            .map_err(|e| SubXError::config(format!("TOML 序列化錯誤: {}", e)))?;
        std::fs::write(path, toml)?;
        Ok(())
    }

    /// Resolves the configuration file path.
    ///
    /// Honors the `SUBX_CONFIG_PATH` env var if set (returned as-is, whether or
    /// not the file exists); otherwise falls back to
    /// `<platform config dir>/subx/config.toml`.
    ///
    /// # Errors
    /// Fails if the platform configuration directory cannot be determined.
    pub fn config_file_path() -> Result<PathBuf> {
        debug!("config_file_path: Checking SUBX_CONFIG_PATH environment variable");
        if let Ok(custom) = std::env::var("SUBX_CONFIG_PATH") {
            debug!("config_file_path: Using custom path from env: {}", custom);
            let path = PathBuf::from(custom);
            debug!("config_file_path: Custom path exists: {}", path.exists());
            return Ok(path);
        }
        debug!("config_file_path: SUBX_CONFIG_PATH not set, using default");
        let dir = dirs::config_dir().ok_or_else(|| SubXError::config("無法確定配置目錄"))?;
        let default_path = dir.join("subx").join("config.toml");
        debug!("config_file_path: Default path: {:?}", default_path);
        Ok(default_path)
    }

    /// Overrides AI settings from env vars: `OPENAI_API_KEY` sets the API key,
    /// `SUBX_AI_MODEL` sets the model. Unset variables leave fields unchanged.
    #[allow(dead_code)]
    fn apply_env_vars(&mut self) {
        if let Ok(key) = std::env::var("OPENAI_API_KEY") {
            self.ai.api_key = Some(key);
        }
        if let Ok(model) = std::env::var("SUBX_AI_MODEL") {
            self.ai.model = model;
        }
    }

    /// Validates the configuration.
    ///
    /// Currently only checks that the AI provider is `"openai"`; no other
    /// fields are validated here.
    ///
    /// # Errors
    /// Fails if the provider is anything other than `"openai"`.
    #[allow(dead_code)]
    fn validate(&self) -> Result<()> {
        if self.ai.provider != "openai" {
            return Err(SubXError::config(format!(
                "不支援的 AI provider: {}",
                self.ai.provider
            )));
        }
        Ok(())
    }

    /// Looks up a configuration value by dotted key, e.g. `"ai.model"`.
    ///
    /// Supported keys: `ai.provider`, `ai.api_key` (empty string when unset),
    /// `ai.model`, `ai.base_url`, and `formats.default_output`.
    ///
    /// # Errors
    /// Fails on malformed keys (no `.`), unknown sections, or unknown fields.
    pub fn get_value(&self, key: &str) -> Result<String> {
        // splitn(2, '.') keeps any further dots inside the field part.
        let parts: Vec<&str> = key.splitn(2, '.').collect();
        if parts.len() != 2 {
            return Err(SubXError::config(format!("無效的配置鍵格式: {}", key)));
        }
        match parts[0] {
            "ai" => match parts[1] {
                "provider" => Ok(self.ai.provider.clone()),
                "api_key" => Ok(self.ai.api_key.clone().unwrap_or_default()),
                "model" => Ok(self.ai.model.clone()),
                "base_url" => Ok(self.ai.base_url.clone()),
                _ => Err(SubXError::config(format!("無效的 AI 配置鍵: {}", key))),
            },
            "formats" => match parts[1] {
                "default_output" => Ok(self.formats.default_output.clone()),
                _ => Err(SubXError::config(format!("無效的 Formats 配置鍵: {}", key))),
            },
            _ => Err(SubXError::config(format!("無效的配置區段: {}", parts[0]))),
        }
    }

    /// Merges another configuration into this one.
    ///
    /// NOTE(review): despite the name, this replaces `self` wholesale with
    /// `other` (including `loaded_from`) rather than merging field-by-field.
    /// Callers relying on partial-override semantics should verify this is
    /// the intended behavior.
    #[allow(dead_code)]
    fn merge(&mut self, other: Config) {
        *self = other;
    }
}