voirs_recognizer/error_enhancement.rs

1//! Enhanced error messages and solutions for `VoiRS` Recognizer
2//!
3//! This module provides comprehensive error enhancement functionality that adds
4//! detailed context, recovery suggestions, and actionable solutions to errors.
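//!
//! A minimal usage sketch. It assumes the items below are publicly reachable at the
//! paths shown and that a [`RecognitionError`](crate::RecognitionError) value is
//! already in hand; the block is marked `ignore` so it is not compiled as a doc-test.
//!
//! ```ignore
//! use voirs_recognizer::error_enhancement::{ErrorEnhancer, SystemInfo};
//! use voirs_recognizer::RecognitionError;
//!
//! fn report(err: &RecognitionError) {
//!     // Full message with context, solutions, and troubleshooting steps.
//!     eprintln!("{}", err.get_enhanced_message());
//!
//!     // Titles of easy, high-priority solutions only.
//!     for fix in err.get_quick_fixes() {
//!         eprintln!("quick fix: {fix}");
//!     }
//!
//!     // Context-aware message based on (estimated) system information.
//!     eprintln!("{}", err.get_contextual_message(&SystemInfo::default()));
//! }
//! ```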
5
6use crate::RecognitionError;
7use std::collections::HashMap;
8use std::fmt;
9
10/// Enhanced error information with context and solutions
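/// A hedged sketch of consuming an enhancement directly (the field and method names
/// are the ones declared in this module; the error value is assumed to exist, so the
/// block is marked `ignore`):
///
/// ```ignore
/// use voirs_recognizer::error_enhancement::ErrorEnhancer;
/// use voirs_recognizer::RecognitionError;
///
/// fn print_plan(err: &RecognitionError) {
///     let mut enhancement = err.enhance_error();
///     // Lower `priority` values are more important (1 = highest).
///     enhancement.solutions.sort_by_key(|s| s.priority);
///     for solution in &enhancement.solutions {
///         println!("{} [{:?}, ~{}]", solution.title, solution.difficulty, solution.estimated_time);
///         for step in &solution.steps {
///             println!("  - {step}");
///         }
///     }
/// }
/// ```
///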
11#[derive(Debug, Clone)]
12pub struct ErrorEnhancement {
13    /// Original error message
14    pub original_message: String,
15    /// Error category for better classification
16    pub category: ErrorCategory,
17    /// Severity level of the error
18    pub severity: ErrorSeverity,
19    /// Detailed context about when and how the error occurred
20    pub context: ErrorContext,
21    /// Suggested solutions and recovery actions
22    pub solutions: Vec<Solution>,
23    /// Related documentation links
24    pub documentation_links: Vec<String>,
25    /// Troubleshooting steps
26    pub troubleshooting_steps: Vec<String>,
27}
28
29/// Error category for better classification
30#[derive(Debug, Clone, PartialEq, Eq, Hash)]
31pub enum ErrorCategory {
32    /// Configuration or setup issues
33    Configuration,
34    /// Resource availability issues (memory, disk, network)
35    Resources,
36    /// Audio format or quality issues
37    AudioFormat,
38    /// Model loading or inference issues
39    ModelIssues,
40    /// Performance or timeout issues
41    Performance,
42    /// Input validation issues
43    InputValidation,
44    /// System integration issues
45    Integration,
46    /// Feature availability issues
47    FeatureSupport,
48}
49
50/// Error severity levels
51#[derive(Debug, Clone, PartialEq, PartialOrd)]
52pub enum ErrorSeverity {
53    /// Critical errors that prevent core functionality
54    Critical,
55    /// High priority errors that significantly impact functionality
56    High,
57    /// Medium priority errors that may cause degraded experience
58    Medium,
59    /// Low priority errors that have minimal impact
60    Low,
61    /// Informational messages
62    Info,
63}
64
65/// Error context information
66#[derive(Debug, Clone)]
67pub struct ErrorContext {
68    /// Component or module where the error occurred
69    pub component: String,
70    /// Operation being performed when error occurred
71    pub operation: String,
72    /// Input parameters or configuration relevant to the error
73    pub input_summary: String,
74    /// System state when error occurred
75    pub system_state: String,
76    /// Timestamp when error occurred
77    pub timestamp: std::time::SystemTime,
78    /// Additional context information
79    pub additional_info: std::collections::HashMap<String, String>,
80}
81
82/// Solution with actionable steps
83#[derive(Debug, Clone)]
84pub struct Solution {
85    /// Solution title
86    pub title: String,
87    /// Detailed description of the solution
88    pub description: String,
89    /// Priority of this solution (1 = highest)
90    pub priority: u8,
91    /// Estimated time to implement
92    pub estimated_time: String,
93    /// Difficulty level
94    pub difficulty: SolutionDifficulty,
95    /// Step-by-step instructions
96    pub steps: Vec<String>,
97    /// Code example if applicable
98    pub code_example: Option<String>,
99    /// Success indicators
100    pub success_indicators: Vec<String>,
101}
102
103/// Solution difficulty levels
104#[derive(Debug, Clone, PartialEq)]
105pub enum SolutionDifficulty {
106    /// Easy to implement
107    Easy,
108    /// Moderate complexity
109    Moderate,
110    /// Advanced solution requiring expertise
111    Advanced,
112}
113
114/// Error enhancement trait
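///
/// Implemented for `RecognitionError` later in this module. The sketch below shows the
/// environment-aware path; the import path and field values are illustrative
/// assumptions, and the block is marked `ignore` so it is not run as a doc-test.
///
/// ```ignore
/// use voirs_recognizer::error_enhancement::{EnvironmentInfo, EnvironmentType, ErrorEnhancer};
/// use voirs_recognizer::RecognitionError;
///
/// fn production_hints(err: &RecognitionError) {
///     let env = EnvironmentInfo {
///         environment_type: EnvironmentType::Production,
///         container_type: Some("docker".to_string()),
///         ..EnvironmentInfo::default()
///     };
///     for solution in err.get_environment_solutions(&env) {
///         println!("{} ({})", solution.title, solution.estimated_time);
///     }
/// }
/// ```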
115pub trait ErrorEnhancer {
116    /// Enhance an error with detailed context and solutions
117    fn enhance_error(&self) -> ErrorEnhancement;
118
119    /// Get formatted error message with solutions
120    fn get_enhanced_message(&self) -> String;
121
122    /// Get quick-fix suggestions (titles of easy, high-priority solutions)
123    fn get_quick_fixes(&self) -> Vec<String>;
124
125    /// Check if error is recoverable
126    fn is_recoverable(&self) -> bool;
127
128    /// Get context-aware error message based on system state
129    fn get_contextual_message(&self, system_info: &SystemInfo) -> String;
130
131    /// Get environment-specific solutions
132    fn get_environment_solutions(&self, env: &EnvironmentInfo) -> Vec<Solution>;
133}
134
135/// System information for context-aware error messages
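///
/// The `Default` implementation fills in conservative estimates; callers that know the
/// real machine characteristics can override individual fields before passing the value
/// to [`ErrorEnhancer::get_contextual_message`]. The values below are illustrative only.
///
/// ```ignore
/// use voirs_recognizer::error_enhancement::SystemInfo;
///
/// let info = SystemInfo {
///     available_memory_mb: 2048, // measured value instead of the 8192 MB estimate
///     has_gpu: true,
///     ..SystemInfo::default()
/// };
/// ```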
136#[derive(Debug, Clone)]
137pub struct SystemInfo {
138    /// Operating system
139    pub os: String,
140    /// Architecture (`x86_64`, aarch64, etc.)
141    pub arch: String,
142    /// Available memory in MB
143    pub available_memory_mb: u64,
144    /// CPU count
145    pub cpu_count: usize,
146    /// Available disk space in MB
147    pub available_disk_mb: u64,
148    /// GPU availability
149    pub has_gpu: bool,
150    /// Network connectivity
151    pub has_network: bool,
152}
153
154/// Environment information for targeted solutions
155#[derive(Debug, Clone)]
156pub struct EnvironmentInfo {
157    /// Development vs production environment
158    pub environment_type: EnvironmentType,
159    /// Container environment (Docker, Kubernetes, etc.)
160    pub container_type: Option<String>,
161    /// Cloud provider (AWS, GCP, Azure, etc.)
162    pub cloud_provider: Option<String>,
163    /// Programming language integration (Python, JavaScript, etc.)
164    pub language_binding: Option<String>,
165    /// Framework integration (Flask, `FastAPI`, etc.)
166    pub framework: Option<String>,
167}
168
169/// Environment type classification
170#[derive(Debug, Clone, PartialEq)]
171pub enum EnvironmentType {
172    /// Development environment
173    Development,
174    /// Testing environment
175    Testing,
176    /// Staging environment
177    Staging,
178    /// Production environment
179    Production,
180    /// CI/CD environment
181    CI,
182}
183
184impl Default for SystemInfo {
185    fn default() -> Self {
186        Self {
187            os: std::env::consts::OS.to_string(),
188            arch: std::env::consts::ARCH.to_string(),
189            available_memory_mb: 8192, // Default estimate
190            cpu_count: num_cpus::get(),
191            available_disk_mb: 10240, // Default estimate
192            has_gpu: false,           // Conservative default
193            has_network: true,        // Optimistic default
194        }
195    }
196}
197
198impl Default for EnvironmentInfo {
199    fn default() -> Self {
200        Self {
201            environment_type: EnvironmentType::Development,
202            container_type: None,
203            cloud_provider: None,
204            language_binding: None,
205            framework: None,
206        }
207    }
208}
209
210impl ErrorEnhancer for RecognitionError {
211    fn enhance_error(&self) -> ErrorEnhancement {
212        match self {
213            RecognitionError::ModelLoadError { message, .. } => {
214                create_model_load_enhancement(message)
215            }
216            RecognitionError::ModelError { message, .. } => create_model_error_enhancement(message),
217            RecognitionError::AudioProcessingError { message, .. } => {
218                create_audio_processing_enhancement(message)
219            }
220            RecognitionError::TranscriptionError { message, .. } => {
221                create_transcription_enhancement(message)
222            }
223            RecognitionError::PhonemeRecognitionError { message, .. } => {
224                create_phoneme_recognition_enhancement(message)
225            }
226            RecognitionError::AudioAnalysisError { message, .. } => {
227                create_audio_analysis_enhancement(message)
228            }
229            RecognitionError::ConfigurationError { message, .. } => {
230                create_configuration_enhancement(message)
231            }
232            RecognitionError::InsufficientMemory {
233                required_mb,
234                available_mb,
235                ..
236            } => create_memory_error_enhancement(&format!(
237                "Insufficient memory: {required_mb} MB required, {available_mb} MB available"
238            )),
239            RecognitionError::MemoryError { message, .. } => {
240                create_memory_error_enhancement(message)
241            }
242            RecognitionError::UnsupportedFormat(format) => create_format_error_enhancement(format),
243            RecognitionError::FeatureNotSupported { feature, .. } => {
244                create_feature_not_supported_enhancement(feature)
245            }
246            RecognitionError::InvalidInput { message, .. } => {
247                create_invalid_input_enhancement(message)
248            }
249            RecognitionError::ResourceError { message, .. } => {
250                create_resource_error_enhancement(message)
251            }
252            RecognitionError::InvalidFormat(format) => create_invalid_format_enhancement(format),
253            RecognitionError::ModelNotFound {
254                model,
255                available,
256                suggestions,
257            } => create_model_not_found_enhancement(model, available, suggestions),
258            RecognitionError::LanguageNotSupported {
259                language,
260                supported,
261                suggestions,
262            } => create_language_not_supported_enhancement(language, supported, suggestions),
263            RecognitionError::DeviceNotAvailable {
264                device,
265                reason,
266                fallback,
267            } => create_device_not_available_enhancement(device, reason, fallback),
268            RecognitionError::RecognitionTimeout {
269                timeout_ms,
270                audio_duration_ms,
271                suggestion,
272            } => {
273                create_recognition_timeout_enhancement(*timeout_ms, *audio_duration_ms, suggestion)
274            }
275            RecognitionError::TrainingError { message, .. } => {
276                create_training_error_enhancement(message)
277            }
278            RecognitionError::SynchronizationError { message } => {
279                create_synchronization_error_enhancement(message)
280            }
281        }
282    }
283
284    fn get_enhanced_message(&self) -> String {
285        let enhancement = self.enhance_error();
286        format_enhanced_error(&enhancement)
287    }
288
289    fn get_quick_fixes(&self) -> Vec<String> {
290        let enhancement = self.enhance_error();
291        enhancement
292            .solutions
293            .iter()
294            .filter(|s| s.priority <= 2 && s.difficulty == SolutionDifficulty::Easy)
295            .map(|s| s.title.clone())
296            .collect()
297    }
298
299    fn is_recoverable(&self) -> bool {
300        match self {
301            RecognitionError::ModelLoadError { .. } => true,
302            RecognitionError::AudioProcessingError { .. } => true,
303            RecognitionError::ConfigurationError { .. } => true,
304            RecognitionError::InvalidInput { .. } => true,
305            RecognitionError::ResourceError { .. } => true,
306            RecognitionError::UnsupportedFormat(_) => true,
307            RecognitionError::InvalidFormat(_) => true,
308            RecognitionError::ModelNotFound { .. } => true,
309            RecognitionError::LanguageNotSupported { .. } => true,
310            RecognitionError::DeviceNotAvailable { .. } => true,
311            RecognitionError::RecognitionTimeout { .. } => true,
312            RecognitionError::InsufficientMemory { .. } => false,
313            RecognitionError::MemoryError { .. } => false,
314            _ => false,
315        }
316    }
317
318    fn get_contextual_message(&self, system_info: &SystemInfo) -> String {
319        let base_enhancement = self.enhance_error();
320        let mut contextual_message = format!(
321            "Error: {message}\n",
322            message = base_enhancement.original_message
323        );
324
325        // Add system context
326        contextual_message.push_str(&format!(
327            "System Context: {} {} ({} cores, {}MB RAM available)\n",
328            system_info.os,
329            system_info.arch,
330            system_info.cpu_count,
331            system_info.available_memory_mb
332        ));
333
334        // Add specific context based on error type
335        match self {
336            RecognitionError::InsufficientMemory {
337                required_mb,
338                available_mb,
339                ..
340            } => {
341                contextual_message.push_str(&format!(
342                    "Memory Requirements: {}MB required vs {}MB available (system reports {}MB)\n",
343                    required_mb, available_mb, system_info.available_memory_mb
344                ));
345
346                if system_info.available_memory_mb < *required_mb {
347                    contextual_message
348                        .push_str("⚠️  System memory may be insufficient for this model size.\n");
349                }
350            }
351            RecognitionError::DeviceNotAvailable { device, .. } => {
352                if device.to_lowercase().contains("gpu") && !system_info.has_gpu {
353                    contextual_message.push_str(
354                        "ℹ️  No GPU detected on this system. Consider using CPU-only mode.\n",
355                    );
356                }
357            }
358            RecognitionError::ModelLoadError { .. } => {
359                if system_info.available_disk_mb < 1000 {
360                    contextual_message.push_str("⚠️  Low disk space detected. Consider freeing up space for model storage.\n");
361                }
362                if !system_info.has_network {
363                    contextual_message.push_str(
364                        "⚠️  No network connectivity detected. Model download may fail.\n",
365                    );
366                }
367            }
368            _ => {}
369        }
370
371        // Add prioritized solutions
372        let solutions = base_enhancement.solutions;
373        if !solutions.is_empty() {
374            contextual_message.push_str("\nRecommended Solutions:\n");
375            for (i, solution) in solutions.iter().take(3).enumerate() {
376                contextual_message.push_str(&format!(
377                    "{}. {} ({})\n   {}\n",
378                    i + 1,
379                    solution.title,
380                    solution.estimated_time,
381                    solution.description
382                ));
383            }
384        }
385
386        contextual_message
387    }
388
389    fn get_environment_solutions(&self, env: &EnvironmentInfo) -> Vec<Solution> {
390        let mut solutions = Vec::new();
391
392        match self {
393            RecognitionError::ModelLoadError { .. } => {
394                match env.environment_type {
395                    EnvironmentType::Production => {
396                        solutions.push(Solution {
397                            title: "Production model deployment check".to_string(),
398                            description: "Verify model deployment in production environment"
399                                .to_string(),
400                            priority: 1,
401                            estimated_time: "5-10 minutes".to_string(),
402                            difficulty: SolutionDifficulty::Moderate,
403                            steps: vec![
404                                "Check if model is included in production build".to_string(),
405                                "Verify model path configuration for production".to_string(),
406                                "Check production filesystem permissions".to_string(),
407                                "Validate model integrity in deployment".to_string(),
408                            ],
409                            code_example: Some(
410                                r#"
411# Check model in production environment
412if [ ! -f "/app/models/whisper-base.bin" ]; then
413    echo "Model missing in production"
414    exit 1
415fi
416
417# Verify permissions
418ls -la /app/models/whisper-base.bin
419"#
420                                .to_string(),
421                            ),
422                            success_indicators: vec![
423                                "Model file exists in production path".to_string(),
424                                "Correct permissions set".to_string(),
425                            ],
426                        });
427                    }
428                    EnvironmentType::Development => {
429                        solutions.push(Solution {
430                            title: "Development environment setup".to_string(),
431                            description: "Set up model for local development".to_string(),
432                            priority: 1,
433                            estimated_time: "2-5 minutes".to_string(),
434                            difficulty: SolutionDifficulty::Easy,
435                            steps: vec![
436                                "Download model to local development directory".to_string(),
437                                "Set up development configuration".to_string(),
438                                "Add model path to environment variables".to_string(),
439                            ],
440                            code_example: Some(
441                                r#"
442# Set up development environment
443mkdir -p ./models
444export VOIRS_MODEL_PATH="./models/whisper-base.bin"
445
446# Download model (example)
447curl -L "https://example.com/model.bin" -o ./models/whisper-base.bin
448"#
449                                .to_string(),
450                            ),
451                            success_indicators: vec![
452                                "Model downloaded to development directory".to_string(),
453                                "Environment variables configured".to_string(),
454                            ],
455                        });
456                    }
457                    EnvironmentType::CI => {
458                        solutions.push(Solution {
459                            title: "CI/CD model handling".to_string(),
460                            description: "Configure model access in CI environment".to_string(),
461                            priority: 1,
462                            estimated_time: "10-15 minutes".to_string(),
463                            difficulty: SolutionDifficulty::Moderate,
464                            steps: vec![
465                                "Add model to CI artifacts or cache".to_string(),
466                                "Configure CI environment variables".to_string(),
467                                "Set up model download in CI pipeline".to_string(),
468                                "Add model validation step".to_string(),
469                            ],
470                            code_example: Some(
471                                r#"
472# CI configuration (GitHub Actions example)
473- name: Setup models
474  run: |
475    mkdir -p models
476    if [ ! -f models/whisper-base.bin ]; then
477      curl -L "$MODEL_URL" -o models/whisper-base.bin
478    fi
479  env:
480    MODEL_URL: ${{ secrets.MODEL_URL }}
481"#
482                                .to_string(),
483                            ),
484                            success_indicators: vec![
485                                "Models cached in CI".to_string(),
486                                "Pipeline completes successfully".to_string(),
487                            ],
488                        });
489                    }
490                    _ => {}
491                }
492
493                // Add container-specific solutions
494                if let Some(container_type) = &env.container_type {
495                    match container_type.as_str() {
496                        "docker" => {
497                            solutions.push(Solution {
498                                title: "Docker container model setup".to_string(),
499                                description: "Configure model access in Docker container"
500                                    .to_string(),
501                                priority: 2,
502                                estimated_time: "5-10 minutes".to_string(),
503                                difficulty: SolutionDifficulty::Moderate,
504                                steps: vec![
505                                    "Add model to Docker image or volume mount".to_string(),
506                                    "Set correct file permissions in container".to_string(),
507                                    "Configure model path environment variables".to_string(),
508                                ],
509                                code_example: Some(
510                                    r"
511# Dockerfile
512COPY models/ /app/models/
513RUN chmod -R 644 /app/models/
514
515# docker-compose.yml
516volumes:
517  - ./models:/app/models:ro
518
519# Environment
520ENV VOIRS_MODEL_PATH=/app/models/whisper-base.bin
521"
522                                    .to_string(),
523                                ),
524                                success_indicators: vec![
525                                    "Model accessible in container".to_string(),
526                                    "Permissions correctly set".to_string(),
527                                ],
528                            });
529                        }
530                        "kubernetes" => {
531                            solutions.push(Solution {
532                                title: "Kubernetes model deployment".to_string(),
533                                description: "Deploy model in Kubernetes environment".to_string(),
534                                priority: 2,
535                                estimated_time: "15-20 minutes".to_string(),
536                                difficulty: SolutionDifficulty::Advanced,
537                                steps: vec![
538                                    "Create ConfigMap or Secret for model".to_string(),
539                                    "Mount model in Pod specification".to_string(),
540                                    "Set up persistent volume if needed".to_string(),
541                                    "Configure resource limits".to_string(),
542                                ],
543                                code_example: Some(
544                                    r#"
545# model-configmap.yaml
546apiVersion: v1
547kind: ConfigMap
548metadata:
549  name: voirs-models
550data:
551  model-path: "/models/whisper-base.bin"
552
553# deployment.yaml
554spec:
555  template:
556    spec:
557      volumes:
558      - name: model-volume
559        persistentVolumeClaim:
560          claimName: model-pvc
561      containers:
562      - name: voirs
563        volumeMounts:
564        - name: model-volume
565          mountPath: /models
566"#
567                                    .to_string(),
568                                ),
569                                success_indicators: vec![
570                                    "Model accessible in all pods".to_string(),
571                                    "Persistent storage configured".to_string(),
572                                ],
573                            });
574                        }
575                        _ => {}
576                    }
577                }
578            }
579            _ => {
580                // Add general environment-specific solutions for other error types
581                solutions.extend(self.enhance_error().solutions);
582            }
583        }
584
585        solutions
586    }
587}
588
589// Helper functions for creating specific error enhancements
590
591fn create_model_load_enhancement(message: &str) -> ErrorEnhancement {
592    ErrorEnhancement {
593        original_message: message.to_string(),
594        category: ErrorCategory::ModelIssues,
595        severity: ErrorSeverity::Critical,
596        context: ErrorContext {
597            component: "Model Loader".to_string(),
598            operation: "Loading recognition model".to_string(),
599            input_summary: "Model path and configuration".to_string(),
600            system_state: "Model initialization".to_string(),
601            timestamp: std::time::SystemTime::now(),
602            additional_info: std::collections::HashMap::new(),
603        },
604        solutions: vec![
605            Solution {
606                title: "Check model file existence and permissions".to_string(),
607                description: "Verify that the model file exists and is readable".to_string(),
608                priority: 1,
609                estimated_time: "1-2 minutes".to_string(),
610                difficulty: SolutionDifficulty::Easy,
611                steps: vec![
612                    "Check if the model file exists at the specified path".to_string(),
613                    "Verify file permissions (should be readable)".to_string(),
614                    "Check available disk space".to_string(),
615                    "Ensure model file is not corrupted".to_string(),
616                ],
617                code_example: Some(
618                    r#"
619use std::fs;
620use std::path::Path;
621
622// Check model file
623if !Path::new("path/to/model.bin").exists() {
624    println!("Model file not found!");
625}
626
627// Check permissions
628if let Ok(metadata) = fs::metadata("path/to/model.bin") {
629    if metadata.permissions().readonly() {
630        println!("Model file is read-only");
631    }
632}
633"#
634                    .to_string(),
635                ),
636                success_indicators: vec![
637                    "Model file exists and is readable".to_string(),
638                    "No permission errors".to_string(),
639                ],
640            },
641            Solution {
642                title: "Download or regenerate model".to_string(),
643                description: "Download a fresh copy of the model or regenerate it".to_string(),
644                priority: 2,
645                estimated_time: "5-10 minutes".to_string(),
646                difficulty: SolutionDifficulty::Easy,
647                steps: vec![
648                    "Download model from official source".to_string(),
649                    "Verify model checksum".to_string(),
650                    "Place model in correct directory".to_string(),
651                    "Update model path in configuration".to_string(),
652                ],
653                code_example: Some(
654                    r#"
655// Re-download model
656let model_url = "https://example.com/model.bin";
657let response = reqwest::get(model_url).await?;
658let model_data = response.bytes().await?;
659fs::write("models/model.bin", model_data)?;
660"#
661                    .to_string(),
662                ),
663                success_indicators: vec![
664                    "Model downloads successfully".to_string(),
665                    "Checksum verification passes".to_string(),
666                ],
667            },
668        ],
669        documentation_links: vec![
670            "https://docs.voirs.ai/models/loading".to_string(),
671            "https://docs.voirs.ai/troubleshooting/model-errors".to_string(),
672        ],
673        troubleshooting_steps: vec![
674            "Check system logs for more details".to_string(),
675            "Verify model format compatibility".to_string(),
676            "Test with a smaller test model first".to_string(),
677            "Check memory availability".to_string(),
678        ],
679    }
680}
681
682fn create_audio_processing_enhancement(message: &str) -> ErrorEnhancement {
683    ErrorEnhancement {
684        original_message: message.to_string(),
685        category: ErrorCategory::AudioFormat,
686        severity: ErrorSeverity::High,
687        context: ErrorContext {
688            component: "Audio Processor".to_string(),
689            operation: "Processing audio input".to_string(),
690            input_summary: "Audio file or stream".to_string(),
691            system_state: "Audio processing pipeline".to_string(),
692            timestamp: std::time::SystemTime::now(),
693            additional_info: std::collections::HashMap::new(),
694        },
695        solutions: vec![
696            Solution {
697                title: "Verify audio format compatibility".to_string(),
698                description: "Check if the audio format is supported".to_string(),
699                priority: 1,
700                estimated_time: "1-2 minutes".to_string(),
701                difficulty: SolutionDifficulty::Easy,
702                steps: vec![
703                    "Check audio file format (WAV, MP3, FLAC, etc.)".to_string(),
704                    "Verify sample rate (16kHz recommended)".to_string(),
705                    "Check bit depth (16-bit recommended)".to_string(),
706                    "Ensure audio is not corrupted".to_string(),
707                ],
708                code_example: Some(
709                    r#"
710use voirs_recognizer::audio_formats::{load_audio, AudioFormat};
711
712// Load and inspect audio
713match load_audio("path/to/audio.wav") {
714    Ok(audio) => {
715        println!("Sample rate: {}", audio.sample_rate);
716        println!("Channels: {}", audio.channels);
717        println!("Duration: {:.2}s", audio.duration_seconds);
718    }
719    Err(e) => println!("Audio loading error: {}", e),
720}
721"#
722                    .to_string(),
723                ),
724                success_indicators: vec![
725                    "Audio format is supported".to_string(),
726                    "Sample rate is compatible".to_string(),
727                    "Audio loads without errors".to_string(),
728                ],
729            },
730            Solution {
731                title: "Convert audio to supported format".to_string(),
732                description: "Convert audio to a supported format and sample rate".to_string(),
733                priority: 2,
734                estimated_time: "2-5 minutes".to_string(),
735                difficulty: SolutionDifficulty::Moderate,
736                steps: vec![
737                    "Convert to WAV format".to_string(),
738                    "Resample to 16kHz".to_string(),
739                    "Convert to mono if needed".to_string(),
740                    "Ensure 16-bit depth".to_string(),
741                ],
742                code_example: Some(
743                    r#"
744use voirs_recognizer::audio_formats::load_audio_with_sample_rate;
745
746// Load with automatic resampling
747let audio = load_audio_with_sample_rate("input.mp3", 16000)?;
748"#
749                    .to_string(),
750                ),
751                success_indicators: vec![
752                    "Audio converts successfully".to_string(),
753                    "Sample rate is 16kHz".to_string(),
754                    "Audio quality is preserved".to_string(),
755                ],
756            },
757        ],
758        documentation_links: vec![
759            "https://docs.voirs.ai/audio/formats".to_string(),
760            "https://docs.voirs.ai/audio/preprocessing".to_string(),
761        ],
762        troubleshooting_steps: vec![
763            "Test with a simple WAV file".to_string(),
764            "Check audio file integrity".to_string(),
765            "Verify codec support".to_string(),
766            "Test with different sample rates".to_string(),
767        ],
768    }
769}
770
771fn create_configuration_enhancement(message: &str) -> ErrorEnhancement {
772    ErrorEnhancement {
773        original_message: message.to_string(),
774        category: ErrorCategory::Configuration,
775        severity: ErrorSeverity::Medium,
776        context: ErrorContext {
777            component: "Configuration Manager".to_string(),
778            operation: "Loading configuration".to_string(),
779            input_summary: "Configuration file or parameters".to_string(),
780            system_state: "System initialization".to_string(),
781            timestamp: std::time::SystemTime::now(),
782            additional_info: std::collections::HashMap::new(),
783        },
784        solutions: vec![
785            Solution {
786                title: "Check configuration file syntax".to_string(),
787                description: "Verify configuration file format and syntax".to_string(),
788                priority: 1,
789                estimated_time: "1-2 minutes".to_string(),
790                difficulty: SolutionDifficulty::Easy,
791                steps: vec![
792                    "Check JSON/YAML syntax".to_string(),
793                    "Verify required fields are present".to_string(),
794                    "Check data types match expected values".to_string(),
795                    "Ensure no invalid characters".to_string(),
796                ],
797                code_example: Some(
798                    r#"
799use serde_json;
800use std::fs;
801
802// Validate JSON configuration
803let config_content = fs::read_to_string("config.json")?;
804match serde_json::from_str::<serde_json::Value>(&config_content) {
805    Ok(_) => println!("Configuration is valid JSON"),
806    Err(e) => println!("JSON error: {}", e),
807}
808"#
809                    .to_string(),
810                ),
811                success_indicators: vec![
812                    "Configuration parses successfully".to_string(),
813                    "All required fields are present".to_string(),
814                    "No syntax errors".to_string(),
815                ],
816            },
817            Solution {
818                title: "Use default configuration".to_string(),
819                description: "Generate and use a default configuration".to_string(),
820                priority: 2,
821                estimated_time: "1 minute".to_string(),
822                difficulty: SolutionDifficulty::Easy,
823                steps: vec![
824                    "Generate default configuration".to_string(),
825                    "Save to configuration file".to_string(),
826                    "Customize as needed".to_string(),
827                    "Test with default settings".to_string(),
828                ],
829                code_example: Some(
830                    r#"
831use voirs_recognizer::ASRConfig;
832
833// Create default configuration
834let config = ASRConfig::default();
835let config_json = serde_json::to_string_pretty(&config)?;
836std::fs::write("config.json", config_json)?;
837"#
838                    .to_string(),
839                ),
840                success_indicators: vec![
841                    "Default configuration loads successfully".to_string(),
842                    "System initializes with default settings".to_string(),
843                ],
844            },
845        ],
846        documentation_links: vec![
847            "https://docs.voirs.ai/configuration".to_string(),
848            "https://docs.voirs.ai/configuration/examples".to_string(),
849        ],
850        troubleshooting_steps: vec![
851            "Check configuration file permissions".to_string(),
852            "Validate against schema".to_string(),
853            "Test with minimal configuration".to_string(),
854            "Compare with working examples".to_string(),
855        ],
856    }
857}
858
859fn create_memory_error_enhancement(message: &str) -> ErrorEnhancement {
860    ErrorEnhancement {
861        original_message: message.to_string(),
862        category: ErrorCategory::Resources,
863        severity: ErrorSeverity::Critical,
864        context: ErrorContext {
865            component: "Memory Manager".to_string(),
866            operation: "Memory allocation for processing".to_string(),
867            input_summary: "Large audio files or models".to_string(),
868            system_state: "High memory usage".to_string(),
869            timestamp: std::time::SystemTime::now(),
870            additional_info: std::collections::HashMap::new(),
871        },
872        solutions: vec![
873            Solution {
874                title: "Reduce batch size and enable streaming".to_string(),
875                description: "Process audio in smaller chunks to reduce memory usage".to_string(),
876                priority: 1,
877                estimated_time: "1-2 minutes".to_string(),
878                difficulty: SolutionDifficulty::Easy,
879                steps: vec![
880                    "Enable streaming mode in configuration".to_string(),
881                    "Reduce batch size to 1-4 samples".to_string(),
882                    "Use memory-efficient model variants".to_string(),
883                    "Clear cache between processing".to_string(),
884                ],
885                code_example: Some(
886                    r"
887use voirs_recognizer::{ASRConfig, StreamingConfig};
888
889let mut config = ASRConfig::default();
890config.streaming = Some(StreamingConfig {
891    chunk_length_ms: 30000,  // 30 second chunks
892    overlap_ms: 3000,        // 3 second overlap
893    ..Default::default()
894});
895config.batch_size = 1;  // Process one sample at a time
896"
897                    .to_string(),
898                ),
899                success_indicators: vec![
900                    "Memory usage stays below 2GB".to_string(),
901                    "Processing completes without OOM errors".to_string(),
902                    "Audio quality is preserved".to_string(),
903                ],
904            },
905            Solution {
906                title: "Use quantized models".to_string(),
907                description: "Switch to quantized model variants that use less memory".to_string(),
908                priority: 2,
909                estimated_time: "2-3 minutes".to_string(),
910                difficulty: SolutionDifficulty::Moderate,
911                steps: vec![
912                    "Download quantized model variant".to_string(),
913                    "Update model path in configuration".to_string(),
914                    "Enable quantization in runtime config".to_string(),
915                    "Test accuracy on sample data".to_string(),
916                ],
917                code_example: Some(
918                    r#"
919let mut config = ASRConfig::default();
920config.model_path = "models/whisper-base-q8.bin".to_string();
921config.quantization_enabled = true;
922"#
923                    .to_string(),
924                ),
925                success_indicators: vec![
926                    "Model loads with reduced memory footprint".to_string(),
927                    "Inference speed is maintained or improved".to_string(),
928                    "Acceptable accuracy for use case".to_string(),
929                ],
930            },
931        ],
932        documentation_links: vec![
933            "https://docs.voirs.ai/memory/optimization".to_string(),
934            "https://docs.voirs.ai/models/quantization".to_string(),
935        ],
936        troubleshooting_steps: vec![
937            "Monitor system memory usage".to_string(),
938            "Profile memory allocation patterns".to_string(),
939            "Test with progressively smaller inputs".to_string(),
940            "Check for memory leaks".to_string(),
941        ],
942    }
943}
944
945fn create_device_error_enhancement(message: &str) -> ErrorEnhancement {
946    ErrorEnhancement {
947        original_message: message.to_string(),
948        category: ErrorCategory::Resources,
949        severity: ErrorSeverity::High,
950        context: ErrorContext {
951            component: "Device Manager".to_string(),
952            operation: "Device initialization and selection".to_string(),
953            input_summary: "GPU/CPU device configuration".to_string(),
954            system_state: "Device enumeration".to_string(),
955            timestamp: std::time::SystemTime::now(),
956            additional_info: std::collections::HashMap::new(),
957        },
958        solutions: vec![
959            Solution {
960                title: "Fallback to CPU processing".to_string(),
961                description: "Use CPU when GPU is unavailable or incompatible".to_string(),
962                priority: 1,
963                estimated_time: "30 seconds".to_string(),
964                difficulty: SolutionDifficulty::Easy,
965                steps: vec![
966                    "Set device type to CPU in configuration".to_string(),
967                    "Disable GPU acceleration".to_string(),
968                    "Verify CPU meets minimum requirements".to_string(),
969                    "Test with CPU-optimized settings".to_string(),
970                ],
971                code_example: Some(
972                    r"
973let mut config = ASRConfig::default();
974config.device_type = DeviceType::Cpu;
975config.gpu_enabled = false;
976config.num_threads = std::thread::available_parallelism()?.get();
977"
978                    .to_string(),
979                ),
980                success_indicators: vec![
981                    "Processing works on CPU".to_string(),
982                    "No device-related errors".to_string(),
983                    "Reasonable processing speed".to_string(),
984                ],
985            },
986            Solution {
987                title: "Update GPU drivers and check compatibility".to_string(),
988                description: "Ensure GPU drivers are up-to-date and compatible".to_string(),
989                priority: 2,
990                estimated_time: "10-15 minutes".to_string(),
991                difficulty: SolutionDifficulty::Moderate,
992                steps: vec![
993                    "Check GPU compatibility list".to_string(),
994                    "Update GPU drivers to latest version".to_string(),
995                    "Verify CUDA/OpenCL installation".to_string(),
996                    "Test GPU functionality".to_string(),
997                ],
998                code_example: Some(
999                    r#"
1000// Check device availability
1001use candle_core::Device;
1002
1003match Device::cuda_if_available(0) {
1004    Ok(device) => println!("GPU available: {:?}", device),
1005    Err(e) => println!("GPU not available: {}", e),
1006}
1007"#
1008                    .to_string(),
1009                ),
1010                success_indicators: vec![
1011                    "GPU device is detected".to_string(),
1012                    "No driver compatibility issues".to_string(),
1013                    "GPU memory is accessible".to_string(),
1014                ],
1015            },
1016        ],
1017        documentation_links: vec![
1018            "https://docs.voirs.ai/setup/gpu".to_string(),
1019            "https://docs.voirs.ai/troubleshooting/devices".to_string(),
1020        ],
1021        troubleshooting_steps: vec![
1022            "Check device manager for GPU".to_string(),
1023            "Test with different device indices".to_string(),
1024            "Verify compute capability".to_string(),
1025            "Check for conflicting GPU processes".to_string(),
1026        ],
1027    }
1028}
1029
1030fn create_format_error_enhancement(format: &str) -> ErrorEnhancement {
1031    ErrorEnhancement {
1032        original_message: format!("Unsupported audio format: {format}"),
1033        category: ErrorCategory::AudioFormat,
1034        severity: ErrorSeverity::Medium,
1035        context: ErrorContext {
1036            component: "Audio Format Detector".to_string(),
1037            operation: "Audio format validation".to_string(),
1038            input_summary: format!("Audio file with format: {format}"),
1039            system_state: "Format detection".to_string(),
1040            timestamp: std::time::SystemTime::now(),
1041            additional_info: std::collections::HashMap::new(),
1042        },
1043        solutions: vec![Solution {
1044            title: "Convert to supported format".to_string(),
1045            description: "Convert audio to WAV, FLAC, MP3, or OGG format".to_string(),
1046            priority: 1,
1047            estimated_time: "1-3 minutes".to_string(),
1048            difficulty: SolutionDifficulty::Easy,
1049            steps: vec![
1050                "Use FFmpeg or similar tool to convert".to_string(),
1051                "Convert to WAV format (recommended)".to_string(),
1052                "Ensure 16kHz sample rate".to_string(),
1053                "Use mono channel if possible".to_string(),
1054            ],
1055            code_example: Some(format!(
1056                r#"
1057// Convert using FFmpeg command line
1058// ffmpeg -i input.{format} -ar 16000 -ac 1 output.wav
1059
1060// Or use the built-in audio loader with conversion
1061use voirs_recognizer::audio_formats::load_audio;
1062
1063let audio = load_audio("input.wav")?;  // Will auto-convert
1064"#
1065            )),
1066            success_indicators: vec![
1067                "Audio converts without quality loss".to_string(),
1068                "Converted file loads successfully".to_string(),
1069                "Recognition works with converted audio".to_string(),
1070            ],
1071        }],
1072        documentation_links: vec![
1073            "https://docs.voirs.ai/audio/supported-formats".to_string(),
1074            "https://docs.voirs.ai/audio/conversion".to_string(),
1075        ],
1076        troubleshooting_steps: vec![
1077            "Check file extension matches content".to_string(),
1078            "Verify file is not corrupted".to_string(),
1079            "Test with simple WAV file first".to_string(),
1080            "Check codec requirements".to_string(),
1081        ],
1082    }
1083}
1084
1085// Additional helper functions for other error types...
1086
1087fn create_model_error_enhancement(message: &str) -> ErrorEnhancement {
1088    // Implementation for model errors
1089    ErrorEnhancement {
1090        original_message: message.to_string(),
1091        category: ErrorCategory::ModelIssues,
1092        severity: ErrorSeverity::High,
1093        context: ErrorContext {
1094            component: "Model Runtime".to_string(),
1095            operation: "Model inference".to_string(),
1096            input_summary: "Model input data".to_string(),
1097            system_state: "Model execution".to_string(),
1098            timestamp: std::time::SystemTime::now(),
1099            additional_info: std::collections::HashMap::new(),
1100        },
1101        solutions: vec![Solution {
1102            title: "Check model compatibility".to_string(),
1103            description: "Verify model is compatible with current version".to_string(),
1104            priority: 1,
1105            estimated_time: "2-3 minutes".to_string(),
1106            difficulty: SolutionDifficulty::Easy,
1107            steps: vec![
1108                "Check model version".to_string(),
1109                "Verify system requirements".to_string(),
1110                "Update to compatible model".to_string(),
1111            ],
1112            code_example: None,
1113            success_indicators: vec![
1114                "Model version is compatible".to_string(),
1115                "System requirements met".to_string(),
1116            ],
1117        }],
1118        documentation_links: vec!["https://docs.voirs.ai/models/compatibility".to_string()],
1119        troubleshooting_steps: vec![
1120            "Test with different model".to_string(),
1121            "Check system resources".to_string(),
1122        ],
1123    }
1124}
1125
1126// Remaining helper functions follow the same structure, tailored per error type.
1127fn create_transcription_enhancement(message: &str) -> ErrorEnhancement {
1128    // Similar structure for transcription errors
1129    ErrorEnhancement {
1130        original_message: message.to_string(),
1131        category: ErrorCategory::ModelIssues,
1132        severity: ErrorSeverity::Medium,
1133        context: ErrorContext {
1134            component: "Transcription Engine".to_string(),
1135            operation: "Speech transcription".to_string(),
1136            input_summary: "Audio for transcription".to_string(),
1137            system_state: "Transcription processing".to_string(),
1138            timestamp: std::time::SystemTime::now(),
1139            additional_info: std::collections::HashMap::new(),
1140        },
1141        solutions: vec![Solution {
1142            title: "Check audio quality".to_string(),
1143            description: "Ensure audio quality is sufficient for transcription".to_string(),
1144            priority: 1,
1145            estimated_time: "1-2 minutes".to_string(),
1146            difficulty: SolutionDifficulty::Easy,
1147            steps: vec![
1148                "Check for background noise".to_string(),
1149                "Verify speech clarity".to_string(),
1150                "Ensure adequate volume".to_string(),
1151            ],
1152            code_example: None,
1153            success_indicators: vec![
1154                "Audio is clear and audible".to_string(),
1155                "Minimal background noise".to_string(),
1156            ],
1157        }],
1158        documentation_links: vec!["https://docs.voirs.ai/transcription/quality".to_string()],
1159        troubleshooting_steps: vec![
1160            "Test with high-quality audio".to_string(),
1161            "Check microphone settings".to_string(),
1162        ],
1163    }
1164}
1165
1166// The remaining error types delegate to `create_default_enhancement` with an appropriate category and component.
1167fn create_phoneme_recognition_enhancement(message: &str) -> ErrorEnhancement {
1168    create_default_enhancement(message, ErrorCategory::ModelIssues, "Phoneme Recognition")
1169}
1170
1171fn create_audio_analysis_enhancement(message: &str) -> ErrorEnhancement {
1172    create_default_enhancement(message, ErrorCategory::AudioFormat, "Audio Analysis")
1173}
1174
1175fn create_feature_not_supported_enhancement(feature: &str) -> ErrorEnhancement {
1176    create_default_enhancement(feature, ErrorCategory::FeatureSupport, "Feature Support")
1177}
1178
1179fn create_invalid_input_enhancement(message: &str) -> ErrorEnhancement {
1180    create_default_enhancement(message, ErrorCategory::InputValidation, "Input Validation")
1181}
1182
1183fn create_io_error_enhancement(message: &str) -> ErrorEnhancement {
1184    create_default_enhancement(message, ErrorCategory::Resources, "I/O Operations")
1185}
1186
1187fn create_network_error_enhancement(message: &str) -> ErrorEnhancement {
1188    create_default_enhancement(message, ErrorCategory::Resources, "Network Operations")
1189}
1190
1191fn create_timeout_error_enhancement(message: &str) -> ErrorEnhancement {
1192    create_default_enhancement(message, ErrorCategory::Performance, "Timeout Handling")
1193}
1194
1195fn create_concurrency_error_enhancement(message: &str) -> ErrorEnhancement {
1196    create_default_enhancement(message, ErrorCategory::Integration, "Concurrency")
1197}
1198
1199fn create_other_error_enhancement(message: &str) -> ErrorEnhancement {
1200    create_default_enhancement(message, ErrorCategory::Integration, "General")
1201}
1202
1203fn create_resource_error_enhancement(message: &str) -> ErrorEnhancement {
1204    create_default_enhancement(message, ErrorCategory::Resources, "Resource Management")
1205}
1206
1207fn create_unsupported_format_enhancement(format: &str) -> ErrorEnhancement {
1208    ErrorEnhancement {
1209        original_message: format!("Unsupported audio format: {format}"),
1210        category: ErrorCategory::AudioFormat,
1211        severity: ErrorSeverity::Medium,
1212        context: ErrorContext {
1213            component: "Audio Format Handler".to_string(),
1214            operation: "Audio format detection".to_string(),
1215            input_summary: format!("Audio format: {format}"),
1216            system_state: "Format processing".to_string(),
1217            timestamp: std::time::SystemTime::now(),
1218            additional_info: std::collections::HashMap::new(),
1219        },
1220        solutions: vec![Solution {
1221            title: "Convert to supported format".to_string(),
1222            description: "Convert audio to a supported format like WAV or FLAC".to_string(),
1223            priority: 1,
1224            estimated_time: "2-5 minutes".to_string(),
1225            difficulty: SolutionDifficulty::Easy,
1226            steps: vec![
1227                "Convert to WAV format".to_string(),
1228                "Use 16kHz sample rate".to_string(),
1229                "Ensure 16-bit depth".to_string(),
1230                "Convert to mono if needed".to_string(),
1231            ],
1232            code_example: Some(
1233                r#"
1234// Convert using ffmpeg command line
1235// ffmpeg -i input.{format} -ar 16000 -ac 1 -c:a pcm_s16le output.wav
1236
1237// Or use VoiRS built-in conversion
1238use voirs_recognizer::audio_formats::load_audio_with_sample_rate;
1239let audio = load_audio_with_sample_rate("input.{format}", 16000)?;
1240"#
1241                .replace("{format}", format),
1242            ),
1243            success_indicators: vec![
1244                "Audio converts successfully".to_string(),
1245                "VoiRS can process the converted audio".to_string(),
1246            ],
1247        }],
1248        documentation_links: vec![
1249            "https://docs.voirs.ai/audio/formats".to_string(),
1250            "https://docs.voirs.ai/audio/conversion".to_string(),
1251        ],
1252        troubleshooting_steps: vec![
1253            "Check supported formats list".to_string(),
1254            "Verify audio file integrity".to_string(),
1255            "Test with minimal audio sample".to_string(),
1256        ],
1257    }
1258}
1259
1260fn create_invalid_format_enhancement(format: &str) -> ErrorEnhancement {
1261    create_default_enhancement(
1262        &format!("Invalid audio format: {format}"),
1263        ErrorCategory::AudioFormat,
1264        "Audio Format Validation",
1265    )
1266}
1267
1268fn create_model_not_found_enhancement(
1269    model: &str,
1270    available: &[String],
1271    suggestions: &[String],
1272) -> ErrorEnhancement {
1273    ErrorEnhancement {
1274        original_message: format!("Model '{model}' not found. Available models: {available:?}"),
1275        category: ErrorCategory::ModelIssues,
1276        severity: ErrorSeverity::High,
1277        context: ErrorContext {
1278            component: "Model Manager".to_string(),
1279            operation: "Model loading".to_string(),
1280            input_summary: format!("Requested model: {model}"),
1281            system_state: "Model discovery".to_string(),
1282            timestamp: std::time::SystemTime::now(),
1283            additional_info: std::collections::HashMap::new(),
1284        },
1285        solutions: vec![Solution {
1286            title: "Use available model".to_string(),
1287            description: "Select from available models".to_string(),
1288            priority: 1,
1289            estimated_time: "1 minute".to_string(),
1290            difficulty: SolutionDifficulty::Easy,
1291            steps: vec![
1292                format!("Available models: {available:?}"),
1293                format!("Suggested alternatives: {:?}", suggestions),
1294                "Update configuration to use available model".to_string(),
1295            ],
1296            code_example: Some(format!(
1297                r#"
1298// Use available model instead
1299let config = ASRConfig::default().with_model("{}");
1300"#,
1301                available.first().unwrap_or(&"base".to_string())
1302            )),
1303            success_indicators: vec![
1304                "Model loads successfully".to_string(),
1305                "System initializes properly".to_string(),
1306            ],
1307        }],
1308        documentation_links: vec!["https://docs.voirs.ai/models/available".to_string()],
1309        troubleshooting_steps: vec![
1310            "List available models".to_string(),
1311            "Check model installation".to_string(),
1312        ],
1313    }
1314}
1315
1316fn create_language_not_supported_enhancement(
1317    language: &str,
1318    supported: &[String],
1319    suggestions: &[String],
1320) -> ErrorEnhancement {
1321    ErrorEnhancement {
1322        original_message: format!(
1323            "Language '{language}' not supported. Supported languages: {supported:?}"
1324        ),
1325        category: ErrorCategory::FeatureSupport,
1326        severity: ErrorSeverity::Medium,
1327        context: ErrorContext {
1328            component: "Language Support".to_string(),
1329            operation: "Language validation".to_string(),
1330            input_summary: format!("Requested language: {language}"),
1331            system_state: "Language processing".to_string(),
1332            timestamp: std::time::SystemTime::now(),
1333            additional_info: std::collections::HashMap::new(),
1334        },
1335        solutions: vec![Solution {
1336            title: "Use supported language".to_string(),
1337            description: "Select from supported languages".to_string(),
1338            priority: 1,
1339            estimated_time: "1 minute".to_string(),
1340            difficulty: SolutionDifficulty::Easy,
1341            steps: vec![
1342                format!("Supported languages: {supported:?}"),
1343                format!("Suggested alternatives: {suggestions:?}"),
1344                "Update configuration to use supported language".to_string(),
1345            ],
1346            code_example: Some(format!(
1347                r#"
1348// Use supported language instead
1349let config = ASRConfig::default().with_language("{}");
1350"#,
1351                supported.first().unwrap_or(&"en".to_string())
1352            )),
1353            success_indicators: vec![
1354                "Language is recognized".to_string(),
1355                "Processing continues successfully".to_string(),
1356            ],
1357        }],
1358        documentation_links: vec!["https://docs.voirs.ai/languages/supported".to_string()],
1359        troubleshooting_steps: vec![
1360            "Check language code format".to_string(),
1361            "Verify language pack installation".to_string(),
1362        ],
1363    }
1364}
1365
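/// Create an enhancement for a compute device that is unavailable, pointing to the fallback device.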
1366fn create_device_not_available_enhancement(
1367    device: &str,
1368    reason: &str,
1369    fallback: &str,
1370) -> ErrorEnhancement {
1371    ErrorEnhancement {
1372        original_message: format!(
1373            "Device '{device}' not available: {reason}. Fallback: {fallback}"
1374        ),
1375        category: ErrorCategory::Resources,
1376        severity: ErrorSeverity::Medium,
1377        context: ErrorContext {
1378            component: "Device Manager".to_string(),
1379            operation: "Device initialization".to_string(),
1380            input_summary: format!("Requested device: {device}"),
1381            system_state: "Device discovery".to_string(),
1382            timestamp: std::time::SystemTime::now(),
1383            additional_info: std::collections::HashMap::new(),
1384        },
1385        solutions: vec![Solution {
1386            title: "Use fallback device".to_string(),
1387            description: format!("Use fallback device: {fallback}"),
1388            priority: 1,
1389            estimated_time: "Immediate".to_string(),
1390            difficulty: SolutionDifficulty::Easy,
1391            steps: vec![
1392                format!("Fallback device: {fallback}"),
1393                "System will automatically use fallback".to_string(),
1394                "Performance may be reduced".to_string(),
1395            ],
1396            code_example: Some(format!(
1397                r#"
1398// Configure fallback device
1399let config = ASRConfig::default().with_device("{fallback}");
1400"#
1401            )),
1402            success_indicators: vec![
1403                "Processing continues with fallback".to_string(),
1404                "No device errors occur".to_string(),
1405            ],
1406        }],
1407        documentation_links: vec!["https://docs.voirs.ai/devices/configuration".to_string()],
1408        troubleshooting_steps: vec![
1409            "Check device availability".to_string(),
1410            "Verify device drivers".to_string(),
1411            "Test with CPU fallback".to_string(),
1412        ],
1413    }
1414}
1415
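/// Create an enhancement for memory allocation failures, comparing required and available memory.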
1416fn create_insufficient_memory_enhancement(
1417    required_mb: u64,
1418    available_mb: u64,
1419    recommendation: &str,
1420) -> ErrorEnhancement {
1421    ErrorEnhancement {
1422        original_message: format!(
1423            "Insufficient memory: need {required_mb}MB, have {available_mb}MB. Recommendation: {recommendation}"
1424        ),
1425        category: ErrorCategory::Resources,
1426        severity: ErrorSeverity::High,
1427        context: ErrorContext {
1428            component: "Memory Manager".to_string(),
1429            operation: "Memory allocation".to_string(),
1430            input_summary: format!("Required: {required_mb}MB, Available: {available_mb}MB"),
1431            system_state: "Memory allocation".to_string(),
1432            timestamp: std::time::SystemTime::now(),
1433            additional_info: std::collections::HashMap::new(),
1434        },
1435        solutions: vec![Solution {
1436            title: "Free up memory".to_string(),
1437            description: "Close other applications and free up memory".to_string(),
1438            priority: 1,
1439            estimated_time: "2-5 minutes".to_string(),
1440            difficulty: SolutionDifficulty::Easy,
1441            steps: vec![
1442                "Close unnecessary applications".to_string(),
1443                "Clear system cache".to_string(),
1444                "Restart the application".to_string(),
1445                recommendation.to_string(),
1446            ],
1447            code_example: Some(
1448                r#"
1449// Use smaller model to reduce memory usage
1450let config = ASRConfig::default().with_model_size("small");
1451"#
1452                .to_string(),
1453            ),
1454            success_indicators: vec![
1455                "Sufficient memory available".to_string(),
1456                "Model loads successfully".to_string(),
1457            ],
1458        }],
1459        documentation_links: vec!["https://docs.voirs.ai/performance/memory".to_string()],
1460        troubleshooting_steps: vec![
1461            "Check system memory usage".to_string(),
1462            "Use memory profiler".to_string(),
1463            "Consider smaller model".to_string(),
1464        ],
1465    }
1466}
1467
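/// Create an enhancement for recognition runs that exceed the configured timeout.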
1468fn create_recognition_timeout_enhancement(
1469    timeout_ms: u64,
1470    audio_duration_ms: u64,
1471    suggestion: &str,
1472) -> ErrorEnhancement {
1473    ErrorEnhancement {
1474        original_message: format!(
1475            "Recognition timed out after {timeout_ms}ms. Audio duration: {audio_duration_ms}ms. Suggestion: {suggestion}"
1476        ),
1477        category: ErrorCategory::Performance,
1478        severity: ErrorSeverity::Medium,
1479        context: ErrorContext {
1480            component: "Recognition Engine".to_string(),
1481            operation: "Speech recognition".to_string(),
1482            input_summary: format!("Timeout: {timeout_ms}ms, Audio: {audio_duration_ms}ms"),
1483            system_state: "Recognition processing".to_string(),
1484            timestamp: std::time::SystemTime::now(),
1485            additional_info: std::collections::HashMap::new(),
1486        },
1487        solutions: vec![Solution {
1488            title: "Increase timeout".to_string(),
1489            description: "Increase recognition timeout for longer audio".to_string(),
1490            priority: 1,
1491            estimated_time: "1 minute".to_string(),
1492            difficulty: SolutionDifficulty::Easy,
1493            steps: vec![
1494                "Increase timeout setting".to_string(),
1495                suggestion.to_string(),
1496                "Test with new timeout".to_string(),
1497            ],
1498            code_example: Some(
1499                r"
1500// Increase timeout
1501let config = ASRConfig::default().with_timeout_ms(30000); // 30 seconds
1502"
1503                .to_string(),
1504            ),
1505            success_indicators: vec![
1506                "Recognition completes within timeout".to_string(),
1507                "No timeout errors occur".to_string(),
1508            ],
1509        }],
1510        documentation_links: vec!["https://docs.voirs.ai/performance/timeouts".to_string()],
1511        troubleshooting_steps: vec![
1512            "Check audio length".to_string(),
1513            "Monitor processing time".to_string(),
1514            "Test with shorter audio".to_string(),
1515        ],
1516    }
1517}
1518
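/// Create a generic enhancement with default context and a log-review solution for the given message, category, and component.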
1519fn create_default_enhancement(
1520    message: &str,
1521    category: ErrorCategory,
1522    component: &str,
1523) -> ErrorEnhancement {
1524    ErrorEnhancement {
1525        original_message: message.to_string(),
1526        category,
1527        severity: ErrorSeverity::Medium,
1528        context: ErrorContext {
1529            component: component.to_string(),
1530            operation: "Processing".to_string(),
1531            input_summary: "User input".to_string(),
1532            system_state: "Runtime".to_string(),
1533            timestamp: std::time::SystemTime::now(),
1534            additional_info: std::collections::HashMap::new(),
1535        },
1536        solutions: vec![Solution {
1537            title: "Check system logs".to_string(),
1538            description: "Review system logs for more details".to_string(),
1539            priority: 1,
1540            estimated_time: "1-2 minutes".to_string(),
1541            difficulty: SolutionDifficulty::Easy,
1542            steps: vec![
1543                "Check application logs".to_string(),
1544                "Look for related error messages".to_string(),
1545                "Check system resource usage".to_string(),
1546            ],
1547            code_example: None,
1548            success_indicators: vec![
1549                "Logs provide additional context".to_string(),
1550                "Root cause identified".to_string(),
1551            ],
1552        }],
1553        documentation_links: vec!["https://docs.voirs.ai/troubleshooting".to_string()],
1554        troubleshooting_steps: vec![
1555            "Check recent system changes".to_string(),
1556            "Test with minimal configuration".to_string(),
1557            "Verify system requirements".to_string(),
1558        ],
1559    }
1560}
1561
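/// Render an `ErrorEnhancement` as a human-readable, multi-line report covering severity, context, solutions, documentation links, and troubleshooting steps.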
1562fn format_enhanced_error(enhancement: &ErrorEnhancement) -> String {
1563    let mut output = String::new();
1564
1565    output.push_str(&format!(
1566        "🚨 {} Error: {}\n",
1567        match enhancement.severity {
1568            ErrorSeverity::Critical => "CRITICAL",
1569            ErrorSeverity::High => "HIGH",
1570            ErrorSeverity::Medium => "MEDIUM",
1571            ErrorSeverity::Low => "LOW",
1572            ErrorSeverity::Info => "INFO",
1573        },
1574        enhancement.original_message
1575    ));
1576
1577    output.push_str(&format!("📂 Category: {:?}\n", enhancement.category));
1578    output.push_str(&format!(
1579        "⚙️  Component: {}\n",
1580        enhancement.context.component
1581    ));
1582    output.push_str(&format!(
1583        "🔧 Operation: {}\n",
1584        enhancement.context.operation
1585    ));
1586
1587    if !enhancement.solutions.is_empty() {
1588        output.push_str("\n💡 Suggested Solutions:\n");
1589        for (i, solution) in enhancement.solutions.iter().enumerate() {
1590            output.push_str(&format!(
1591                "  {}. {} ({} - {})\n",
1592                i + 1,
1593                solution.title,
1594                solution.estimated_time,
1595                match solution.difficulty {
1596                    SolutionDifficulty::Easy => "Easy",
1597                    SolutionDifficulty::Moderate => "Moderate",
1598                    SolutionDifficulty::Advanced => "Advanced",
1599                }
1600            ));
1601            output.push_str(&format!("     {}\n", solution.description));
1602
1603            if !solution.steps.is_empty() {
1604                output.push_str("     Steps:\n");
1605                for step in &solution.steps {
1606                    output.push_str(&format!("     - {step}\n"));
1607                }
1608            }
1609
1610            if let Some(code) = &solution.code_example {
1611                output.push_str("     Example code:\n");
1612                output.push_str(&format!("     ```rust{code}\n     ```\n"));
1613            }
1614        }
1615    }
1616
1617    if !enhancement.documentation_links.is_empty() {
1618        output.push_str("\n📚 Documentation:\n");
1619        for link in &enhancement.documentation_links {
1620            output.push_str(&format!("  - {link}\n"));
1621        }
1622    }
1623
1624    if !enhancement.troubleshooting_steps.is_empty() {
1625        output.push_str("\n🔍 Troubleshooting:\n");
1626        for step in &enhancement.troubleshooting_steps {
1627            output.push_str(&format!("  - {step}\n"));
1628        }
1629    }
1630
1631    output
1632}
1633
1634/// Convenience function to enhance any `RecognitionError`
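///
/// # Example
///
/// A minimal usage sketch (marked `ignore` because the import path assumes
/// this module's items are re-exported from the crate root):
///
/// ```ignore
/// use voirs_recognizer::{enhance_recognition_error, RecognitionError};
///
/// let error = RecognitionError::ConfigurationError {
///     message: "Invalid configuration".to_string(),
/// };
/// // Prints the formatted report with category, solutions, and documentation links.
/// println!("{}", enhance_recognition_error(&error));
/// ```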
1635#[must_use]
1636pub fn enhance_recognition_error(error: &RecognitionError) -> String {
1637    error.get_enhanced_message()
1638}
1639
1640/// Get quick fixes for an error
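///
/// # Example
///
/// A minimal sketch (marked `ignore`; the import path is an assumption):
///
/// ```ignore
/// use voirs_recognizer::{get_quick_fixes, RecognitionError};
///
/// let error = RecognitionError::ConfigurationError {
///     message: "Invalid configuration".to_string(),
/// };
/// for fix in get_quick_fixes(&error) {
///     println!("- {fix}");
/// }
/// ```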
1641#[must_use]
1642pub fn get_quick_fixes(error: &RecognitionError) -> Vec<String> {
1643    error.get_quick_fixes()
1644}
1645
1646/// Check if an error is recoverable
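///
/// # Example
///
/// A minimal sketch (marked `ignore`; the import path is an assumption):
///
/// ```ignore
/// use voirs_recognizer::{is_error_recoverable, RecognitionError};
///
/// let error = RecognitionError::ConfigurationError {
///     message: "Invalid configuration".to_string(),
/// };
/// if is_error_recoverable(&error) {
///     // Retry with an adjusted configuration instead of aborting.
/// }
/// ```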
1647#[must_use]
1648pub fn is_error_recoverable(error: &RecognitionError) -> bool {
1649    error.is_recoverable()
1650}
1651
1652impl fmt::Display for ErrorEnhancement {
1653    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1654        write!(f, "{}", format_enhanced_error(self))
1655    }
1656}
1657
1658impl fmt::Display for ErrorCategory {
1659    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1660        match self {
1661            ErrorCategory::Configuration => write!(f, "Configuration"),
1662            ErrorCategory::Resources => write!(f, "Resources"),
1663            ErrorCategory::AudioFormat => write!(f, "Audio Format"),
1664            ErrorCategory::ModelIssues => write!(f, "Model Issues"),
1665            ErrorCategory::Performance => write!(f, "Performance"),
1666            ErrorCategory::InputValidation => write!(f, "Input Validation"),
1667            ErrorCategory::Integration => write!(f, "Integration"),
1668            ErrorCategory::FeatureSupport => write!(f, "Feature Support"),
1669        }
1670    }
1671}
1672
1673impl fmt::Display for ErrorSeverity {
1674    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1675        match self {
1676            ErrorSeverity::Critical => write!(f, "Critical"),
1677            ErrorSeverity::High => write!(f, "High"),
1678            ErrorSeverity::Medium => write!(f, "Medium"),
1679            ErrorSeverity::Low => write!(f, "Low"),
1680            ErrorSeverity::Info => write!(f, "Info"),
1681        }
1682    }
1683}
1684
1685impl fmt::Display for SolutionDifficulty {
1686    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1687        match self {
1688            SolutionDifficulty::Easy => write!(f, "Easy"),
1689            SolutionDifficulty::Moderate => write!(f, "Moderate"),
1690            SolutionDifficulty::Advanced => write!(f, "Advanced"),
1691        }
1692    }
1693}
1694
1695/// Create an enhancement for training errors
1696fn create_training_error_enhancement(message: &str) -> ErrorEnhancement {
1697    ErrorEnhancement {
1698        original_message: message.to_string(),
1699        category: ErrorCategory::ModelIssues,
1700        severity: ErrorSeverity::High,
1701        context: ErrorContext {
1702            operation: "Training".to_string(),
1703            component: "Training Manager".to_string(),
1704            input_summary: "training module".to_string(),
1705            system_state: "Training in progress".to_string(),
1706            additional_info: HashMap::new(),
1707            timestamp: std::time::SystemTime::now(),
1708        },
1709        solutions: vec![
1710            Solution {
1711                title: "Check training data quality".to_string(),
1712                description: "Verify training data format and quality".to_string(),
1713                priority: 1,
1714                estimated_time: "5-10 minutes".to_string(),
1715                difficulty: SolutionDifficulty::Easy,
1716                steps: vec![
1717                    "Verify audio file formats are supported".to_string(),
1718                    "Check transcription accuracy and formatting".to_string(),
1719                    "Ensure data splits are properly balanced".to_string(),
1720                ],
1721                code_example: Some(
1722                    r"
1723# Validate training data
1724cargo run --example validate_training_data -- --path ./data/train
1725"
1726                    .to_string(),
1727                ),
1728                success_indicators: vec![
1729                    "All audio files load successfully".to_string(),
1730                    "No missing transcriptions".to_string(),
1731                ],
1732            },
1733            Solution {
1734                title: "Verify model configuration".to_string(),
1735                description: "Check training hyperparameters and model settings".to_string(),
1736                priority: 2,
1737                estimated_time: "2-5 minutes".to_string(),
1738                difficulty: SolutionDifficulty::Easy,
1739                steps: vec![
1740                    "Review learning rate settings".to_string(),
1741                    "Check batch size configuration".to_string(),
1742                    "Verify model architecture parameters".to_string(),
1743                ],
1744                code_example: Some(
1745                    r#"
1746// Check training configuration
1747let config = TrainingConfig::default();
1748println!("Learning rate: {}", config.learning_rate);
1749println!("Batch size: {}", config.batch_size);
1750"#
1751                    .to_string(),
1752                ),
1753                success_indicators: vec![
1754                    "Configuration parameters are within valid ranges".to_string()
1755                ],
1756            },
1757        ],
1758        documentation_links: vec![
1759            "https://docs.voirs.ai/training/getting-started".to_string(),
1760            "https://docs.voirs.ai/training/configuration".to_string(),
1761        ],
1762        troubleshooting_steps: vec![
1763            "Check training logs for detailed error information".to_string(),
1764            "Verify system has sufficient disk space for training artifacts".to_string(),
1765            "Ensure GPU memory is sufficient for model size".to_string(),
1766        ],
1767    }
1768}
1769
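/// Create an enhancement for internal synchronization failures (e.g. poisoned mutexes or failed locks)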
1770fn create_synchronization_error_enhancement(message: &str) -> ErrorEnhancement {
1771    ErrorEnhancement {
1772        original_message: message.to_string(),
1773        category: ErrorCategory::Resources,
1774        severity: ErrorSeverity::Critical,
1775        context: ErrorContext {
1776            operation: "Synchronization".to_string(),
1777            component: "Internal Mutex".to_string(),
1778            input_summary: "concurrent access".to_string(),
1779            system_state: "Mutex poisoned or lock failed".to_string(),
1780            additional_info: HashMap::new(),
1781            timestamp: std::time::SystemTime::now(),
1782        },
1783        solutions: vec![
1784            Solution {
1785                title: "Restart the application".to_string(),
1786                description: "Critical internal synchronization error requires restart".to_string(),
1787                priority: 1,
1788                estimated_time: "1 minute".to_string(),
1789                difficulty: SolutionDifficulty::Easy,
1790                steps: vec![
1791                    "Save any unsaved work".to_string(),
1792                    "Gracefully shutdown the application".to_string(),
1793                    "Restart the application".to_string(),
1794                ],
1795                code_example: None,
1796                success_indicators: vec!["Application starts without errors".to_string()],
1797            },
1798            Solution {
1799                title: "Report the issue".to_string(),
1800                description: "This indicates an internal bug that should be reported".to_string(),
1801                priority: 2,
1802                estimated_time: "5 minutes".to_string(),
1803                difficulty: SolutionDifficulty::Easy,
1804                steps: vec![
1805                    "Collect error logs and stack trace".to_string(),
1806                    "Note the steps that led to this error".to_string(),
1807                    "Report the issue to the development team".to_string(),
1808                ],
1809                code_example: None,
1810                success_indicators: vec!["Issue is tracked and being investigated".to_string()],
1811            },
1812        ],
1813        documentation_links: vec![
1814            "https://docs.voirs.ai/troubleshooting/critical-errors".to_string()
1815        ],
1816        troubleshooting_steps: vec![
1817            "Check system logs for any hardware or OS-level issues".to_string(),
1818            "Verify system resources (CPU, memory) are not exhausted".to_string(),
1819            "Try running with reduced concurrency settings".to_string(),
1820        ],
1821    }
1822}
1823
1824#[cfg(test)]
1825mod tests {
1826    use super::*;
1827
1828    #[test]
1829    fn test_model_load_enhancement() {
1830        let error = RecognitionError::ModelLoadError {
1831            message: "Model file not found".to_string(),
1832            source: None,
1833        };
1834
1835        let enhancement = error.enhance_error();
1836        assert_eq!(enhancement.category, ErrorCategory::ModelIssues);
1837        assert_eq!(enhancement.severity, ErrorSeverity::Critical);
1838        assert!(!enhancement.solutions.is_empty());
1839        assert!(enhancement.solutions[0].title.contains("Check model file"));
1840    }
1841
1842    #[test]
1843    fn test_audio_processing_enhancement() {
1844        let error = RecognitionError::AudioProcessingError {
1845            message: "Unsupported audio format".to_string(),
1846            source: None,
1847        };
1848
1849        let enhancement = error.enhance_error();
1850        assert_eq!(enhancement.category, ErrorCategory::AudioFormat);
1851        assert_eq!(enhancement.severity, ErrorSeverity::High);
1852        assert!(!enhancement.solutions.is_empty());
1853    }
1854
1855    #[test]
1856    fn test_enhanced_message_formatting() {
1857        let error = RecognitionError::ConfigurationError {
1858            message: "Invalid configuration".to_string(),
1859        };
1860
1861        let message = error.get_enhanced_message();
1862        assert!(message.contains("🚨"));
1863        assert!(message.contains("💡 Suggested Solutions:"));
1864        assert!(message.contains("📚 Documentation:"));
1865    }
1866
1867    #[test]
1868    fn test_quick_fixes() {
1869        let error = RecognitionError::ConfigurationError {
1870            message: "Invalid configuration".to_string(),
1871        };
1872
1873        let fixes = error.get_quick_fixes();
1874        assert!(!fixes.is_empty());
1875        assert!(fixes.iter().any(|f| f.contains("configuration")));
1876    }
1877
1878    #[test]
1879    fn test_recoverable_errors() {
1880        let recoverable_error = RecognitionError::ConfigurationError {
1881            message: "Invalid config".to_string(),
1882        };
1883        assert!(recoverable_error.is_recoverable());
1884
1885        let non_recoverable_error = RecognitionError::MemoryError {
1886            message: "Out of memory".to_string(),
1887            source: None,
1888        };
1889        assert!(!non_recoverable_error.is_recoverable());
1890    }
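
    // A supplementary sketch (not part of the original suite): checks that the
    // Display impl renders the generic enhancement produced by
    // `create_default_enhancement`. The test name and literals here are new.
    #[test]
    fn test_default_enhancement_display() {
        let enhancement = create_default_enhancement(
            "Something went wrong",
            ErrorCategory::Integration,
            "Example Component",
        );
        let rendered = enhancement.to_string();
        assert!(rendered.contains("Something went wrong"));
        assert!(rendered.contains("Example Component"));
        assert!(rendered.contains("Integration"));
    }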
1891}