mod openai;
#[cfg(feature = "local-whisper")]
mod local;
#[cfg(feature = "local-whisper")]
mod model;
use async_trait::async_trait;
pub use bytes::Bytes;
#[cfg(feature = "local-whisper")]
pub use local::{LocalWhisperClient, LocalWhisperConfig};
#[cfg(feature = "local-whisper")]
pub use model::{WhisperModel, download_model, ensure_model, model_exists, model_path};
#[cfg(all(feature = "local-whisper", target_os = "macos"))]
pub use model::{coreml_encoder_exists, coreml_encoder_path, ensure_coreml_encoder};
pub use openai::{OpenAIClient, OpenAIConfig};
use thiserror::Error;
/// Errors that can occur while transcribing audio.
///
/// Shared by all transcription backends in this crate; network failures
/// from `reqwest` convert automatically via `From`.
#[derive(Debug, Error)]
pub enum TranscribeError {
    /// The transcription API returned an error response; the payload is
    /// the provider-supplied detail string.
    #[error("API request failed: {0}")]
    ApiError(String),
    /// No API key was configured for a backend that requires one.
    #[error("No API key configured")]
    NoApiKey,
    /// The supplied audio bytes were not in a format the backend accepts;
    /// the payload describes the problem.
    #[error("Invalid audio format: {0}")]
    InvalidAudioFormat(String),
    /// A transport-level failure from `reqwest` (connection, TLS,
    /// timeout, …); `#[from]` lets `?` convert these automatically.
    #[error("Network error: {0}")]
    NetworkError(#[from] reqwest::Error),
    /// The backend ran but transcription itself failed; the payload is a
    /// backend-specific reason.
    #[error("Transcription failed: {0}")]
    TranscriptionFailed(String),
}
/// Crate-wide result alias for fallible transcription operations,
/// shorthand for `std::result::Result<T, TranscribeError>`.
pub type Result<T> = std::result::Result<T, TranscribeError>;
/// Common interface implemented by every transcription backend (the
/// OpenAI client, and the local Whisper client when the `local-whisper`
/// feature is enabled).
///
/// `Send + Sync` bounds allow a trait object to be shared across
/// threads/tasks.
#[async_trait]
pub trait Transcriber: Send + Sync {
    /// Transcribes the given audio bytes and returns the recognized text.
    ///
    /// `language` is an optional language hint passed to the backend
    /// (NOTE(review): presumably an ISO-639-style code — confirm the
    /// exact format each backend expects).
    ///
    /// # Errors
    /// Returns a `TranscribeError` when configuration, the network, the
    /// audio format, or the transcription itself fails.
    async fn transcribe(&self, audio: Bytes, language: Option<&str>) -> Result<String>;

    /// Human-readable name identifying this backend implementation.
    fn name(&self) -> &str;
}