use anyhow::Result;
use futures_util::{SinkExt, StreamExt};
use tokio::time::{Duration, timeout};
use tokio_tungstenite::{
connect_async,
tungstenite::{
Message,
client::IntoClientRequest,
http::header::{AUTHORIZATION, HeaderValue},
},
};
use crate::http::{get_http_client, warmup_http_client};
// Per-attempt warmup timeout; the overall join in `warmup_configured` allows 2s extra.
const WARMUP_TIMEOUT_SECS: u64 = 5;
// Realtime (WebSocket) endpoints — model/audio params are fixed query strings.
const OPENAI_REALTIME_WS_URL: &str =
"wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview";
const DEEPGRAM_REALTIME_WS_URL: &str =
"wss://api.deepgram.com/v1/listen?model=nova-2&encoding=linear16&sample_rate=16000&channels=1";
// Plain HTTPS base URLs used only to pre-establish DNS/TCP/TLS via a HEAD request.
const OPENAI_API_URL: &str = "https://api.openai.com";
const DEEPGRAM_API_URL: &str = "https://api.deepgram.com";
const GROQ_API_URL: &str = "https://api.groq.com";
const MISTRAL_API_URL: &str = "https://api.mistral.ai";
/// What to warm up before the first real request.
///
/// A provider or post-processor is only warmed when both its name and its
/// API key are present (see `warmup_configured`).
#[derive(Debug, Default, Clone)]
pub struct WarmupConfig {
/// Transcription provider name (e.g. "openai", "deepgram-realtime").
pub provider: Option<String>,
/// API key for `provider`; warmup is skipped if absent.
pub provider_api_key: Option<String>,
/// Post-processor name (e.g. "openai", "mistral"); "none" disables warmup.
pub post_processor: Option<String>,
/// API key for `post_processor`; warmup is skipped if absent.
pub post_processor_api_key: Option<String>,
}
/// Pre-establishes connections for every service named in `config`.
///
/// Provider and post-processor warmups run concurrently as spawned tasks;
/// all failures are swallowed (warmup is best-effort) and only surfaced on
/// stderr when verbose mode is on. Always returns `Ok(())`.
pub async fn warmup_configured(config: &WarmupConfig) -> Result<()> {
    // Warm the shared HTTP client first; a failure here is non-fatal.
    if let Err(e) = warmup_http_client()
        && crate::verbose::is_verbose()
    {
        eprintln!("[warmup] HTTP client warmup failed: {}", e);
    }

    let mut handles = Vec::new();

    // Provider warmup needs both a name and a key.
    if let (Some(name), Some(key)) = (config.provider.clone(), config.provider_api_key.clone()) {
        handles.push(tokio::spawn(
            async move { warmup_provider(&name, &key).await },
        ));
    }

    // Post-processor warmup: "none" explicitly disables it.
    match (
        config.post_processor.clone(),
        config.post_processor_api_key.clone(),
    ) {
        (Some(name), Some(key)) if name != "none" => {
            handles.push(tokio::spawn(async move {
                warmup_post_processor(&name, &key).await
            }));
        }
        _ => {}
    }

    // Wait for the tasks, but never longer than the per-attempt timeout plus
    // a small grace period; timed-out tasks keep running detached, which is
    // fine — they still warm the connection in the background.
    let join_all = async {
        for handle in handles {
            let _ = handle.await;
        }
    };
    let _ = timeout(Duration::from_secs(WARMUP_TIMEOUT_SECS + 2), join_all).await;

    if crate::verbose::is_verbose() {
        eprintln!("[warmup] Connection warmup completed");
    }
    Ok(())
}
/// Warms the connection for a single transcription provider.
///
/// Realtime providers get a WebSocket handshake; plain HTTP providers get an
/// authenticated HEAD request. Unknown names (including local providers) are
/// skipped silently unless verbose mode is on.
async fn warmup_provider(provider: &str, api_key: &str) -> Result<()> {
    match provider {
        "openai-realtime" => warmup_websocket_openai(api_key).await,
        "deepgram-realtime" => warmup_websocket_deepgram(api_key).await,
        "openai" => warmup_http_endpoint(OPENAI_API_URL, Some(api_key), "Bearer").await,
        // Deepgram uses the "Token" auth scheme rather than "Bearer".
        "deepgram" => warmup_http_endpoint(DEEPGRAM_API_URL, Some(api_key), "Token").await,
        "groq" => warmup_http_endpoint(GROQ_API_URL, Some(api_key), "Bearer").await,
        other => {
            if crate::verbose::is_verbose() {
                eprintln!("[warmup] Skipping unknown/local provider: {}", other);
            }
            Ok(())
        }
    }
}
/// Warms the connection for the configured post-processing LLM.
///
/// Only cloud processors need warming; "ollama" is local and is a no-op,
/// and unrecognized names are skipped (logged only in verbose mode).
async fn warmup_post_processor(processor: &str, api_key: &str) -> Result<()> {
    let (url, scheme) = match processor {
        "openai" => (OPENAI_API_URL, "Bearer"),
        "mistral" => (MISTRAL_API_URL, "Bearer"),
        // Local Ollama instance: no remote connection to warm.
        "ollama" => return Ok(()),
        other => {
            if crate::verbose::is_verbose() {
                eprintln!("[warmup] Skipping unknown post-processor: {}", other);
            }
            return Ok(());
        }
    };
    warmup_http_endpoint(url, Some(api_key), scheme).await
}
/// Fires an authenticated HEAD request at `url` to pre-establish DNS, TCP,
/// and TLS on the shared HTTP client.
///
/// Any response — even an error status — still warms the connection, so
/// every outcome is treated as success; details go to stderr only in
/// verbose mode. Errors from `get_http_client` are the only ones propagated.
async fn warmup_http_endpoint(url: &str, api_key: Option<&str>, auth_prefix: &str) -> Result<()> {
    let client = get_http_client()?;

    let mut req = client.head(url);
    if let Some(key) = api_key {
        // e.g. "Bearer <key>" or "Token <key>" depending on the provider.
        req = req.header("Authorization", format!("{} {}", auth_prefix, key));
    }

    let outcome = timeout(Duration::from_secs(WARMUP_TIMEOUT_SECS), req.send()).await;

    // Reporting only — the warmup itself already happened (or didn't).
    if crate::verbose::is_verbose() {
        match outcome {
            Ok(Ok(_)) => eprintln!("[warmup] HTTP warmup succeeded: {}", url),
            Ok(Err(e)) => eprintln!(
                "[warmup] HTTP warmup error (still warms TLS): {} - {}",
                url, e
            ),
            Err(_) => eprintln!("[warmup] HTTP warmup timeout: {}", url),
        }
    }
    Ok(())
}
/// Performs a throwaway WebSocket handshake against the OpenAI realtime
/// endpoint to warm DNS/TCP/TLS, then immediately closes the connection.
///
/// Best-effort: handshake failures and timeouts are logged (verbose only)
/// and never propagated; always returns `Ok(())`.
async fn warmup_websocket_openai(api_key: &str) -> Result<()> {
    let attempt = async {
        let mut request = OPENAI_REALTIME_WS_URL.into_client_request()?;
        let headers = request.headers_mut();
        headers.insert(
            AUTHORIZATION,
            HeaderValue::from_str(&format!("Bearer {}", api_key))?,
        );
        // Required opt-in header for the realtime beta API.
        headers.insert("OpenAI-Beta", HeaderValue::from_static("realtime=v1"));

        let (stream, _response) = connect_async(request).await?;
        let (mut sink, _source) = stream.split();
        // Close right away — the handshake alone did the warming.
        let _ = sink.send(Message::Close(None)).await;
        if crate::verbose::is_verbose() {
            eprintln!("[warmup] OpenAI WebSocket warmup succeeded");
        }
        Ok::<_, anyhow::Error>(())
    };

    match timeout(Duration::from_secs(WARMUP_TIMEOUT_SECS), attempt).await {
        Ok(Ok(())) => {}
        Ok(Err(e)) if crate::verbose::is_verbose() => {
            eprintln!(
                "[warmup] OpenAI WebSocket warmup failed (still warms DNS/TLS): {}",
                e
            );
        }
        Err(_) if crate::verbose::is_verbose() => {
            eprintln!("[warmup] OpenAI WebSocket warmup timeout");
        }
        _ => {}
    }
    Ok(())
}
/// Performs a throwaway WebSocket handshake against the Deepgram streaming
/// endpoint to warm DNS/TCP/TLS, then immediately closes the connection.
///
/// Best-effort: handshake failures and timeouts are logged (verbose only)
/// and never propagated; always returns `Ok(())`.
async fn warmup_websocket_deepgram(api_key: &str) -> Result<()> {
    let attempt = async {
        let mut request = DEEPGRAM_REALTIME_WS_URL.into_client_request()?;
        // Deepgram authenticates with the "Token" scheme.
        request.headers_mut().insert(
            AUTHORIZATION,
            HeaderValue::from_str(&format!("Token {}", api_key))?,
        );

        let (stream, _response) = connect_async(request).await?;
        let (mut sink, _source) = stream.split();
        // Close right away — the handshake alone did the warming.
        let _ = sink.send(Message::Close(None)).await;
        if crate::verbose::is_verbose() {
            eprintln!("[warmup] Deepgram WebSocket warmup succeeded");
        }
        Ok::<_, anyhow::Error>(())
    };

    match timeout(Duration::from_secs(WARMUP_TIMEOUT_SECS), attempt).await {
        Ok(Ok(())) => {}
        Ok(Err(e)) if crate::verbose::is_verbose() => {
            eprintln!(
                "[warmup] Deepgram WebSocket warmup failed (still warms DNS/TLS): {}",
                e
            );
        }
        Err(_) if crate::verbose::is_verbose() => {
            eprintln!("[warmup] Deepgram WebSocket warmup timeout");
        }
        _ => {}
    }
    Ok(())
}