mod config;
use clap::Parser as _;
use config::Config;
use onwards::{
AppState, build_metrics_layer_and_handle, build_metrics_router, build_router,
create_openai_sanitizer,
target::{Targets, WatchedFile},
};
use tokio::{net::TcpListener, task::JoinSet};
use tracing::{error, info, instrument};
#[tokio::main]
#[instrument]
pub async fn main() -> anyhow::Result<()> {
    // Honour RUST_LOG if present; otherwise default the log level to "info".
    let env_filter = tracing_subscriber::EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("info"));
    tracing_subscriber::fmt().with_env_filter(env_filter).init();

    let config = Config::parse().validate()?;
    info!("Starting AI Gateway with config: {:?}", config);

    // Load the proxy targets from the configured targets file.
    let targets = Targets::from_config_file(&config.targets)
        .await
        .map_err(|e| anyhow::anyhow!("Failed to create targets from config: {}", e))?;

    // Optionally hot-reload targets whenever the file changes on disk.
    if config.watch {
        targets.receive_updates(WatchedFile(config.targets)).await?;
    }

    let mut server_tasks = JoinSet::new();

    // When metrics are enabled, stand up a dedicated Prometheus scrape
    // endpoint and keep the layer so the gateway router can record requests.
    let metrics_layer = if config.metrics {
        let (layer, handle) = build_metrics_layer_and_handle(config.metrics_prefix);
        let metrics_router = build_metrics_router(handle);
        let metrics_addr = format!("0.0.0.0:{}", config.metrics_port);
        let metrics_listener = TcpListener::bind(&metrics_addr).await?;
        server_tasks.spawn(axum::serve(metrics_listener, metrics_router).into_future());
        info!("Metrics endpoint enabled on {}", metrics_addr);
        Some(layer)
    } else {
        info!("Metrics endpoint disabled");
        None
    };

    // Build the gateway router, sanitizing responses to OpenAI format, and
    // attach the metrics layer only when one was created above.
    let state = AppState::new(targets).with_response_transform(create_openai_sanitizer());
    let router = match metrics_layer {
        Some(layer) => build_router(state).layer(layer),
        None => build_router(state),
    };

    let gateway_addr = format!("0.0.0.0:{}", config.port);
    let gateway_listener = TcpListener::bind(&gateway_addr).await?;
    server_tasks.spawn(axum::serve(gateway_listener, router).into_future());
    info!("AI Gateway listening on {}", gateway_addr);

    // Block until the first server task finishes; either outcome (clean exit
    // or error) means the gateway can no longer serve, so propagate it.
    // Dropping the JoinSet aborts any remaining task.
    match server_tasks.join_next().await {
        Some(joined) => Ok(joined??),
        None => {
            error!("No server tasks were spawned");
            Err(anyhow::anyhow!("No server tasks were spawned"))
        }
    }
}