use llmg_core::{
provider::{ApiKeyCredentials, Credentials, LlmError, Provider},
types::{ChatCompletionRequest, ChatCompletionResponse, EmbeddingRequest, EmbeddingResponse},
};
use std::sync::Arc;
/// HTTP client for the OpenRouter API (chat completions and embeddings).
///
/// Cheap to clone: the underlying `reqwest::Client` and credentials are
/// shared (connection pool and `Arc` respectively).
#[derive(Debug, Clone)]
pub struct OpenRouterClient {
// Shared reqwest client; reused across all requests made by this client.
http_client: reqwest::Client,
// API base URL without a trailing slash, e.g. "https://openrouter.ai/api/v1".
base_url: String,
// Bearer-token credentials applied to every outgoing request.
credentials: Arc<dyn Credentials>,
// Optional app name, sent as the `X-Title` header when set.
app_name: Option<String>,
// Optional referer, sent as the `HTTP-Referer` header when set.
http_referer: Option<String>,
}
/// OpenRouter-specific options layered on top of a standard chat request.
///
/// All fields are optional; `Default` yields "no extras". Fields set to
/// `None` are omitted from the serialized request body entirely.
#[derive(Debug, Clone, Default)]
pub struct OpenRouterExtras {
// Provider routing preferences, passed through as opaque JSON
// (e.g. {"order": ["Anthropic", "OpenAI"]} — see the unit test below).
pub provider: Option<serde_json::Value>,
// Prompt transforms to apply, e.g. "middle-out".
pub transforms: Option<Vec<String>>,
// Routing strategy name, e.g. "fallback".
pub route: Option<String>,
// Candidate/fallback model list, in priority order.
pub models: Option<Vec<String>>,
}
/// Wire-format request body: the standard chat request flattened together
/// with the optional OpenRouter extras at the same JSON level.
///
/// Each extra is skipped during serialization when `None`, so a request
/// without extras serializes identically to a plain `ChatCompletionRequest`.
#[derive(Debug, serde::Serialize)]
struct OpenRouterChatRequest {
#[serde(flatten)]
base: ChatCompletionRequest,
#[serde(skip_serializing_if = "Option::is_none")]
provider: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
transforms: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
route: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
models: Option<Vec<String>>,
}
impl OpenRouterClient {
    /// Builds a client from environment variables.
    ///
    /// Reads `OPENROUTER_API_KEY` (required), `OPENROUTER_API_BASE`
    /// (optional, defaults to the public endpoint), `OPENROUTER_APP_NAME`,
    /// and `OPENROUTER_HTTP_REFERER` (both optional).
    ///
    /// # Errors
    ///
    /// Returns `LlmError::AuthError` when `OPENROUTER_API_KEY` is unset.
    pub fn from_env() -> Result<Self, LlmError> {
        let api_key = std::env::var("OPENROUTER_API_KEY").map_err(|_| LlmError::AuthError)?;
        let base_url = std::env::var("OPENROUTER_API_BASE")
            .unwrap_or_else(|_| "https://openrouter.ai/api/v1".to_string());
        let app_name = std::env::var("OPENROUTER_APP_NAME").ok();
        let http_referer = std::env::var("OPENROUTER_HTTP_REFERER").ok();
        Ok(Self::with_config(api_key, base_url, app_name, http_referer))
    }

    /// Creates a client for the default public OpenRouter endpoint.
    pub fn new(api_key: impl Into<String>) -> Self {
        Self::with_config(
            api_key,
            "https://openrouter.ai/api/v1".to_string(),
            None,
            None,
        )
    }

    /// Creates a fully configured client.
    ///
    /// `app_name` is sent as the `X-Title` header and `http_referer` as
    /// `HTTP-Referer` on every chat request (OpenRouter app attribution).
    pub fn with_config(
        api_key: impl Into<String>,
        base_url: impl Into<String>,
        app_name: Option<String>,
        http_referer: Option<String>,
    ) -> Self {
        let api_key = api_key.into();
        Self {
            http_client: reqwest::Client::new(),
            base_url: base_url.into(),
            credentials: Arc::new(ApiKeyCredentials::bearer(api_key)),
            app_name,
            http_referer,
        }
    }

    /// Overrides the base URL (builder style).
    pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
        self.base_url = url.into();
        self
    }

    /// Sets the app name sent as `X-Title` (builder style).
    pub fn with_app_name(mut self, name: impl Into<String>) -> Self {
        self.app_name = Some(name.into());
        self
    }

    /// Sets the referer sent as `HTTP-Referer` (builder style).
    pub fn with_http_referer(mut self, referer: impl Into<String>) -> Self {
        self.http_referer = Some(referer.into());
        self
    }

    /// Builds an authenticated `POST {base_url}/chat/completions` request,
    /// merging any OpenRouter-specific `extras` into the body and attaching
    /// the optional attribution headers.
    fn build_request(
        &self,
        request: ChatCompletionRequest,
        extras: Option<OpenRouterExtras>,
    ) -> Result<reqwest::Request, LlmError> {
        let url = format!("{}/chat/completions", self.base_url);
        // `OpenRouterExtras` derives Default with all-None fields, so the
        // no-extras case needs no separate construction branch.
        let extras = extras.unwrap_or_default();
        let openrouter_req = OpenRouterChatRequest {
            base: request,
            provider: extras.provider,
            transforms: extras.transforms,
            route: extras.route,
            models: extras.models,
        };
        let mut req_builder = self.http_client.post(&url).json(&openrouter_req);
        if let Some(ref app_name) = self.app_name {
            req_builder = req_builder.header("X-Title", app_name);
        }
        if let Some(ref referer) = self.http_referer {
            req_builder = req_builder.header("HTTP-Referer", referer);
        }
        let mut req = req_builder
            .build()
            .map_err(|e| LlmError::HttpError(e.to_string()))?;
        // Credentials are applied to the built request last so they cannot
        // be clobbered by the attribution headers above.
        self.credentials.apply(&mut req)?;
        Ok(req)
    }

    /// Executes a prepared chat request and decodes the response.
    ///
    /// Shared by the plain and extras code paths (previously duplicated).
    /// Non-2xx statuses surface as `LlmError::ApiError` carrying the raw
    /// response body; transport and decode failures become
    /// `LlmError::HttpError`.
    async fn execute_chat(
        &self,
        req: reqwest::Request,
    ) -> Result<ChatCompletionResponse, LlmError> {
        let response = self
            .http_client
            .execute(req)
            .await
            .map_err(|e| LlmError::HttpError(e.to_string()))?;
        if !response.status().is_success() {
            let status = response.status().as_u16();
            let text = response.text().await.unwrap_or_default();
            return Err(LlmError::ApiError {
                status,
                message: text,
            });
        }
        response
            .json::<ChatCompletionResponse>()
            .await
            .map_err(|e| LlmError::HttpError(e.to_string()))
    }

    /// Sends a plain chat-completion request (no OpenRouter extras).
    async fn make_request(
        &self,
        request: ChatCompletionRequest,
    ) -> Result<ChatCompletionResponse, LlmError> {
        let req = self.build_request(request, None)?;
        self.execute_chat(req).await
    }

    /// Sends a chat-completion request with OpenRouter-specific routing
    /// options (provider preferences, transforms, route, fallback models).
    pub async fn chat_completion_with_extras(
        &self,
        request: ChatCompletionRequest,
        extras: OpenRouterExtras,
    ) -> Result<ChatCompletionResponse, LlmError> {
        let req = self.build_request(request, Some(extras))?;
        self.execute_chat(req).await
    }
}
#[async_trait::async_trait]
impl Provider for OpenRouterClient {
    /// Standard chat-completion entry point; delegates to the client's
    /// internal request path without any OpenRouter extras.
    async fn chat_completion(
        &self,
        request: ChatCompletionRequest,
    ) -> Result<ChatCompletionResponse, LlmError> {
        self.make_request(request).await
    }

    /// Sends an embeddings request to `POST {base_url}/embeddings`.
    ///
    /// Transport and decode failures map to `LlmError::HttpError`; non-2xx
    /// statuses map to `LlmError::ApiError` with the raw response body.
    async fn embeddings(&self, request: EmbeddingRequest) -> Result<EmbeddingResponse, LlmError> {
        let endpoint = format!("{}/embeddings", self.base_url);
        let mut http_request = self
            .http_client
            .post(&endpoint)
            .json(&request)
            .build()
            .map_err(|e| LlmError::HttpError(e.to_string()))?;
        // Attach credentials after building, same as the chat path.
        self.credentials.apply(&mut http_request)?;
        let response = self
            .http_client
            .execute(http_request)
            .await
            .map_err(|e| LlmError::HttpError(e.to_string()))?;
        // Capture the status before consuming the body.
        let status = response.status();
        if status.is_success() {
            response
                .json::<EmbeddingResponse>()
                .await
                .map_err(|e| LlmError::HttpError(e.to_string()))
        } else {
            let body = response.text().await.unwrap_or_default();
            Err(LlmError::ApiError {
                status: status.as_u16(),
                message: body,
            })
        }
    }

    /// Stable identifier for this provider implementation.
    fn provider_name(&self) -> &'static str {
        "openrouter"
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use llmg_core::types::Message;
    use std::sync::{Mutex, OnceLock};

    /// Process-wide lock serializing tests that mutate environment
    /// variables. `cargo test` runs tests on multiple threads and env vars
    /// are process-global, so unsynchronized remove/set races with any
    /// concurrent test reading the same variable.
    fn env_lock() -> &'static Mutex<()> {
        static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
        LOCK.get_or_init(|| Mutex::new(()))
    }

    #[test]
    fn test_openrouter_client_creation() {
        let client = OpenRouterClient::new("test-key");
        assert_eq!(client.provider_name(), "openrouter");
    }

    #[test]
    fn test_from_env_missing_key() {
        // Hold the lock for the whole remove/restore window; recover from
        // poisoning so one failed env test doesn't cascade into others.
        let _guard = env_lock().lock().unwrap_or_else(|p| p.into_inner());
        let original = std::env::var("OPENROUTER_API_KEY").ok();
        std::env::remove_var("OPENROUTER_API_KEY");
        let result = OpenRouterClient::from_env();
        // Restore before asserting so a failure still leaves the process
        // environment intact for the remaining tests.
        if let Some(key) = original {
            std::env::set_var("OPENROUTER_API_KEY", key);
        }
        assert!(result.is_err());
    }

    #[test]
    fn test_custom_config() {
        let client = OpenRouterClient::with_config(
            "test-key",
            "https://custom.openrouter.ai/api/v1",
            Some("MyApp".to_string()),
            Some("https://myapp.com".to_string()),
        );
        assert_eq!(client.base_url, "https://custom.openrouter.ai/api/v1");
        assert_eq!(client.app_name, Some("MyApp".to_string()));
        assert_eq!(client.http_referer, Some("https://myapp.com".to_string()));
    }

    #[test]
    fn test_extras_builder() {
        let extras = OpenRouterExtras {
            provider: Some(serde_json::json!({"order": ["Anthropic", "OpenAI"]})),
            transforms: Some(vec!["middle-out".to_string()]),
            route: Some("fallback".to_string()),
            models: Some(vec!["anthropic/claude-3-opus".to_string()]),
        };
        let request = ChatCompletionRequest {
            model: "anthropic/claude-3-opus".to_string(),
            messages: vec![Message::User {
                content: "Hello".to_string(),
                name: None,
            }],
            temperature: None,
            max_tokens: None,
            stream: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            stop: None,
            user: None,
            tools: None,
            tool_choice: None,
        };
        let client = OpenRouterClient::new("test-key").with_app_name("test-app");
        let built_req = client.build_request(request, Some(extras)).unwrap();
        // Header names are case-insensitive in reqwest's HeaderMap.
        assert!(built_req.headers().contains_key("x-title"));
        let body = String::from_utf8_lossy(built_req.body().unwrap().as_bytes().unwrap());
        assert!(body.contains("provider"));
    }
}
#[cfg(all(test, feature = "integration"))]
mod rig_integration_tests {
    //! Live integration tests against the OpenRouter API via `RigAdapter`.
    //! All tests are `#[ignore]`d and require `OPENROUTER_API_KEY`; run
    //! them explicitly with the `integration` feature enabled.
    use crate::OpenRouterClient;
    use llmg_core::provider::Provider;
    use llmg_core::rig::RigAdapter;
    use llmg_core::types::{ChatCompletionRequest, Message};

    #[tokio::test]
    #[ignore]
    async fn test_rig_adapter_aurora_alpha_single_turn() {
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .expect("OPENROUTER_API_KEY must be set for integration tests");
        let client = OpenRouterClient::new(api_key);
        let adapter = RigAdapter::new(client, "openrouter/aurora-alpha");
        let completion = adapter
            .completion()
            .system("You are a helpful assistant. Answer in brief.")
            .user("What is 2+2?")
            .send()
            .await;
        // Surface the error body before the assert for easier debugging.
        if let Err(e) = &completion {
            eprintln!("Error: {:?}", e);
        }
        assert!(completion.is_ok());
        let result = completion.unwrap();
        assert!(!result.content.is_empty(), "Response should not be empty");
        println!("Aurora Alpha response: {}", result.content);
    }

    #[tokio::test]
    #[ignore]
    async fn test_rig_adapter_aurora_alpha_multi_turn() {
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .expect("OPENROUTER_API_KEY must be set for integration tests");
        let client = OpenRouterClient::new(api_key);
        let adapter1 = RigAdapter::new(client.clone(), "openrouter/aurora-alpha");
        let completion1 = adapter1
            .completion()
            .system("You are a helpful math tutor.")
            .user("What is 5+3?")
            .send()
            .await
            .expect("First request should succeed");
        assert!(!completion1.content.is_empty());
        println!("Turn 1 - Question: 5+3");
        println!("Turn 1 - Answer: {}", completion1.content);
        // Second turn replays the history plus the prior answer.
        let adapter2 = RigAdapter::new(client.clone(), "openrouter/aurora-alpha");
        let completion2 = adapter2
            .completion()
            .system("You are a helpful math tutor.")
            .user("What is 5+3?")
            // `content` is already a String — no format! round-trip needed
            // (was clippy::useless_format).
            .user(&completion1.content)
            .user("Now multiply that by 2")
            .send()
            .await
            .expect("Second request should succeed");
        assert!(!completion2.content.is_empty());
        println!("Turn 2 - Question: Now multiply that by 2");
        println!("Turn 2 - Answer: {}", completion2.content);
    }

    #[tokio::test]
    #[ignore]
    async fn test_rig_adapter_gpt_oss_120b_single_turn() {
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .expect("OPENROUTER_API_KEY must be set for integration tests");
        let client = OpenRouterClient::new(api_key);
        let adapter = RigAdapter::new(client, "openai/gpt-oss-120b:free");
        let completion = adapter
            .completion()
            .system("You are a helpful assistant. Answer in brief.")
            .user("What is the capital of France?")
            .send()
            .await;
        assert!(completion.is_ok());
        let result = completion.unwrap();
        assert!(!result.content.is_empty(), "Response should not be empty");
        println!("GPT-OSS-120B response: {}", result.content);
    }

    #[tokio::test]
    #[ignore]
    async fn test_rig_adapter_gpt_oss_120b_multi_turn() {
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .expect("OPENROUTER_API_KEY must be set for integration tests");
        let client = OpenRouterClient::new(api_key);
        let adapter1 = RigAdapter::new(client.clone(), "openai/gpt-oss-120b:free");
        let completion1 = adapter1
            .completion()
            .system("You are a helpful geography teacher.")
            .user("What is the capital of Japan?")
            .send()
            .await
            .expect("First request should succeed");
        assert!(!completion1.content.is_empty());
        println!("Turn 1 - Question: What is the capital of Japan?");
        println!("Turn 1 - Answer: {}", completion1.content);
        let adapter2 = RigAdapter::new(client, "openai/gpt-oss-120b:free");
        let completion2 = adapter2
            .completion()
            .system("You are a helpful geography teacher.")
            .user("What is the capital of Japan?")
            // `content` is already a String — no format! round-trip needed
            // (was clippy::useless_format).
            .user(&completion1.content)
            .user("What is its population?")
            .send()
            .await
            .expect("Second request should succeed");
        assert!(!completion2.content.is_empty());
        println!("Turn 2 - Question: What is its population?");
        println!("Turn 2 - Answer: {}", completion2.content);
    }

    #[tokio::test]
    #[ignore]
    async fn test_rig_adapter_model_comparison() {
        let api_key = std::env::var("OPENROUTER_API_KEY")
            .expect("OPENROUTER_API_KEY must be set for integration tests");
        let client1 = OpenRouterClient::new(api_key.clone());
        let adapter1 = RigAdapter::new(client1, "openrouter/aurora-alpha");
        let result1 = adapter1
            .completion()
            .system("Answer with just the word 'Hello'.")
            .user("Say hi")
            .send()
            .await
            .expect("Aurora Alpha should work");
        let client2 = OpenRouterClient::new(api_key);
        let adapter2 = RigAdapter::new(client2, "openai/gpt-oss-120b:free");
        let result2 = adapter2
            .completion()
            .system("Answer with just the word 'Hello'.")
            .user("Say hi")
            .send()
            .await
            .expect("GPT-OSS-120B should work");
        println!("Aurora Alpha: {}", result1.content);
        println!("GPT-OSS-120B: {}", result2.content);
        assert!(!result1.content.is_empty());
        assert!(!result2.content.is_empty());
    }
}