#[cfg(feature = "openrouter")]
use reqwest;
use thiserror::Error;
/// Errors produced by the OpenRouter client.
///
/// Variants other than `FeatureDisabled` only exist when the `openrouter`
/// cargo feature is enabled, so callers compiled without the feature can
/// still match on the enum.
#[derive(Error, Debug)]
pub enum OpenRouterError {
    /// The named cargo feature is compiled out; returned by the stub impl.
    #[error("Feature not enabled: {0}")]
    FeatureDisabled(String),
    /// Non-success HTTP response from the API; carries the response body text.
    #[cfg(feature = "openrouter")]
    #[error("API error: {0}")]
    ApiError(String),
    /// Transport-level failure (connect, TLS, body read), converted from reqwest.
    #[cfg(feature = "openrouter")]
    #[error("HTTP error: {0}")]
    HttpError(#[from] reqwest::Error),
    /// Invalid constructor input (empty API key or model name).
    #[cfg(feature = "openrouter")]
    #[error("Invalid configuration: {0}")]
    ConfigError(String),
    /// The API answered with HTTP 429.
    #[cfg(feature = "openrouter")]
    #[error("Rate limit exceeded")]
    RateLimited,
    /// Input exceeded the summarization size cap: (actual, max), in bytes.
    #[cfg(feature = "openrouter")]
    #[error("Content too long: {0} characters (max: {1})")]
    ContentTooLong(usize, usize),
}

/// Convenience alias used by every fallible API in this module.
pub type Result<T> = std::result::Result<T, OpenRouterError>;
/// Client for the OpenRouter chat-completions API (and compatible proxies).
///
/// NOTE(review): the derived `Debug` impl will print `api_key` verbatim —
/// consider redacting it if service instances are ever logged.
#[cfg(feature = "openrouter")]
#[derive(Debug)]
pub struct OpenRouterService {
    // HTTP client used for all requests (shared pooled client when available).
    client: reqwest::Client,
    // Bearer token sent in the Authorization header (may be overridden per
    // request by ANTHROPIC_AUTH_TOKEN when routing through a z.ai proxy).
    api_key: String,
    // Model identifier, e.g. "openai/gpt-3.5-turbo".
    model: String,
    // API root, e.g. "https://openrouter.ai/api/v1" (no trailing slash).
    base_url: String,
}
#[cfg(feature = "openrouter")]
impl OpenRouterService {
    /// Creates a new service for the given credentials and model.
    ///
    /// # Errors
    /// Returns `ConfigError` when `api_key` or `model` is empty.
    pub fn new(api_key: &str, model: &str) -> Result<Self> {
        if api_key.is_empty() {
            return Err(OpenRouterError::ConfigError(
                "API key cannot be empty".to_string(),
            ));
        }
        if model.is_empty() {
            return Err(OpenRouterError::ConfigError(
                "Model name cannot be empty".to_string(),
            ));
        }
        // Prefer the shared, pre-configured HTTP client; fall back to a plain
        // reqwest client if it cannot be constructed.
        let client =
            crate::http_client::create_api_client().unwrap_or_else(|_| reqwest::Client::new());
        let base_url = Self::determine_base_url(model);
        Ok(Self {
            client,
            api_key: api_key.to_string(),
            model: model.to_string(),
            base_url,
        })
    }

    /// Picks the API root for `model`.
    ///
    /// Anthropic/Claude models honor `ANTHROPIC_BASE_URL` (z.ai proxy);
    /// everything else uses `OPENROUTER_BASE_URL` or the OpenRouter default.
    fn determine_base_url(model: &str) -> String {
        if model.starts_with("anthropic/") || model.contains("claude") {
            if let Ok(anthropic_base_url) = std::env::var("ANTHROPIC_BASE_URL") {
                log::info!(
                    "🔗 Using z.ai proxy for Anthropic model: {} -> {}",
                    model,
                    anthropic_base_url
                );
                return anthropic_base_url;
            }
        }
        std::env::var("OPENROUTER_BASE_URL")
            .unwrap_or_else(|_| "https://openrouter.ai/api/v1".to_string())
    }

    /// Returns the bearer token to use for a request.
    ///
    /// When an Anthropic/Claude model is routed through a z.ai proxy,
    /// `ANTHROPIC_AUTH_TOKEN` (if set) overrides `provided_key`.
    fn get_api_key(&self, provided_key: &str) -> String {
        if (self.model.starts_with("anthropic/") || self.model.contains("claude"))
            && self.base_url.contains("z.ai")
        {
            if let Ok(anthropic_token) = std::env::var("ANTHROPIC_AUTH_TOKEN") {
                log::info!("🔑 Using ANTHROPIC_AUTH_TOKEN for z.ai proxy");
                return anthropic_token;
            }
        }
        provided_key.to_string()
    }

    /// POSTs `request_body` to `{base_url}/chat/completions` with the standard
    /// headers and returns the parsed JSON response.
    async fn post_chat_completions(
        &self,
        request_body: &serde_json::Value,
    ) -> Result<serde_json::Value> {
        let api_key = self.get_api_key(&self.api_key);
        let response = self
            .client
            .post(format!("{}/chat/completions", self.base_url))
            .header("Authorization", format!("Bearer {}", api_key))
            .header("Content-Type", "application/json")
            .header("HTTP-Referer", "https://terraphim.ai")
            .header("X-Title", "Terraphim AI")
            .json(request_body)
            .send()
            .await?;
        Self::json_or_error(response).await
    }

    /// Maps an HTTP response to JSON, turning 429 into `RateLimited` and any
    /// other non-success status into `ApiError` carrying the body text.
    async fn json_or_error(response: reqwest::Response) -> Result<serde_json::Value> {
        if response.status() == 429 {
            log::warn!("OpenRouter API rate limit exceeded");
            return Err(OpenRouterError::RateLimited);
        }
        if !response.status().is_success() {
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            log::error!("OpenRouter API error: {}", error_text);
            return Err(OpenRouterError::ApiError(error_text));
        }
        Ok(response.json().await?)
    }

    /// Extracts `choices[0].message.content` from a chat-completions response,
    /// if present and a string.
    fn extract_content(response_json: &serde_json::Value) -> Option<&str> {
        response_json
            .get("choices")
            .and_then(|choices| choices.get(0))
            .and_then(|choice| choice.get("message"))
            .and_then(|message| message.get("content"))
            .and_then(|content| content.as_str())
    }

    /// Truncates `summary` to at most `max_length - 3` bytes WITHOUT splitting
    /// a multi-byte UTF-8 character, then appends "..." to mark the cut.
    ///
    /// (Byte-slicing at an arbitrary index, as the previous implementation
    /// did, panics when the index falls inside a multi-byte character.)
    fn truncate_summary(summary: &str, max_length: usize) -> String {
        let mut end = max_length.saturating_sub(3).min(summary.len());
        while end > 0 && !summary.is_char_boundary(end) {
            end -= 1;
        }
        format!("{}...", &summary[..end])
    }

    /// Generates a summary of `content` roughly `max_length` characters long.
    ///
    /// # Errors
    /// - `ContentTooLong` when `content` exceeds 4000 bytes.
    /// - `RateLimited` / `ApiError` / `HttpError` on API failures.
    pub async fn generate_summary(&self, content: &str, max_length: usize) -> Result<String> {
        // Cap request size; measured in bytes (matches `str::len`).
        const MAX_CONTENT_LENGTH: usize = 4000;
        if content.len() > MAX_CONTENT_LENGTH {
            return Err(OpenRouterError::ContentTooLong(
                content.len(),
                MAX_CONTENT_LENGTH,
            ));
        }
        // Empty / whitespace-only input short-circuits without an API call.
        if content.trim().is_empty() {
            return Ok("No content available for summarization.".to_string());
        }
        let prompt = format!(
            "Please provide a concise and informative summary of the following article content. The summary should be approximately {} characters long and capture the main ideas, key points, and essential information. Focus on being clear and helpful to someone browsing search results.\n\nArticle content:\n{}",
            max_length, content
        );
        let request_body = serde_json::json!({
            "model": self.model,
            "messages": [
                {
                    "role": "user",
                    "content": prompt
                }
            ],
            "max_tokens": self.calculate_max_tokens(max_length),
            // Low temperature for consistent, factual summaries.
            "temperature": 0.3,
            "top_p": 0.9,
            "stream": false
        });
        log::debug!("Sending OpenRouter API request for model: {}", self.model);
        let response_json = self.post_chat_completions(&request_body).await?;
        let summary = Self::extract_content(&response_json)
            .unwrap_or("Summary generation failed")
            .trim()
            .to_string();
        log::info!(
            "Generated summary: {} characters for model: {}",
            summary.len(),
            self.model
        );
        // Allow a modest (50-byte) overshoot before truncating.
        if summary.len() > max_length + 50 {
            Ok(Self::truncate_summary(&summary, max_length))
        } else {
            Ok(summary)
        }
    }

    /// Converts a character budget into a token budget (~3 chars per token),
    /// clamped to [50, 500].
    fn calculate_max_tokens(&self, max_chars: usize) -> u32 {
        let tokens = (max_chars / 3).clamp(50, 500);
        tokens as u32
    }

    /// True when both an API key and a model name are set.
    pub fn is_configured(&self) -> bool {
        !self.api_key.is_empty() && !self.model.is_empty()
    }

    /// Returns the configured model identifier.
    pub fn get_model(&self) -> &str {
        &self.model
    }

    /// Curated list of (model id, strength, positioning) suggestions.
    pub fn get_recommended_models() -> Vec<(&'static str, &'static str, &'static str)> {
        vec![
            (
                "openai/gpt-3.5-turbo",
                "Fast and affordable",
                "General purpose",
            ),
            ("openai/gpt-4", "High quality summaries", "Premium quality"),
            (
                "anthropic/claude-3-sonnet",
                "Balanced performance",
                "Good middle ground",
            ),
            (
                "anthropic/claude-3-haiku",
                "Fast processing",
                "High throughput",
            ),
            (
                "mistralai/mixtral-8x7b-instruct",
                "Open source option",
                "Cost effective",
            ),
        ]
    }

    /// Raw chat-completion call with caller-supplied messages.
    ///
    /// `max_tokens` defaults to 512 and `temperature` to 0.2. Returns the
    /// assistant message content, or "" when the response has none.
    pub async fn chat_completion(
        &self,
        messages: Vec<serde_json::Value>,
        max_tokens: Option<u32>,
        temperature: Option<f32>,
    ) -> Result<String> {
        let request_body = serde_json::json!({
            "model": self.model,
            "messages": messages,
            "max_tokens": max_tokens.unwrap_or(512),
            "temperature": temperature.unwrap_or(0.2),
            "stream": false
        });
        let response_json = self.post_chat_completions(&request_body).await?;
        Ok(Self::extract_content(&response_json)
            .unwrap_or("")
            .to_string())
    }

    /// Lists model ids available at the configured endpoint.
    ///
    /// Accepts both response shapes seen in the wild:
    /// `{"data": [{"id": ...}, ...]}` (OpenRouter) and
    /// `{"models": ["...", ...]}`; returns an empty list for anything else.
    pub async fn list_models(&self) -> Result<Vec<String>> {
        let api_key = self.get_api_key(&self.api_key);
        let response = self
            .client
            .get(format!("{}/models", self.base_url))
            .header("Authorization", format!("Bearer {}", api_key))
            .header("HTTP-Referer", "https://terraphim.ai")
            .header("X-Title", "Terraphim AI")
            .send()
            .await?;
        let json = Self::json_or_error(response).await?;
        let models = if let Some(arr) = json.get("data").and_then(|v| v.as_array()) {
            arr.iter()
                .filter_map(|m| {
                    m.get("id")
                        .and_then(|id| id.as_str())
                        .map(|s| s.to_string())
                })
                .collect::<Vec<_>>()
        } else if let Some(arr) = json.get("models").and_then(|v| v.as_array()) {
            arr.iter()
                .filter_map(|v| v.as_str().map(|s| s.to_string()))
                .collect()
        } else {
            Vec::new()
        };
        Ok(models)
    }
}
/// Zero-sized stand-in compiled when the `openrouter` feature is disabled,
/// so callers can link against the same API surface.
#[cfg(not(feature = "openrouter"))]
pub struct OpenRouterService;
#[cfg(not(feature = "openrouter"))]
impl OpenRouterService {
    /// Shared failure value for every operation while the feature is compiled out.
    fn feature_disabled<T>() -> Result<T> {
        Err(OpenRouterError::FeatureDisabled("openrouter".to_string()))
    }

    /// Always fails with `FeatureDisabled`; the service cannot be constructed.
    pub fn new(_api_key: &str, _model: &str) -> Result<Self> {
        Self::feature_disabled()
    }

    /// Always fails with `FeatureDisabled` (unreachable in practice, since
    /// `new` never yields an instance).
    pub async fn generate_summary(&self, _content: &str, _max_length: usize) -> Result<String> {
        Self::feature_disabled()
    }

    /// The stub is never configured.
    pub fn is_configured(&self) -> bool {
        false
    }

    /// No model is available without the feature.
    pub fn get_model(&self) -> &str {
        ""
    }

    /// No recommendations without the feature.
    pub fn get_recommended_models() -> Vec<(&'static str, &'static str, &'static str)> {
        vec![]
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Construction succeeds with non-empty inputs when the feature is on;
    // with the feature off, `new` must fail with FeatureDisabled.
    #[tokio::test]
    async fn test_service_creation() {
        #[cfg(feature = "openrouter")]
        {
            let service = OpenRouterService::new("sk-or-v1-test-key", "openai/gpt-3.5-turbo");
            assert!(service.is_ok());
            let service = service.unwrap();
            assert!(service.is_configured());
            assert_eq!(service.get_model(), "openai/gpt-3.5-turbo");
        }
        #[cfg(not(feature = "openrouter"))]
        {
            let result = OpenRouterService::new("test", "test");
            assert!(matches!(
                result.unwrap_err(),
                OpenRouterError::FeatureDisabled(_)
            ));
        }
    }

    // Empty API key or empty model name must be rejected as ConfigError.
    #[tokio::test]
    async fn test_configuration_validation() {
        #[cfg(feature = "openrouter")]
        {
            let result = OpenRouterService::new("", "openai/gpt-3.5-turbo");
            assert!(matches!(
                result.unwrap_err(),
                OpenRouterError::ConfigError(_)
            ));
            let result = OpenRouterService::new("sk-or-v1-test", "");
            assert!(matches!(
                result.unwrap_err(),
                OpenRouterError::ConfigError(_)
            ));
        }
    }

    // Empty content short-circuits (no API call); content over the 4000-byte
    // cap is rejected with ContentTooLong.
    #[tokio::test]
    async fn test_content_validation() {
        #[cfg(feature = "openrouter")]
        {
            let service =
                OpenRouterService::new("sk-or-v1-test-key", "openai/gpt-3.5-turbo").unwrap();
            let result = service.generate_summary("", 100).await;
            assert!(result.is_ok());
            assert_eq!(result.unwrap(), "No content available for summarization.");
            let long_content = "a".repeat(5000);
            let result = service.generate_summary(&long_content, 100).await;
            assert!(matches!(
                result.unwrap_err(),
                OpenRouterError::ContentTooLong(_, _)
            ));
        }
    }

    // The curated list is non-empty (feature on) / empty (feature off) and
    // contains the flagship OpenAI and Anthropic entries.
    #[test]
    fn test_recommended_models() {
        let models = OpenRouterService::get_recommended_models();
        #[cfg(feature = "openrouter")]
        {
            assert!(!models.is_empty());
            assert!(models.len() >= 5);
            let model_names: Vec<&str> = models.iter().map(|(name, _, _)| *name).collect();
            assert!(model_names.contains(&"openai/gpt-3.5-turbo"));
            assert!(model_names.contains(&"anthropic/claude-3-sonnet"));
        }
        #[cfg(not(feature = "openrouter"))]
        assert!(models.is_empty());
    }

    // chars/3 clamped to [50, 500]: 150→50 (lower clamp), 300→100,
    // 1500→500, 3000→500 (upper clamp).
    #[test]
    fn test_max_tokens_calculation() {
        #[cfg(feature = "openrouter")]
        {
            let service = OpenRouterService::new("sk-or-v1-test", "test").unwrap();
            assert_eq!(service.calculate_max_tokens(150), 50);
            assert_eq!(service.calculate_max_tokens(300), 100);
            assert_eq!(service.calculate_max_tokens(1500), 500);
            assert_eq!(service.calculate_max_tokens(3000), 500);
        }
    }

    // NOTE(review): this test performs a real network request with an invalid
    // key and asserts only on the error class; it may be slow/flaky offline.
    #[cfg(feature = "openrouter")]
    #[tokio::test]
    async fn test_error_handling() {
        let service = OpenRouterService::new("invalid-key", "openai/gpt-3.5-turbo").unwrap();
        let result = service
            .generate_summary("This is test content for summarization.", 100)
            .await;
        assert!(result.is_err());
        match result.unwrap_err() {
            OpenRouterError::ApiError(_)
            | OpenRouterError::HttpError(_)
            | OpenRouterError::RateLimited => {
                // Any of these is an acceptable failure mode for a bad key.
            }
            other => panic!("Unexpected error type: {:?}", other),
        }
    }

    // Sanity-checks token budgets for typical prompt sizes (200→66, 500→166);
    // the unused content strings document intended prompt-size tiers.
    #[cfg(feature = "openrouter")]
    #[test]
    fn test_prompt_generation() {
        let service = OpenRouterService::new("sk-or-v1-test", "openai/gpt-3.5-turbo").unwrap();
        let _short_content = "Short content.";
        let _medium_content = "This is a medium-length piece of content that should be suitable for summarization. It contains multiple sentences and provides enough context for a meaningful summary to be generated.";
        let _long_content = "a".repeat(4000);
        assert_eq!(service.calculate_max_tokens(200), 66);
        assert_eq!(service.calculate_max_tokens(500), 166);
    }
}