// converge_provider/openrouter.rs
// Copyright 2024-2025 Aprio One AB, Sweden
// Author: Kenneth Pernyer, kenneth@aprio.one
// SPDX-License-Identifier: MIT
// See LICENSE file in the project root for full license information.

//! `OpenRouter` API provider (multi-provider aggregator).

use crate::common::{
    ChatCompletionRequest, ChatCompletionResponse, HttpProviderConfig, OpenAiCompatibleProvider,
    chat_response_to_llm_response,
};
use converge_traits::llm::{LlmError, LlmProvider, LlmRequest, LlmResponse};
use serde::Deserialize;

15/// `OpenRouter` API provider.
16///
17/// `OpenRouter` provides access to multiple LLM providers through a single API.
18/// Model names use the format: `provider/model-name` (e.g., `anthropic/claude-3-opus`)
19///
20/// # Example
21///
22/// ```ignore
23/// use converge_provider::OpenRouterProvider;
24/// use converge_traits::llm::{LlmProvider, LlmRequest};
25///
26/// let provider = OpenRouterProvider::new(
27///     "your-api-key",
28///     "anthropic/claude-3-opus"
29/// );
30///
31/// let request = LlmRequest::new("What is 2+2?");
32/// let response = provider.complete(&request)?;
33/// println!("{}", response.content);
34/// ```
35pub struct OpenRouterProvider {
36    config: HttpProviderConfig,
37}
38
39impl OpenRouterProvider {
40    /// Creates a new `OpenRouter` provider.
41    #[must_use]
42    pub fn new(api_key: impl Into<String>, model: impl Into<String>) -> Self {
43        Self {
44            config: HttpProviderConfig::new(api_key, model, "https://openrouter.ai/api/v1"),
45        }
46    }
47
48    /// Creates a provider using the `OPENROUTER_API_KEY` environment variable.
49    ///
50    /// # Errors
51    ///
52    /// Returns error if the environment variable is not set.
53    pub fn from_env(model: impl Into<String>) -> Result<Self, LlmError> {
54        let api_key = std::env::var("OPENROUTER_API_KEY")
55            .map_err(|_| LlmError::auth("OPENROUTER_API_KEY environment variable not set"))?;
56        Ok(Self::new(api_key, model))
57    }
58
59    /// Uses a custom base URL (for testing or proxies).
60    #[must_use]
61    pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
62        self.config.base_url = url.into();
63        self
64    }
65}
66
67impl OpenAiCompatibleProvider for OpenRouterProvider {
68    fn config(&self) -> &HttpProviderConfig {
69        &self.config
70    }
71
72    fn endpoint(&self) -> &'static str {
73        "/chat/completions"
74    }
75}
76
77impl LlmProvider for OpenRouterProvider {
78    fn name(&self) -> &'static str {
79        "openrouter"
80    }
81
82    fn model(&self) -> &str {
83        &self.config.model
84    }
85
86    fn complete(&self, request: &LlmRequest) -> Result<LlmResponse, LlmError> {
87        // OpenRouter has custom error handling and optional headers
88        let chat_request =
89            ChatCompletionRequest::from_llm_request(self.config.model.clone(), request);
90        let url = format!("{}{}", self.config.base_url, self.endpoint());
91
92        let http_response = self
93            .config
94            .client
95            .post(&url)
96            .header("Authorization", format!("Bearer {}", self.config.api_key))
97            .header("Content-Type", "application/json")
98            .header("HTTP-Referer", "https://github.com/converge-hey-sh") // Optional: for analytics
99            .header("X-Title", "Converge") // Optional: for analytics
100            .json(&chat_request)
101            .send()
102            .map_err(|e| LlmError::network(format!("Request failed: {e}")))?;
103
104        let status = http_response.status();
105
106        if !status.is_success() {
107            #[derive(Deserialize)]
108            struct OpenRouterError {
109                error: OpenRouterErrorDetail,
110            }
111            #[derive(Deserialize)]
112            struct OpenRouterErrorDetail {
113                message: String,
114                #[serde(rename = "type")]
115                error_type: Option<String>,
116            }
117
118            let error_body: OpenRouterError = http_response
119                .json()
120                .map_err(|e| LlmError::parse(format!("Failed to parse error: {e}")))?;
121
122            let error_type = error_body.error.error_type.as_deref().unwrap_or("unknown");
123            return match error_type {
124                "invalid_request_error" | "authentication_error" => {
125                    Err(LlmError::auth(error_body.error.message))
126                }
127                "rate_limit_error" => Err(LlmError::rate_limit(error_body.error.message)),
128                _ => Err(LlmError::provider(error_body.error.message)),
129            };
130        }
131
132        let api_response: ChatCompletionResponse = http_response
133            .json()
134            .map_err(|e| LlmError::parse(format!("Failed to parse response: {e}")))?;
135
136        chat_response_to_llm_response(api_response)
137    }
138
139    fn provenance(&self, request_id: &str) -> String {
140        format!("openrouter:{}:{}", self.config.model, request_id)
141    }
142}