//! OpenRouter streaming LLM provider.
//!
//! File: llm/providers/openrouter/provider.rs
use async_openai::{Client, config::OpenAIConfig};

use super::types::OpenRouterChatRequest;
use crate::provider::get_context_window;
use crate::providers::openai_compatible::{build_chat_request, streaming::create_custom_stream_generic};
use crate::{Context, LlmError, LlmResponseStream, ProviderFactory, Result, StreamingModelProvider};
6
/// Streaming LLM provider backed by the OpenRouter API.
///
/// OpenRouter exposes an OpenAI-compatible endpoint, so this wraps an
/// `async_openai` client pointed at the OpenRouter base URL.
pub struct OpenRouterProvider {
    // OpenAI-compatible HTTP client, configured with the OpenRouter base URL.
    client: Client<OpenAIConfig>,
    // Model identifier forwarded to OpenRouter in each request.
    model: String,
}
11
12impl OpenRouterProvider {
13    pub fn new(api_key: String, model: String) -> Result<Self> {
14        let config = OpenAIConfig::new().with_api_key(api_key).with_api_base("https://openrouter.ai/api/v1");
15
16        let client = Client::with_config(config);
17        Ok(Self { client, model })
18    }
19
20    pub fn default(model: &str) -> Result<Self> {
21        let api_key = std::env::var("OPENROUTER_API_KEY")
22            .map_err(|_| LlmError::MissingApiKey("OPENROUTER_API_KEY".to_string()))?;
23
24        let config = OpenAIConfig::new().with_api_key(api_key).with_api_base("https://openrouter.ai/api/v1");
25
26        let client = Client::with_config(config);
27
28        Ok(Self { client, model: model.to_string() })
29    }
30}
31
32impl ProviderFactory for OpenRouterProvider {
33    async fn from_env() -> Result<Self> {
34        let api_key = std::env::var("OPENROUTER_API_KEY")
35            .map_err(|_| LlmError::MissingApiKey("OPENROUTER_API_KEY".to_string()))?;
36
37        let config = OpenAIConfig::new().with_api_key(api_key).with_api_base("https://openrouter.ai/api/v1");
38
39        let client = Client::with_config(config);
40
41        Ok(Self { client, model: String::new() })
42    }
43
44    fn with_model(mut self, model: &str) -> Self {
45        self.model = model.to_string();
46        self
47    }
48}
49
50impl StreamingModelProvider for OpenRouterProvider {
51    fn model(&self) -> Option<crate::LlmModel> {
52        format!("openrouter:{}", self.model).parse().ok()
53    }
54
55    fn context_window(&self) -> Option<u32> {
56        get_context_window("openrouter", &self.model)
57    }
58
59    fn stream_response(&self, context: &Context) -> LlmResponseStream {
60        // Build base request and convert to OpenRouter-specific format
61        // The From trait automatically adds usage tracking parameters
62        // See: https://openrouter.ai/docs/use-cases/usage-accounting
63        let mut request: OpenRouterChatRequest = match build_chat_request(&self.model, context) {
64            Ok(req) => req.into(),
65            Err(e) => return Box::pin(async_stream::stream! { yield Err(e); }),
66        };
67
68        if let Some(effort) = context.reasoning_effort() {
69            request.reasoning_effort = Some(effort);
70        }
71
72        create_custom_stream_generic(&self.client, request)
73    }
74
75    fn display_name(&self) -> String {
76        format!("OpenRouter ({})", self.model)
77    }
78}