// llm/providers/openrouter/provider.rs

1use super::types::OpenRouterChatRequest;
2use crate::provider::get_context_window;
3use crate::providers::openai_compatible::{
4    build_chat_request, streaming::create_custom_stream_generic,
5};
6use crate::{
7    Context, LlmError, LlmResponseStream, ProviderFactory, Result, StreamingModelProvider,
8};
9use async_openai::{Client, config::OpenAIConfig};
10
/// Streaming LLM provider backed by OpenRouter's OpenAI-compatible API.
///
/// Wraps an `async_openai` client pre-configured with the OpenRouter base
/// URL and API key, together with the model identifier requests target.
pub struct OpenRouterProvider {
    // HTTP client pointed at https://openrouter.ai/api/v1 (see constructors).
    client: Client<OpenAIConfig>,
    // OpenRouter model identifier; presumably a slug like
    // "anthropic/claude-3.5-sonnet" — confirm against callers.
    model: String,
}
15
16impl OpenRouterProvider {
17    pub fn new(api_key: String, model: String) -> Result<Self> {
18        let config = OpenAIConfig::new()
19            .with_api_key(api_key)
20            .with_api_base("https://openrouter.ai/api/v1");
21
22        let client = Client::with_config(config);
23        Ok(Self { client, model })
24    }
25
26    pub fn default(model: &str) -> Result<Self> {
27        let api_key = std::env::var("OPENROUTER_API_KEY")
28            .map_err(|_| LlmError::MissingApiKey("OPENROUTER_API_KEY".to_string()))?;
29
30        let config = OpenAIConfig::new()
31            .with_api_key(api_key)
32            .with_api_base("https://openrouter.ai/api/v1");
33
34        let client = Client::with_config(config);
35
36        Ok(Self {
37            client,
38            model: model.to_string(),
39        })
40    }
41}
42
43impl ProviderFactory for OpenRouterProvider {
44    fn from_env() -> Result<Self> {
45        let api_key = std::env::var("OPENROUTER_API_KEY")
46            .map_err(|_| LlmError::MissingApiKey("OPENROUTER_API_KEY".to_string()))?;
47
48        let config = OpenAIConfig::new()
49            .with_api_key(api_key)
50            .with_api_base("https://openrouter.ai/api/v1");
51
52        let client = Client::with_config(config);
53
54        Ok(Self {
55            client,
56            model: String::new(),
57        })
58    }
59
60    fn with_model(mut self, model: &str) -> Self {
61        self.model = model.to_string();
62        self
63    }
64}
65
impl StreamingModelProvider for OpenRouterProvider {
    /// Parses `"openrouter:<model>"` into the crate's [`crate::LlmModel`];
    /// yields `None` when that string does not parse.
    fn model(&self) -> Option<crate::LlmModel> {
        format!("openrouter:{}", self.model).parse().ok()
    }

    /// Looks up the context window for this model via the shared
    /// `get_context_window` table; `None` when the model is unknown there.
    fn context_window(&self) -> Option<u32> {
        get_context_window("openrouter", &self.model)
    }

    /// Starts a streaming completion for `context` against OpenRouter.
    fn stream_response(&self, context: &Context) -> LlmResponseStream {
        // Build base request and convert to OpenRouter-specific format
        // The From trait automatically adds usage tracking parameters
        // See: https://openrouter.ai/docs/use-cases/usage-accounting
        let mut request: OpenRouterChatRequest = match build_chat_request(&self.model, context) {
            Ok(req) => req.into(),
            // Surface build failures as a one-item error stream instead of
            // panicking — callers consume errors from the stream itself.
            Err(e) => return Box::pin(async_stream::stream! { yield Err(e); }),
        };

        // Forward the caller's reasoning-effort hint when one is set.
        if let Some(effort) = context.reasoning_effort() {
            request.reasoning_effort = Some(effort);
        }

        create_custom_stream_generic(&self.client, request)
    }

    /// Human-readable label for UIs/logs, e.g. `OpenRouter (<model>)`.
    fn display_name(&self) -> String {
        format!("OpenRouter ({})", self.model)
    }
}
94}