//! OpenRouter chat model client (agent_io/llm/openrouter.rs).

use async_trait::async_trait;
7use crate::llm::openai_compatible::ChatOpenAICompatible;
8use crate::llm::{
9 BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
10};
11
12const OPENROUTER_URL: &str = "https://openrouter.ai/api/v1";
13
/// Chat model client for the OpenRouter API.
///
/// Thin wrapper around an OpenAI-compatible client preconfigured with
/// OpenRouter's base URL and provider tag; all trait methods delegate
/// to the inner client.
pub struct ChatOpenRouter {
    // Underlying OpenAI-compatible transport; built in ChatOpenRouterBuilder::build.
    inner: ChatOpenAICompatible,
}
27
28impl ChatOpenRouter {
29 pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
31 Self::builder().model(model).build()
32 }
33
34 pub fn builder() -> ChatOpenRouterBuilder {
36 ChatOpenRouterBuilder::default()
37 }
38}
39
/// Builder for [`ChatOpenRouter`]; every field is optional except `model`.
#[derive(Default)]
pub struct ChatOpenRouterBuilder {
    model: Option<String>,       // required; validated in build()
    api_key: Option<String>,     // falls back to OPENROUTER_API_KEY env var
    base_url: Option<String>,    // defaults to the public OpenRouter endpoint
    temperature: Option<f32>,    // defaults to 0.2 in build()
    max_tokens: Option<u64>,     // forwarded as max_completion_tokens
}
48
49impl ChatOpenRouterBuilder {
50 pub fn model(mut self, model: impl Into<String>) -> Self {
51 self.model = Some(model.into());
52 self
53 }
54
55 pub fn api_key(mut self, key: impl Into<String>) -> Self {
56 self.api_key = Some(key.into());
57 self
58 }
59
60 pub fn base_url(mut self, url: impl Into<String>) -> Self {
61 self.base_url = Some(url.into());
62 self
63 }
64
65 pub fn temperature(mut self, temp: f32) -> Self {
66 self.temperature = Some(temp);
67 self
68 }
69
70 pub fn max_tokens(mut self, tokens: u64) -> Self {
71 self.max_tokens = Some(tokens);
72 self
73 }
74
75 pub fn build(self) -> Result<ChatOpenRouter, LlmError> {
76 let model = self
77 .model
78 .ok_or_else(|| LlmError::Config("model is required".into()))?;
79
80 let api_key = self
81 .api_key
82 .or_else(|| std::env::var("OPENROUTER_API_KEY").ok())
83 .ok_or_else(|| LlmError::Config("OPENROUTER_API_KEY not set".into()))?;
84
85 let base_url = self.base_url.unwrap_or_else(|| OPENROUTER_URL.to_string());
86
87 let inner = ChatOpenAICompatible::builder()
88 .model(&model)
89 .base_url(&base_url)
90 .provider("openrouter")
91 .api_key(Some(api_key))
92 .temperature(self.temperature.unwrap_or(0.2))
93 .max_completion_tokens(self.max_tokens)
94 .build()?;
95
96 Ok(ChatOpenRouter { inner })
97 }
98}
99
100#[async_trait]
101impl BaseChatModel for ChatOpenRouter {
102 fn model(&self) -> &str {
103 self.inner.model()
104 }
105
106 fn provider(&self) -> &str {
107 "openrouter"
108 }
109
110 fn context_window(&self) -> Option<u64> {
111 self.inner.context_window()
112 }
113
114 async fn invoke(
115 &self,
116 messages: Vec<Message>,
117 tools: Option<Vec<ToolDefinition>>,
118 tool_choice: Option<ToolChoice>,
119 ) -> Result<ChatCompletion, LlmError> {
120 self.inner.invoke(messages, tools, tool_choice).await
121 }
122
123 async fn invoke_stream(
124 &self,
125 messages: Vec<Message>,
126 tools: Option<Vec<ToolDefinition>>,
127 tool_choice: Option<ToolChoice>,
128 ) -> Result<ChatStream, LlmError> {
129 self.inner.invoke_stream(messages, tools, tool_choice).await
130 }
131
132 fn supports_vision(&self) -> bool {
133 true
134 }
135}