agent_io/llm/openrouter/mod.rs

mod builder;

use async_trait::async_trait;

use crate::llm::{
    BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
};

pub use builder::ChatOpenRouterBuilder;

/// Chat model served through OpenRouter. Requests are delegated to the shared
/// OpenAI-compatible client held in `inner`.
pub struct ChatOpenRouter {
    pub(super) inner: crate::llm::openai_compatible::ChatOpenAICompatible,
}

impl ChatOpenRouter {
    /// Shorthand for `Self::builder().model(model).build()`.
    pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
        Self::builder().model(model).build()
    }

    pub fn builder() -> ChatOpenRouterBuilder {
        ChatOpenRouterBuilder::default()
    }
}

#[async_trait]
impl BaseChatModel for ChatOpenRouter {
    fn model(&self) -> &str {
        self.inner.model()
    }

    fn provider(&self) -> &str {
        "openrouter"
    }

    fn context_window(&self) -> Option<u64> {
        self.inner.context_window()
    }

    async fn invoke(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatCompletion, LlmError> {
        self.inner.invoke(messages, tools, tool_choice).await
    }

    async fn invoke_stream(
        &self,
        messages: Vec<Message>,
        tools: Option<Vec<ToolDefinition>>,
        tool_choice: Option<ToolChoice>,
    ) -> Result<ChatStream, LlmError> {
        self.inner.invoke_stream(messages, tools, tool_choice).await
    }

    fn supports_vision(&self) -> bool {
        true
    }
}
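
For orientation, here is a minimal usage sketch, not part of mod.rs. It assumes the crate root is `agent_io` and that the `llm::openrouter` module is publicly reachable (the import paths are inferred from the directory layout and may differ); it also assumes the builder or the inner OpenAI-compatible client picks up the OpenRouter API key from its own configuration. The model slug is a placeholder, and since `Message` constructors are defined elsewhere in `crate::llm`, the sketch sends an empty conversation purely to show the `invoke` call shape.

use agent_io::llm::openrouter::ChatOpenRouter;
use agent_io::llm::{BaseChatModel, LlmError};

async fn openrouter_smoke_test() -> Result<(), LlmError> {
    // `new` is shorthand for `builder().model(...).build()`; the slug below is
    // only a placeholder for whichever OpenRouter model you actually want.
    let chat = ChatOpenRouter::new("openai/gpt-4o-mini")?;
    assert_eq!(chat.provider(), "openrouter");
    assert!(chat.supports_vision());

    // Empty message list and no tools: enough to illustrate the signature,
    // not a meaningful request.
    let completion = chat.invoke(vec![], None, None).await?;
    let _ = completion;
    Ok(())
}

The wrapper itself adds nothing to the wire protocol: OpenRouter exposes an OpenAI-compatible API, so ChatOpenRouter only pins the provider name and vision support while delegating the model name, context window, and both invocation paths to ChatOpenAICompatible.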