// agent_io/llm/openrouter/mod.rs
//! OpenRouter Chat Model implementation
//!
//! OpenRouter provides a unified API for many LLM providers.

mod builder;

use async_trait::async_trait;

use crate::llm::{
    BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
};

pub use builder::ChatOpenRouterBuilder;

/// OpenRouter Chat Model
///
/// Access any LLM through OpenRouter's unified API.
///
/// This type is a thin wrapper: every request is delegated to the
/// OpenAI-compatible client held in `inner` (OpenRouter speaks an
/// OpenAI-style API, so the shared client does the actual work).
///
/// # Example
/// ```ignore
/// use agent_io::llm::ChatOpenRouter;
///
/// let llm = ChatOpenRouter::new("anthropic/claude-3.5-sonnet")?;
/// ```
pub struct ChatOpenRouter {
    // Underlying OpenAI-compatible transport. `pub(super)` so the
    // sibling `builder` module can construct and populate it.
    pub(super) inner: crate::llm::openai_compatible::ChatOpenAICompatible,
}
28
29impl ChatOpenRouter {
30    /// Create a new OpenRouter chat model
31    pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
32        Self::builder().model(model).build()
33    }
34
35    /// Create a builder for configuration
36    pub fn builder() -> ChatOpenRouterBuilder {
37        ChatOpenRouterBuilder::default()
38    }
39}
40
41#[async_trait]
42impl BaseChatModel for ChatOpenRouter {
43    fn model(&self) -> &str {
44        self.inner.model()
45    }
46
47    fn provider(&self) -> &str {
48        "openrouter"
49    }
50
51    fn context_window(&self) -> Option<u64> {
52        self.inner.context_window()
53    }
54
55    async fn invoke(
56        &self,
57        messages: Vec<Message>,
58        tools: Option<Vec<ToolDefinition>>,
59        tool_choice: Option<ToolChoice>,
60    ) -> Result<ChatCompletion, LlmError> {
61        self.inner.invoke(messages, tools, tool_choice).await
62    }
63
64    async fn invoke_stream(
65        &self,
66        messages: Vec<Message>,
67        tools: Option<Vec<ToolDefinition>>,
68        tool_choice: Option<ToolChoice>,
69    ) -> Result<ChatStream, LlmError> {
70        self.inner.invoke_stream(messages, tools, tool_choice).await
71    }
72
73    fn supports_vision(&self) -> bool {
74        true
75    }
76}