Skip to main content

agent_io/llm/ollama/
mod.rs

1//! Ollama Chat Model implementation
2//!
3//! Ollama runs models locally and provides an OpenAI-compatible API.
4
5mod builder;
6
7use async_trait::async_trait;
8
9use crate::llm::{
10    BaseChatModel, ChatCompletion, ChatStream, LlmError, Message, ToolChoice, ToolDefinition,
11};
12
13pub use builder::ChatOllamaBuilder;
14
/// Ollama Chat Model
///
/// Connect to a locally running Ollama instance.
///
/// Ollama exposes an OpenAI-compatible HTTP API, so this type is a thin
/// newtype over `crate::llm::openai_compatible::ChatOpenAICompatible` and
/// delegates all requests to it.
///
/// # Example
/// ```ignore
/// use agent_io::llm::ChatOllama;
///
/// let llm = ChatOllama::new("llama3.2")?;
/// ```
pub struct ChatOllama {
    // Underlying OpenAI-compatible client that performs the actual calls.
    // `pub(super)` so the sibling `builder` module can construct this struct.
    pub(super) inner: crate::llm::openai_compatible::ChatOpenAICompatible,
}
28
29impl ChatOllama {
30    /// Create a new Ollama chat model
31    pub fn new(model: impl Into<String>) -> Result<Self, LlmError> {
32        Self::builder().model(model).build()
33    }
34
35    /// Create a builder for configuration
36    pub fn builder() -> ChatOllamaBuilder {
37        ChatOllamaBuilder::default()
38    }
39}
40
41#[async_trait]
42impl BaseChatModel for ChatOllama {
43    fn model(&self) -> &str {
44        self.inner.model()
45    }
46
47    fn provider(&self) -> &str {
48        "ollama"
49    }
50
51    fn context_window(&self) -> Option<u64> {
52        self.inner.context_window()
53    }
54
55    async fn invoke(
56        &self,
57        messages: Vec<Message>,
58        tools: Option<Vec<ToolDefinition>>,
59        tool_choice: Option<ToolChoice>,
60    ) -> Result<ChatCompletion, LlmError> {
61        self.inner.invoke(messages, tools, tool_choice).await
62    }
63
64    async fn invoke_stream(
65        &self,
66        messages: Vec<Message>,
67        tools: Option<Vec<ToolDefinition>>,
68        tool_choice: Option<ToolChoice>,
69    ) -> Result<ChatStream, LlmError> {
70        self.inner.invoke_stream(messages, tools, tool_choice).await
71    }
72
73    fn supports_vision(&self) -> bool {
74        let model = self.model().to_lowercase();
75        model.contains("llava") || model.contains("vision")
76    }
77}