//! Ollama provider — llm/providers/local/ollama.rs

use super::util::get_local_config;
use crate::providers::openai::OpenAiChatProvider;
use crate::{ProviderFactory, Result};
use async_openai::{Client, config::OpenAIConfig};
5
/// Chat provider backed by a local Ollama server, accessed through its
/// OpenAI-compatible HTTP API via the `async_openai` client.
pub struct OllamaProvider {
    // Model identifier sent with each request (owned copy of the caller's &str).
    model: String,
    // OpenAI-compatible client; its base URL comes from `get_local_config`.
    client: Client<OpenAIConfig>,
}
10
11impl OllamaProvider {
12    pub fn new(model: &str, base_url: &str) -> Self {
13        Self {
14            model: model.to_string(),
15            client: Client::with_config(get_local_config(base_url)),
16        }
17    }
18
19    pub fn default(model: &str) -> Self {
20        Self {
21            model: model.to_string(),
22            client: Client::with_config(get_local_config("http://localhost:11434/v1")),
23        }
24    }
25}
26
27impl ProviderFactory for OllamaProvider {
28    fn from_env() -> Result<Self> {
29        Ok(Self {
30            model: String::new(),
31            client: Client::with_config(get_local_config("http://localhost:11434/v1")),
32        })
33    }
34
35    fn with_model(mut self, model: &str) -> Self {
36        self.model = model.to_string();
37        self
38    }
39}
40
impl OpenAiChatProvider for OllamaProvider {
    type Config = OpenAIConfig;

    // Borrows the underlying OpenAI-compatible client for issuing requests.
    fn client(&self) -> &Client<Self::Config> {
        &self.client
    }

    // Model identifier this provider was constructed with.
    fn model(&self) -> &str {
        &self.model
    }

    // Static label identifying this backend; NOTE(review): presumably used
    // for logging/error display — confirm against OpenAiChatProvider callers.
    fn provider_name(&self) -> &'static str {
        "Ollama"
    }
}