// llm/providers/local/ollama.rs
1#![doc = include_str!(concat!(env!("OUT_DIR"), "/docs/ollama.md"))]
2
3use super::util::get_local_config;
4use crate::providers::openai::OpenAiChatProvider;
5use crate::{ProviderFactory, Result};
6use async_openai::{Client, config::OpenAIConfig};
7
/// LLM provider backed by a locally running Ollama server, reached through
/// its OpenAI-compatible `/v1` endpoint via the `async_openai` client.
pub struct OllamaProvider {
    // Model identifier sent with chat requests; may be empty until set via
    // `with_model` (see the `ProviderFactory` impl).
    model: String,
    // HTTP client configured through `get_local_config(base_url)`.
    client: Client<OpenAIConfig>,
}
12
13impl OllamaProvider {
14    pub fn new(model: &str, base_url: &str) -> Self {
15        Self { model: model.to_string(), client: Client::with_config(get_local_config(base_url)) }
16    }
17
18    pub fn default(model: &str) -> Self {
19        Self { model: model.to_string(), client: Client::with_config(get_local_config("http://localhost:11434/v1")) }
20    }
21}
22
23impl ProviderFactory for OllamaProvider {
24    async fn from_env() -> Result<Self> {
25        Ok(Self { model: String::new(), client: Client::with_config(get_local_config("http://localhost:11434/v1")) })
26    }
27
28    fn with_model(mut self, model: &str) -> Self {
29        self.model = model.to_string();
30        self
31    }
32}
33
34impl OpenAiChatProvider for OllamaProvider {
35    type Config = OpenAIConfig;
36
37    fn client(&self) -> &Client<Self::Config> {
38        &self.client
39    }
40
41    fn model(&self) -> &str {
42        &self.model
43    }
44
45    fn provider_name(&self) -> &'static str {
46        "Ollama"
47    }
48}