chasm_cli/providers/ollama.rs

// Copyright (c) 2024-2026 Nervosys LLC
// SPDX-License-Identifier: Apache-2.0
//! Ollama provider for local LLM inference

#![allow(dead_code)]

use super::{ChatProvider, ProviderType};
use crate::models::{ChatMessage, ChatRequest, ChatSession};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

/// Ollama API provider
///
/// Ollama runs local LLMs and exposes an HTTP API at `http://localhost:11434` by default.
/// It can also be configured to save conversation history.
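///
/// # Example
///
/// A minimal sketch of discovering a local Ollama instance; it honors
/// `OLLAMA_HOST` when set and otherwise falls back to the default endpoint.
///
/// ```ignore
/// if let Some(provider) = OllamaProvider::discover() {
///     if provider.is_available() {
///         let models = provider.list_models().unwrap_or_default();
///         println!("Ollama models: {:?}", models);
///     }
/// }
/// ```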
pub struct OllamaProvider {
    /// API endpoint URL
    endpoint: String,
    /// Whether Ollama is available
    available: bool,
    /// Path to Ollama's data directory (for history)
    data_path: Option<PathBuf>,
}

/// Ollama API response for listing models
#[derive(Debug, Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModel>,
}

/// Ollama model info
#[derive(Debug, Deserialize)]
struct OllamaModel {
    name: String,
    modified_at: Option<String>,
    size: Option<u64>,
}

/// Ollama chat message format
#[derive(Debug, Serialize, Deserialize)]
struct OllamaChatMessage {
    role: String,
    content: String,
}

/// Ollama chat request
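///
/// With `stream: false` this serializes to the request body for Ollama's
/// `POST /api/chat` endpoint, e.g.
/// `{"model":"llama3","messages":[{"role":"user","content":"hi"}],"stream":false}`.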
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<OllamaChatMessage>,
    stream: bool,
}

/// Ollama chat response
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    message: OllamaChatMessage,
    done: bool,
}
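
// A minimal sketch of a single blocking chat call using these types, assuming
// the `ureq` crate (with its `json` feature) is added to Cargo.toml; with
// `stream: false`, `POST /api/chat` returns a single `OllamaChatResponse`.
//
//     fn chat_once(endpoint: &str, model: &str, prompt: &str) -> Result<String> {
//         let request = OllamaChatRequest {
//             model: model.to_string(),
//             messages: vec![OllamaChatMessage {
//                 role: "user".to_string(),
//                 content: prompt.to_string(),
//             }],
//             stream: false,
//         };
//         let url = format!("{}/api/chat", endpoint);
//         let response: OllamaChatResponse =
//             ureq::post(&url).send_json(&request)?.into_json()?;
//         Ok(response.message.content)
//     }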

impl OllamaProvider {
    /// Discover Ollama installation and create provider
    pub fn discover() -> Option<Self> {
        let endpoint =
            std::env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());

        let data_path = Self::find_ollama_data();

        // Check if Ollama is running
        let available = Self::check_availability(&endpoint);

        Some(Self {
            endpoint,
            available,
            data_path,
        })
    }

    /// Find Ollama's data directory
    fn find_ollama_data() -> Option<PathBuf> {
        // Check OLLAMA_MODELS environment variable first
        if let Ok(models_path) = std::env::var("OLLAMA_MODELS") {
            return Some(PathBuf::from(models_path));
        }

        #[cfg(any(target_os = "windows", target_os = "macos"))]
        {
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        #[cfg(target_os = "linux")]
        {
            // Check XDG data dir first
            if let Some(data_dir) = dirs::data_dir() {
                let path = data_dir.join("ollama");
                if path.exists() {
                    return Some(path);
                }
            }
            // Fall back to home directory
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        None
    }

    /// Check if Ollama API is available
    fn check_availability(endpoint: &str) -> bool {
        let _url = format!("{}/api/tags", endpoint);

        // A real probe would issue a blocking HTTP request to `_url`
        // (e.g. with ureq; add it to Cargo.toml if needed). For now we
        // only check that an endpoint is configured and assume it is up.
        !endpoint.is_empty()
    }
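
    // A minimal sketch of a real availability probe, assuming the blocking
    // `ureq` crate is added to Cargo.toml (Ollama answers `GET /api/tags`
    // whenever the server is running):
    //
    //     fn check_availability(endpoint: &str) -> bool {
    //         let url = format!("{}/api/tags", endpoint);
    //         ureq::get(&url)
    //             .timeout(std::time::Duration::from_secs(2))
    //             .call()
    //             .is_ok()
    //     }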

    /// List available models from Ollama
    pub fn list_models(&self) -> Result<Vec<String>> {
        if !self.available {
            return Ok(Vec::new());
        }

        // This would make an HTTP request to /api/tags
        // For now, return an empty list; implement with reqwest/ureq later
        Ok(Vec::new())
    }
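
    // A minimal sketch of the eventual implementation, assuming `ureq` is added
    // to Cargo.toml and that `/api/tags` returns the shape captured above by
    // `OllamaModelsResponse`:
    //
    //     pub fn list_models(&self) -> Result<Vec<String>> {
    //         if !self.available {
    //             return Ok(Vec::new());
    //         }
    //         let url = format!("{}/api/tags", self.endpoint);
    //         let response: OllamaModelsResponse = ureq::get(&url).call()?.into_json()?;
    //         Ok(response.models.into_iter().map(|m| m.name).collect())
    //     }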

    /// Convert Ollama chat history to CSM session format
    fn convert_to_session(&self, messages: Vec<OllamaChatMessage>, model: &str) -> ChatSession {
        let now = chrono::Utc::now().timestamp_millis();
        let session_id = uuid::Uuid::new_v4().to_string();

        let mut requests = Vec::new();
        let mut user_msg: Option<String> = None;

        for msg in messages {
            match msg.role.as_str() {
                "user" => {
                    user_msg = Some(msg.content);
                }
                "assistant" => {
                    if let Some(user_text) = user_msg.take() {
                        requests.push(ChatRequest {
                            timestamp: Some(now),
                            message: Some(ChatMessage {
                                text: Some(user_text),
                                parts: None,
                            }),
                            response: Some(serde_json::json!({
                                "value": [{"value": msg.content}]
                            })),
                            variable_data: None,
                            request_id: Some(uuid::Uuid::new_v4().to_string()),
                            response_id: Some(uuid::Uuid::new_v4().to_string()),
                            model_id: Some(format!("ollama/{}", model)),
                            agent: None,
                            result: None,
                            followups: None,
                            is_canceled: Some(false),
                            content_references: None,
                            code_citations: None,
                            response_markdown_info: None,
                            source_session: None,
                        });
                    }
                }
                _ => {}
            }
        }

        ChatSession {
            version: 3,
            session_id: Some(session_id),
            creation_date: now,
            last_message_date: now,
            is_imported: true,
            initial_location: "ollama".to_string(),
            custom_title: Some(format!("Ollama Chat ({})", model)),
            requester_username: Some("user".to_string()),
            requester_avatar_icon_uri: None,
            responder_username: Some(format!("Ollama/{}", model)),
            responder_avatar_icon_uri: None,
            requests,
        }
    }
}

impl ChatProvider for OllamaProvider {
    fn provider_type(&self) -> ProviderType {
        ProviderType::Ollama
    }

    fn name(&self) -> &str {
        "Ollama"
    }

    fn is_available(&self) -> bool {
        self.available
    }

    fn sessions_path(&self) -> Option<PathBuf> {
        self.data_path.clone()
    }

    fn list_sessions(&self) -> Result<Vec<ChatSession>> {
        // Ollama doesn't persist chat history by default
        // This would need integration with Ollama's history feature
        // or a custom persistence layer
        Ok(Vec::new())
    }

    fn import_session(&self, _session_id: &str) -> Result<ChatSession> {
        anyhow::bail!("Ollama does not persist chat sessions by default")
    }

    fn export_session(&self, _session: &ChatSession) -> Result<()> {
        // Could implement by sending messages to Ollama to recreate context
        anyhow::bail!("Export to Ollama not yet implemented")
    }
}

/// Create an Ollama chat session from a conversation
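///
/// # Example
///
/// A minimal sketch pairing a single user turn with its assistant reply (the
/// model name is only illustrative):
///
/// ```ignore
/// let session = create_ollama_session(
///     vec![("Hello".to_string(), "Hi! How can I help?".to_string())],
///     "llama3",
/// );
/// assert_eq!(session.requests.len(), 1);
/// ```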
pub fn create_ollama_session(
    messages: Vec<(String, String)>, // (user_msg, assistant_msg) pairs
    model: &str,
) -> ChatSession {
    let provider = OllamaProvider {
        endpoint: String::new(),
        available: false,
        data_path: None,
    };

    let ollama_messages: Vec<OllamaChatMessage> = messages
        .into_iter()
        .flat_map(|(user, assistant)| {
            vec![
                OllamaChatMessage {
                    role: "user".to_string(),
                    content: user,
                },
                OllamaChatMessage {
                    role: "assistant".to_string(),
                    content: assistant,
                },
            ]
        })
        .collect();

    provider.convert_to_session(ollama_messages, model)
}