// chasm/providers/ollama.rs

#![allow(dead_code)]

//! Provider integration for a locally running Ollama server.

use super::{ChatProvider, ProviderType};
use crate::models::{ChatMessage, ChatRequest, ChatSession};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

pub struct OllamaProvider {
    /// Base URL of the Ollama HTTP API, e.g. `http://localhost:11434`.
    endpoint: String,
    /// Whether the endpoint looked usable at discovery time.
    available: bool,
    /// Local Ollama data directory, if one was found.
    data_path: Option<PathBuf>,
}

/// Response shape of Ollama's `GET /api/tags` (lists local models).
#[derive(Debug, Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModel>,
}

/// A locally installed model as reported by `/api/tags`.
#[derive(Debug, Deserialize)]
struct OllamaModel {
    name: String,
    modified_at: Option<String>,
    size: Option<u64>,
}

/// One turn in the `/api/chat` wire format.
#[derive(Debug, Serialize, Deserialize)]
struct OllamaChatMessage {
    role: String,
    content: String,
}

/// Request body for `POST /api/chat`.
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<OllamaChatMessage>,
    stream: bool,
}

/// Non-streaming response body of `POST /api/chat`.
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    message: OllamaChatMessage,
    done: bool,
}

impl OllamaProvider {
    /// Builds a provider from the environment. Discovery itself cannot
    /// fail, so this always returns `Some`; reachability is recorded in
    /// the `available` flag instead.
    pub fn discover() -> Option<Self> {
        // Honor OLLAMA_HOST if set; fall back to Ollama's default port.
        let endpoint =
            std::env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());

        let data_path = Self::find_ollama_data();
        let available = Self::check_availability(&endpoint);

        Some(Self {
            endpoint,
            available,
            data_path,
        })
    }

    /// Locates the local Ollama data directory, if any.
    fn find_ollama_data() -> Option<PathBuf> {
        // An explicit OLLAMA_MODELS override wins on every platform.
        if let Ok(models_path) = std::env::var("OLLAMA_MODELS") {
            return Some(PathBuf::from(models_path));
        }

        // Windows and macOS both default to ~/.ollama.
        #[cfg(target_os = "windows")]
        {
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        #[cfg(target_os = "macos")]
        {
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        // Linux: prefer the XDG data directory, then fall back to ~/.ollama.
        #[cfg(target_os = "linux")]
        {
            if let Some(data_dir) = dirs::data_dir() {
                let path = data_dir.join("ollama");
                if path.exists() {
                    return Some(path);
                }
            }

            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        None
    }

    /// Best-effort availability probe. The HTTP check is not wired up
    /// yet, so this is a stub: a non-empty endpoint counts as available.
    fn check_availability(endpoint: &str) -> bool {
        let _url = format!("{}/api/tags", endpoint);
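        // A live probe could look like the following sketch (not wired in;
        // assumes a `reqwest` dependency with the `blocking` feature):
        //
        //     reqwest::blocking::Client::builder()
        //         .timeout(std::time::Duration::from_millis(500))
        //         .build()
        //         .and_then(|client| client.get(_url.as_str()).send())
        //         .map(|resp| resp.status().is_success())
        //         .unwrap_or(false)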
        !endpoint.is_empty()
    }

    /// Lists locally installed models. The live HTTP call is not
    /// implemented yet; see the sketch below.
    pub fn list_models(&self) -> Result<Vec<String>> {
        if !self.available {
            return Ok(Vec::new());
        }
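        // Sketch of the live path (assumes `reqwest` with the `blocking`
        // and `json` features; `OllamaModelsResponse` is defined above):
        //
        //     let resp: OllamaModelsResponse =
        //         reqwest::blocking::get(format!("{}/api/tags", self.endpoint))?
        //             .json()?;
        //     return Ok(resp.models.into_iter().map(|m| m.name).collect());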
        Ok(Vec::new())
    }

    /// Converts a flat list of Ollama messages into a `ChatSession` by
    /// pairing each user message with the assistant reply that follows it.
    /// User turns with no reply are dropped; other roles (e.g. `system`)
    /// are ignored.
    fn convert_to_session(&self, messages: Vec<OllamaChatMessage>, model: &str) -> ChatSession {
        let now = chrono::Utc::now().timestamp_millis();
        let session_id = uuid::Uuid::new_v4().to_string();

        let mut requests = Vec::new();
        let mut user_msg: Option<String> = None;

        for msg in messages {
            match msg.role.as_str() {
                "user" => {
                    user_msg = Some(msg.content);
                }
                "assistant" => {
                    if let Some(user_text) = user_msg.take() {
                        requests.push(ChatRequest {
                            timestamp: Some(now),
                            message: Some(ChatMessage {
                                text: Some(user_text),
                                parts: None,
                            }),
                            response: Some(serde_json::json!({
                                "value": [{"value": msg.content}]
                            })),
                            variable_data: None,
                            request_id: Some(uuid::Uuid::new_v4().to_string()),
                            response_id: Some(uuid::Uuid::new_v4().to_string()),
                            model_id: Some(format!("ollama/{}", model)),
                            agent: None,
                            result: None,
                            followups: None,
                            is_canceled: Some(false),
                            content_references: None,
                            code_citations: None,
                            response_markdown_info: None,
                            source_session: None,
                            model_state: None,
                            time_spent_waiting: None,
                        });
                    }
                }
                _ => {}
            }
        }

        ChatSession {
            version: 3,
            session_id: Some(session_id),
            creation_date: now,
            last_message_date: now,
            is_imported: true,
            initial_location: "ollama".to_string(),
            custom_title: Some(format!("Ollama Chat ({})", model)),
            requester_username: Some("user".to_string()),
            requester_avatar_icon_uri: None,
            responder_username: Some(format!("Ollama/{}", model)),
            responder_avatar_icon_uri: None,
            requests,
        }
    }
}

impl ChatProvider for OllamaProvider {
    fn provider_type(&self) -> ProviderType {
        ProviderType::Ollama
    }

    fn name(&self) -> &str {
        "Ollama"
    }

    fn is_available(&self) -> bool {
        self.available
    }

    fn sessions_path(&self) -> Option<PathBuf> {
        self.data_path.clone()
    }

    fn list_sessions(&self) -> Result<Vec<ChatSession>> {
        // Ollama keeps no on-disk chat history for us to enumerate.
        Ok(Vec::new())
    }

    fn import_session(&self, _session_id: &str) -> Result<ChatSession> {
        anyhow::bail!("Ollama does not persist chat sessions by default")
    }

    fn export_session(&self, _session: &ChatSession) -> Result<()> {
        anyhow::bail!("Export to Ollama is not yet implemented")
    }
}
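
// Typical call site (hypothetical; the code that wires providers together
// lives outside this file):
//
//     if let Some(provider) = OllamaProvider::discover() {
//         if provider.is_available() {
//             let models = provider.list_models()?;
//         }
//     }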

/// Builds a `ChatSession` from `(user, assistant)` message pairs, e.g.
/// captured from an interactive Ollama chat.
pub fn create_ollama_session(messages: Vec<(String, String)>, model: &str) -> ChatSession {
    // `convert_to_session` only needs `&self` for method syntax, so a
    // placeholder provider is sufficient here.
    let provider = OllamaProvider {
        endpoint: String::new(),
        available: false,
        data_path: None,
    };

    // Flatten each (user, assistant) pair into two wire-format messages.
    let ollama_messages: Vec<OllamaChatMessage> = messages
        .into_iter()
        .flat_map(|(user, assistant)| {
            vec![
                OllamaChatMessage {
                    role: "user".to_string(),
                    content: user,
                },
                OllamaChatMessage {
                    role: "assistant".to_string(),
                    content: assistant,
                },
            ]
        })
        .collect();

    provider.convert_to_session(ollama_messages, model)
}
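
// A minimal usage sketch, written as a test of the pairing behavior above:
// two (user, assistant) tuples yield two requests, and the session title
// embeds the model name. Only fields set by `convert_to_session` are checked.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn create_ollama_session_pairs_turns() {
        let session = create_ollama_session(
            vec![
                ("hi".to_string(), "hello!".to_string()),
                ("how are you?".to_string(), "fine, thanks".to_string()),
            ],
            "llama3",
        );

        assert_eq!(session.requests.len(), 2);
        assert_eq!(
            session.custom_title.as_deref(),
            Some("Ollama Chat (llama3)")
        );
        assert!(session.is_imported);
    }
}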