chasm_cli/providers/ollama.rs

#![allow(dead_code)]

use super::{ChatProvider, ProviderType};
use crate::models::{ChatMessage, ChatRequest, ChatSession};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

pub struct OllamaProvider {
    /// Base URL of the Ollama HTTP API.
    endpoint: String,
    /// Whether the endpoint looked reachable at discovery time.
    available: bool,
    /// Local Ollama data directory, if one was found.
    data_path: Option<PathBuf>,
}

/// Response shape for Ollama's `/api/tags` model-listing endpoint.
#[derive(Debug, Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModel>,
}

#[derive(Debug, Deserialize)]
struct OllamaModel {
    name: String,
    modified_at: Option<String>,
    size: Option<u64>,
}

/// A single message in Ollama's `/api/chat` wire format.
#[derive(Debug, Serialize, Deserialize)]
struct OllamaChatMessage {
    role: String,
    content: String,
}

/// Request body for Ollama's `/api/chat` endpoint.
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<OllamaChatMessage>,
    stream: bool,
}

/// Non-streaming response body from `/api/chat`.
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    message: OllamaChatMessage,
    done: bool,
}

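// For reference, a non-streaming /api/chat reply maps onto OllamaChatResponse
// roughly like this (illustrative sample, not captured from a live server):
//
//     {"message": {"role": "assistant", "content": "Hi!"}, "done": true}
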
impl OllamaProvider {
    /// Build a provider from the environment: honor `OLLAMA_HOST` if set,
    /// otherwise fall back to the default local endpoint.
    pub fn discover() -> Option<Self> {
        let endpoint =
            std::env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());

        let data_path = Self::find_ollama_data();
        let available = Self::check_availability(&endpoint);

        Some(Self {
            endpoint,
            available,
            data_path,
        })
    }

    /// Locate Ollama's local data directory: prefer the `OLLAMA_MODELS`
    /// override, then fall back to platform-specific defaults.
    fn find_ollama_data() -> Option<PathBuf> {
        if let Ok(models_path) = std::env::var("OLLAMA_MODELS") {
            return Some(PathBuf::from(models_path));
        }

        // Windows and macOS both default to ~/.ollama.
        #[cfg(any(target_os = "windows", target_os = "macos"))]
        {
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        #[cfg(target_os = "linux")]
        {
            // Prefer the XDG data directory, then fall back to ~/.ollama.
            if let Some(data_dir) = dirs::data_dir() {
                let path = data_dir.join("ollama");
                if path.exists() {
                    return Some(path);
                }
            }
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }

        None
    }

    /// Placeholder availability check: no HTTP request is made yet, so this
    /// only verifies that an endpoint string is configured. `_url` marks
    /// where a real probe of `/api/tags` would go.
    fn check_availability(endpoint: &str) -> bool {
        let _url = format!("{}/api/tags", endpoint);

        !endpoint.is_empty()
    }
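
    // A minimal sketch of a real probe using only the standard library: try a
    // short TCP connect to the API's host and port. Passing a bare
    // "host:port" string (scheme stripped) is an assumption for illustration;
    // a full implementation would parse `endpoint` as a URL first.
    fn check_availability_tcp(host_port: &str) -> bool {
        use std::net::{TcpStream, ToSocketAddrs};
        use std::time::Duration;

        host_port
            .to_socket_addrs()
            .ok()
            .and_then(|mut addrs| addrs.next())
            .map(|addr| TcpStream::connect_timeout(&addr, Duration::from_millis(500)).is_ok())
            .unwrap_or(false)
    }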

    /// List locally installed models. Currently a stub: even when the
    /// provider is available, no request is made and an empty list is
    /// returned.
    pub fn list_models(&self) -> Result<Vec<String>> {
        if !self.available {
            return Ok(Vec::new());
        }

        Ok(Vec::new())
    }
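
    // A hedged sketch of how `list_models` could query `/api/tags` if an
    // HTTP client such as `reqwest` (blocking + json features) were added as
    // a dependency; `OllamaModelsResponse` above is already shaped for that
    // payload:
    //
    //     let url = format!("{}/api/tags", self.endpoint);
    //     let resp: OllamaModelsResponse = reqwest::blocking::get(&url)?.json()?;
    //     Ok(resp.models.into_iter().map(|m| m.name).collect())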

    /// Fold a flat list of Ollama messages into a ChatSession, pairing each
    /// user message with the assistant reply that follows it. A trailing
    /// user message with no reply is dropped.
    fn convert_to_session(&self, messages: Vec<OllamaChatMessage>, model: &str) -> ChatSession {
        let now = chrono::Utc::now().timestamp_millis();
        let session_id = uuid::Uuid::new_v4().to_string();

        let mut requests = Vec::new();
        let mut user_msg: Option<String> = None;

        for msg in messages {
            match msg.role.as_str() {
                "user" => {
                    user_msg = Some(msg.content);
                }
                "assistant" => {
                    if let Some(user_text) = user_msg.take() {
                        requests.push(ChatRequest {
                            timestamp: Some(now),
                            message: Some(ChatMessage {
                                text: Some(user_text),
                                parts: None,
                            }),
                            response: Some(serde_json::json!({
                                "value": [{"value": msg.content}]
                            })),
                            variable_data: None,
                            request_id: Some(uuid::Uuid::new_v4().to_string()),
                            response_id: Some(uuid::Uuid::new_v4().to_string()),
                            model_id: Some(format!("ollama/{}", model)),
                            agent: None,
                            result: None,
                            followups: None,
                            is_canceled: Some(false),
                            content_references: None,
                            code_citations: None,
                            response_markdown_info: None,
                            source_session: None,
                        });
                    }
                }
                // System and other roles are not representable; skip them.
                _ => {}
            }
        }

        ChatSession {
            version: 3,
            session_id: Some(session_id),
            creation_date: now,
            last_message_date: now,
            is_imported: true,
            initial_location: "ollama".to_string(),
            custom_title: Some(format!("Ollama Chat ({})", model)),
            requester_username: Some("user".to_string()),
            requester_avatar_icon_uri: None,
            responder_username: Some(format!("Ollama/{}", model)),
            responder_avatar_icon_uri: None,
            requests,
        }
    }
}

impl ChatProvider for OllamaProvider {
    fn provider_type(&self) -> ProviderType {
        ProviderType::Ollama
    }

    fn name(&self) -> &str {
        "Ollama"
    }

    fn is_available(&self) -> bool {
        self.available
    }

    fn sessions_path(&self) -> Option<PathBuf> {
        self.data_path.clone()
    }

    // Stub: Ollama keeps no chat history on disk, so there is nothing to
    // enumerate yet.
    fn list_sessions(&self) -> Result<Vec<ChatSession>> {
        Ok(Vec::new())
    }

    fn import_session(&self, _session_id: &str) -> Result<ChatSession> {
        anyhow::bail!("Ollama does not persist chat sessions by default")
    }

    fn export_session(&self, _session: &ChatSession) -> Result<()> {
        anyhow::bail!("Export to Ollama not yet implemented")
    }
}
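
// A hedged sketch of what `export_session` could do instead of bailing:
// replay the session through Ollama's /api/chat endpoint using the
// OllamaChatRequest struct above (Ollama itself would still not persist the
// transcript). The model name and HTTP plumbing are assumptions for
// illustration:
//
//     let body = OllamaChatRequest {
//         model: "llama3".to_string(), // hypothetical model name
//         messages: ollama_messages,
//         stream: false,
//     };
//     let json = serde_json::to_string(&body)?;
//     // POST `json` to `{endpoint}/api/chat`, Content-Type: application/json.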

/// Build a ChatSession from (user, assistant) message pairs without touching
/// the network.
pub fn create_ollama_session(messages: Vec<(String, String)>, model: &str) -> ChatSession {
    let provider = OllamaProvider {
        endpoint: String::new(),
        available: false,
        data_path: None,
    };

    // Flatten each (user, assistant) pair into two role-tagged messages.
    let ollama_messages: Vec<OllamaChatMessage> = messages
        .into_iter()
        .flat_map(|(user, assistant)| {
            vec![
                OllamaChatMessage {
                    role: "user".to_string(),
                    content: user,
                },
                OllamaChatMessage {
                    role: "assistant".to_string(),
                    content: assistant,
                },
            ]
        })
        .collect();

    provider.convert_to_session(ollama_messages, model)
}
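
// A minimal sketch of tests for the pure conversion path; the model name
// "llama3" and the sample payload are illustrative only.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn pairs_fold_into_one_request_each() {
        let session = create_ollama_session(
            vec![("Hello".to_string(), "Hi there!".to_string())],
            "llama3",
        );
        assert_eq!(session.requests.len(), 1);
        assert_eq!(session.version, 3);
        assert!(session.is_imported);
    }

    #[test]
    fn chat_response_deserializes_from_sample_payload() {
        let sample = r#"{"message":{"role":"assistant","content":"Hi!"},"done":true}"#;
        let resp: OllamaChatResponse = serde_json::from_str(sample).unwrap();
        assert!(resp.done);
        assert_eq!(resp.message.role, "assistant");
    }
}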