#![allow(dead_code)]
use super::{ChatProvider, ProviderType};
use crate::models::{ChatMessage, ChatRequest, ChatSession};
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
/// Chat provider backed by a locally running Ollama instance.
pub struct OllamaProvider {
    // Base URL of the Ollama HTTP API (from `OLLAMA_HOST`, defaulting to
    // http://localhost:11434 — see `discover`).
    endpoint: String,
    // Whether the endpoint looked usable at discovery time
    // (see `check_availability`; currently a stub check).
    available: bool,
    // Ollama's on-disk data directory, if one was found
    // (see `find_ollama_data`).
    data_path: Option<PathBuf>,
}
/// Wire format for Ollama's model-listing response
/// (presumably `GET /api/tags`, the URL built in `check_availability` —
/// confirm against the Ollama API docs).
#[derive(Debug, Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModel>,
}
/// A single model entry as reported by the Ollama API.
#[derive(Debug, Deserialize)]
struct OllamaModel {
    name: String,
    // Optional metadata fields; deserialized but not consumed anywhere
    // in this file yet (hence the file-level `allow(dead_code)`).
    modified_at: Option<String>,
    size: Option<u64>,
}
/// One chat turn in Ollama's wire format: a role string
/// (this file produces/consumes "user" and "assistant") plus the text.
#[derive(Debug, Serialize, Deserialize)]
struct OllamaChatMessage {
    role: String,
    content: String,
}
/// Request body for Ollama's chat endpoint (serialize-only; no request is
/// actually sent from this file yet).
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<OllamaChatMessage>,
    // `false` requests a single complete response instead of a token stream.
    stream: bool,
}
/// Response body for Ollama's chat endpoint (deserialize-only; unused here
/// so far — kept for the eventual HTTP implementation).
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    message: OllamaChatMessage,
    // Completion flag; relevant when `stream: true` chunks are consumed.
    done: bool,
}
impl OllamaProvider {
    /// Builds a provider from the environment.
    ///
    /// Reads `OLLAMA_HOST` (falling back to the default local endpoint),
    /// looks for Ollama's on-disk data directory, and records whether the
    /// endpoint looks usable. Always returns `Some` today; the `Option`
    /// leaves room for discovery to fail in the future.
    pub fn discover() -> Option<Self> {
        let endpoint =
            std::env::var("OLLAMA_HOST").unwrap_or_else(|_| "http://localhost:11434".to_string());
        let data_path = Self::find_ollama_data();
        let available = Self::check_availability(&endpoint);
        Some(Self {
            endpoint,
            available,
            data_path,
        })
    }

    /// Locates Ollama's data directory, if any.
    ///
    /// Precedence: the `OLLAMA_MODELS` env var (taken as-is without an
    /// existence check), then the platform default — `~/.ollama` on
    /// Windows/macOS; on Linux the XDG data dir (`.../ollama`) followed by
    /// `~/.ollama`. Returns `None` when nothing exists.
    fn find_ollama_data() -> Option<PathBuf> {
        if let Ok(models_path) = std::env::var("OLLAMA_MODELS") {
            return Some(PathBuf::from(models_path));
        }
        // Windows and macOS share the same default location; the two
        // previously duplicated cfg blocks are merged into one.
        #[cfg(any(target_os = "windows", target_os = "macos"))]
        {
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }
        #[cfg(target_os = "linux")]
        {
            // Prefer the XDG data directory, then fall back to ~/.ollama.
            if let Some(data_dir) = dirs::data_dir() {
                let path = data_dir.join("ollama");
                if path.exists() {
                    return Some(path);
                }
            }
            let home = dirs::home_dir()?;
            let path = home.join(".ollama");
            if path.exists() {
                return Some(path);
            }
        }
        None
    }

    /// Stub availability probe: no HTTP request is made yet, so a non-empty
    /// endpoint is simply treated as "available". A real implementation
    /// would GET `{endpoint}/api/tags`. (The previous version built that
    /// URL into an unused local; the dead allocation has been removed.)
    fn check_availability(endpoint: &str) -> bool {
        !endpoint.is_empty()
    }

    /// Lists models known to the Ollama instance.
    ///
    /// Stub: always returns an empty list until the HTTP client is wired
    /// up (both the available and unavailable paths previously returned
    /// `Ok(Vec::new())`, so they are collapsed into one).
    pub fn list_models(&self) -> Result<Vec<String>> {
        Ok(Vec::new())
    }

    /// Converts a flat Ollama transcript into a `ChatSession`.
    ///
    /// Messages are paired up: each "user" message is held until the next
    /// "assistant" message, and the pair becomes one `ChatRequest`. Roles
    /// other than "user"/"assistant" are ignored, and a trailing user
    /// message with no assistant reply is dropped.
    fn convert_to_session(&self, messages: Vec<OllamaChatMessage>, model: &str) -> ChatSession {
        let now = chrono::Utc::now().timestamp_millis();
        let session_id = uuid::Uuid::new_v4().to_string();
        let mut requests = Vec::new();
        // Most recent user message still waiting for an assistant reply.
        let mut user_msg: Option<String> = None;
        for msg in messages {
            match msg.role.as_str() {
                "user" => {
                    user_msg = Some(msg.content);
                }
                "assistant" => {
                    if let Some(user_text) = user_msg.take() {
                        requests.push(ChatRequest {
                            timestamp: Some(now),
                            message: Some(ChatMessage {
                                text: Some(user_text),
                                parts: None,
                            }),
                            // Response is wrapped in the nested value shape
                            // the session format expects.
                            response: Some(serde_json::json!({
                                "value": [{"value": msg.content}]
                            })),
                            variable_data: None,
                            request_id: Some(uuid::Uuid::new_v4().to_string()),
                            response_id: Some(uuid::Uuid::new_v4().to_string()),
                            model_id: Some(format!("ollama/{}", model)),
                            agent: None,
                            result: None,
                            followups: None,
                            is_canceled: Some(false),
                            content_references: None,
                            code_citations: None,
                            response_markdown_info: None,
                            source_session: None,
                            model_state: None,
                            time_spent_waiting: None,
                        });
                    }
                }
                _ => {}
            }
        }
        ChatSession {
            version: 3,
            session_id: Some(session_id),
            // All timestamps use the single `now` captured above, so
            // creation and last-message times are identical by design.
            creation_date: now,
            last_message_date: now,
            is_imported: true,
            initial_location: "ollama".to_string(),
            custom_title: Some(format!("Ollama Chat ({})", model)),
            requester_username: Some("user".to_string()),
            requester_avatar_icon_uri: None,
            responder_username: Some(format!("Ollama/{}", model)),
            responder_avatar_icon_uri: None,
            requests,
        }
    }
}
impl ChatProvider for OllamaProvider {
    fn provider_type(&self) -> ProviderType {
        ProviderType::Ollama
    }

    // Human-readable provider name.
    fn name(&self) -> &str {
        "Ollama"
    }

    // Reflects the (currently stubbed) availability probe from `discover`.
    fn is_available(&self) -> bool {
        self.available
    }

    // The discovered data directory, if any; cloned so the caller owns it.
    fn sessions_path(&self) -> Option<PathBuf> {
        self.data_path.clone()
    }

    // Stub: Ollama does not persist sessions, so there is nothing to list.
    fn list_sessions(&self) -> Result<Vec<ChatSession>> {
        Ok(Vec::new())
    }

    // Always fails: there are no persisted Ollama sessions to import from.
    fn import_session(&self, _session_id: &str) -> Result<ChatSession> {
        anyhow::bail!("Ollama does not persist chat sessions by default")
    }

    // Always fails: exporting back into Ollama is not implemented yet.
    fn export_session(&self, _session: &ChatSession) -> Result<()> {
        anyhow::bail!("Export to Ollama not yet implemented")
    }
}
/// Builds a `ChatSession` from alternating `(user, assistant)` message
/// pairs, attributed to the given Ollama `model`.
///
/// The throwaway provider exists only so `convert_to_session` can be
/// reused; its endpoint/availability fields are never consulted here.
pub fn create_ollama_session(
    messages: Vec<(String, String)>, model: &str,
) -> ChatSession {
    let provider = OllamaProvider {
        endpoint: String::new(),
        available: false,
        data_path: None,
    };
    // Expand every (user, assistant) pair into two role-tagged messages,
    // preserving order: user first, then the assistant reply.
    let mut ollama_messages = Vec::with_capacity(messages.len() * 2);
    for (user_text, assistant_text) in messages {
        ollama_messages.push(OllamaChatMessage {
            role: "user".to_string(),
            content: user_text,
        });
        ollama_messages.push(OllamaChatMessage {
            role: "assistant".to_string(),
            content: assistant_text,
        });
    }
    provider.convert_to_session(ollama_messages, model)
}