#![allow(warnings)]
use serde::{Serialize, Deserialize};
use serde::de::DeserializeOwned;
use indexmap::IndexMap;
use atlas_http::{HttpClient, HttpBody, HttpRequest};
use regex::Regex;
use std::io;
/// Configuration for one LLM endpoint: provider, credentials, model name and
/// optional sampling parameters. When `api_url` is non-empty it overrides the
/// provider's default completion URL; either URL may contain the `~model~`
/// and `~api_key~` placeholders, which `send` substitutes before dispatch.
#[derive(Default, Clone, Serialize, Deserialize)]
pub struct LlmProfile {
pub provider: LlmProvider, pub api_key: String, pub model_name: String, pub temperature: Option<f32>, pub max_tokens: Option<usize>, pub api_url: String, }
/// Supported LLM backends; `ollama` is the default. Variants are lowercase
/// (non-idiomatic Rust, silenced by the crate-level `#![allow(warnings)]`);
/// they double as the serde wire format, so renaming them to CamelCase would
/// break serialized profiles — left as-is deliberately.
#[derive(Default, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub enum LlmProvider {
#[default]
ollama, openai, anthropic, google, xai, mistral, deepseek, groq, together, other }
/// One turn in a chat conversation (OpenAI-style `{role, content}` pair);
/// used in both requests and responses.
#[derive(Serialize, Deserialize)]
struct ChatMessage {
role: String, content: String, }
/// OpenAI-compatible chat-completion request body, used for every provider
/// except Ollama (which takes `ChatRequest_Ollama` instead).
#[derive(Serialize, Deserialize)]
struct ChatRequest {
model: String, messages: Vec<ChatMessage>, temperature: Option<f32>, max_tokens: Option<usize>, }
/// Ollama `/api/chat` request body; `stream: false` requests a single
/// non-streamed response.
/// NOTE(review): Ollama documents sampling parameters (`temperature`,
/// `num_predict`) nested under an `options` object, not at the top level —
/// these flat fields may be ignored by the server; confirm against the
/// Ollama API reference.
#[derive(Serialize, Deserialize)]
struct ChatRequest_Ollama {
model: String, messages: Vec<ChatMessage>, stream: bool, temperature: Option<f32>, max_tokens: Option<usize>, }
/// One completion choice in an OpenAI-style response. Also reused to parse
/// Ollama's top-level response, which has the same `{"message": {...}}` shape.
#[derive(Serialize, Deserialize)]
struct ChatChoice {
message: ChatMessage, }
/// Minimal OpenAI-style chat-completion response: only the `choices` array
/// is needed; all other response fields are ignored by serde.
#[derive(Serialize, Deserialize)]
struct ChatResponse {
choices: Vec<ChatChoice>, }
impl LlmProfile {
/// Sends a single-turn user message to the configured provider and returns
/// the assistant's reply text.
///
/// Ollama uses its own request schema, so that case is delegated to
/// `send_ollama`; every other provider gets an OpenAI-compatible payload.
/// Serialization failures surface as `io::ErrorKind::InvalidData`.
pub fn send_single(&self, message: &str) -> Result<String, io::Error> {
    if matches!(self.provider, LlmProvider::ollama) {
        return self.send_ollama(message);
    }
    let payload = ChatRequest {
        model: self.model_name.clone(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: message.to_string(),
        }],
        temperature: self.temperature,
        max_tokens: self.max_tokens,
    };
    serde_json::to_string(&payload)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))
        .and_then(|encoded| self.send(&encoded))
}
/// Sends a single-turn user message using Ollama's `/api/chat` request
/// schema (`stream: false` asks for one complete, non-streamed reply).
///
/// Serialization failures surface as `io::ErrorKind::InvalidData`; transport
/// and HTTP errors come from `send`.
pub fn send_ollama(&self, message: &str) -> Result<String, io::Error> {
    let turn = ChatMessage {
        role: "user".to_string(),
        content: message.to_string(),
    };
    let payload = ChatRequest_Ollama {
        model: self.model_name.clone(),
        stream: false,
        messages: vec![turn],
        temperature: self.temperature,
        max_tokens: self.max_tokens,
    };
    let encoded = serde_json::to_string(&payload)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
    self.send(&encoded)
}
// POSTs the pre-serialized JSON body to the provider endpoint and extracts
// the reply text. Non-200 statuses, transport failures, and unparseable
// bodies all map onto io::Error.
// NOTE(review): a Bearer Authorization header is sent to every provider.
// Anthropic documents `x-api-key` (+ `anthropic-version`) instead, and
// Google passes the key via the `~api_key~` URL placeholder — verify those
// two providers actually work through this path.
fn send(&self, json_str: &str) -> Result<String, io::Error> {
let body = HttpBody::from_raw_str(json_str);
// auth_line must outlive auth_header, which borrows it via as_str().
let auth_line = format!("Authorization: Bearer {}", self.api_key);
let auth_header = vec![
auth_line.as_str(),
"Content-type: application/json"
];
// Explicit api_url wins; otherwise use the provider's default endpoint.
let mut url = if self.api_url.is_empty() {
self.provider.get_completion_url()
} else {
self.api_url.clone()
};
// Substitute placeholders (used by e.g. the Google Gemini URL template).
url = url.replace("~model~", &self.model_name);
url = url.replace("~api_key~", &self.api_key);
let req = HttpRequest::new("POST", &url, &auth_header, &body);
let mut http = HttpClient::builder().browser().build_sync();
let res = http.send(&req)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
// Treat anything other than 200 OK as failure.
if res.status_code() != 200 {
return Err(io::Error::new(
io::ErrorKind::Other,
format!("HTTP error: {}", res.status_code())
));
}
if self.provider == LlmProvider::ollama {
// Ollama returns a top-level {"message": {...}} object.
let json_res: ChatChoice = serde_json::from_str(&res.body())
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
Ok(json_res.message.content.clone())
} else {
// OpenAI-compatible providers return {"choices": [{"message": ...}]}.
let json_res: ChatResponse = serde_json::from_str(&res.body())
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e.to_string()))?;
if json_res.choices.is_empty() {
return Err(io::Error::new(
io::ErrorKind::Other,
"No choices in response"
));
}
Ok(json_res.choices[0].message.content.clone())
}
}
/// Extracts the first balanced JSON object (`is_object == true`) or array
/// (`false`) embedded in `input` and deserializes it into `T`.
///
/// Returns `None` when no opening delimiter exists, the delimiters never
/// balance, or deserialization fails.
///
/// Fix over the previous version: the scanner now tracks JSON string
/// literals (including `\"` escapes), so delimiters appearing *inside*
/// strings — e.g. `{"a": "}"}` — no longer corrupt the depth count and
/// truncate the extracted span.
pub fn extract_json<T: DeserializeOwned>(&self, input: &str, is_object: bool) -> Option<T> {
    let (open, close) = if is_object { ('{', '}') } else { ('[', ']') };
    // Byte offset of the first opening delimiter; bail out if absent.
    let start_idx = input.find(open)?;
    let mut depth: usize = 0;
    let mut in_string = false;
    let mut escaped = false;
    let mut end_idx: Option<usize> = None;
    for (i, c) in input[start_idx..].char_indices() {
        if in_string {
            // Inside a string literal: delimiters are literal text.
            if escaped {
                escaped = false;
            } else if c == '\\' {
                escaped = true;
            } else if c == '"' {
                in_string = false;
            }
            continue;
        }
        if c == '"' {
            in_string = true;
        } else if c == open {
            depth += 1;
        } else if c == close {
            // depth is >= 1 here: the scan starts on `open`, so the first
            // iteration increments before any close can be seen.
            depth -= 1;
            if depth == 0 {
                // close is ASCII, so +1 lands on the next char boundary.
                end_idx = Some(start_idx + i + 1);
                break;
            }
        }
    }
    // None here means the delimiters never balanced (truncated input).
    let end = end_idx?;
    serde_json::from_str(&input[start_idx..end]).ok()
}
/// Best-effort extraction of a `T` from free-form model output. Tries, in
/// order: a JSON object, a JSON array, the first JSON string literal, and
/// finally a bare scalar (bool / null / number). Returns `None` when every
/// strategy fails or a fallback regex fails to compile.
pub fn extract_json_flexible<T: DeserializeOwned>(&self, input: &str) -> Option<T> {
    // Structured forms first: object, then array.
    for as_object in [true, false] {
        if let Some(value) = self.extract_json::<T>(input, as_object) {
            return Some(value);
        }
    }
    // Next, the first well-formed JSON string literal in the text.
    let string_pat = Regex::new(r#""([^"\\]|\\[\s\S])*""#).ok()?;
    let from_string = string_pat
        .find(input)
        .and_then(|m| serde_json::from_str::<T>(m.as_str()).ok());
    if from_string.is_some() {
        return from_string;
    }
    // Last resort: a bare scalar token.
    let scalar_pat = Regex::new(r"\b(true|false|null|-?\d+(\.\d+)?([eE][+-]?\d+)?)\b").ok()?;
    scalar_pat
        .find(input)
        .and_then(|m| serde_json::from_str::<T>(m.as_str()).ok())
}
/// Builds a profile from string/primitive parts. The provider slug is
/// resolved case-insensitively (unknown slugs become `other`); `api_url`
/// starts empty, so the provider's default endpoint will be used.
pub fn from_str(
    provider_slug: &str,
    model_name: &str,
    api_key: &str,
    temperature: Option<f32>,
    max_tokens: Option<usize>,
) -> Self {
    LlmProfile {
        provider: LlmProvider::from_str(provider_slug),
        api_key: api_key.to_string(),
        model_name: model_name.to_string(),
        temperature,
        max_tokens,
        // Remaining fields (api_url) take their Default values.
        ..Default::default()
    }
}
}
impl LlmProvider {
pub fn from_usize(value: usize) -> Self {
match value {
0 => Self::ollama,
1 => Self::openai,
2 => Self::anthropic,
3 => Self::google,
4 => Self::xai,
5 => Self::mistral,
6 => Self::deepseek,
7 => Self::groq,
8 => Self::together,
_ => Self::other
}
}
/// Human-readable display name for the provider (e.g. "Google Gemini").
/// Note: this inherent method shadows the blanket `ToString::to_string`.
pub fn to_string(&self) -> String {
    let label = match self {
        Self::ollama => "Ollama",
        Self::openai => "OpenAI",
        Self::anthropic => "Anthropic",
        Self::google => "Google Gemini",
        Self::xai => "X.ai",
        Self::mistral => "Mistral",
        Self::deepseek => "Deepseek",
        Self::groq => "Groq",
        Self::together => "TogetherAI",
        _ => "Other",
    };
    label.to_string()
}
/// Machine-friendly lowercase slug for the provider; the inverse of
/// `from_str` for every variant except `other`.
pub fn to_slug(&self) -> String {
    String::from(match self {
        Self::ollama => "ollama",
        Self::openai => "openai",
        Self::anthropic => "anthropic",
        Self::google => "google",
        Self::xai => "xai",
        Self::mistral => "mistral",
        Self::deepseek => "deepseek",
        Self::groq => "groq",
        Self::together => "together",
        _ => "other",
    })
}
/// Builds an ordered menu of providers keyed by their numeric index as a
/// string ("1" => "OpenAI", ... "8" => "TogetherAI").
/// NOTE(review): the 1..9 range skips ollama (0) and other — presumably the
/// local/fallback backends are intentionally not menu options; confirm.
pub fn get_indexmap_options() -> IndexMap<String, String> {
    (1..9)
        .map(|idx| (idx.to_string(), Self::from_usize(idx).to_string()))
        .collect()
}
/// Default chat-completion endpoint for each provider. The Google template
/// carries `~model~` / `~api_key~` placeholders that `send` substitutes.
///
/// Fix: the Mistral endpoint is hosted at `api.mistral.ai` — the previous
/// `api.mixtral.ai` confused the Mixtral *model* name with the API domain
/// and pointed at a non-Mistral host.
fn get_completion_url(&self) -> String {
    match self {
        LlmProvider::ollama => "http://localhost:11434/api/chat".to_string(),
        LlmProvider::openai => "https://api.openai.com/v1/chat/completions".to_string(),
        LlmProvider::anthropic => "https://api.anthropic.com/v1/messages".to_string(),
        LlmProvider::google => "https://generativelanguage.googleapis.com/v1beta/models/~model~:generateContent?key=~api_key~".to_string(),
        LlmProvider::xai => "https://api.x.ai/v1/chat/completions".to_string(),
        LlmProvider::mistral => "https://api.mistral.ai/v1/chat/completions".to_string(),
        LlmProvider::deepseek => "https://api.deepseek.com/v1/chat/completions".to_string(),
        LlmProvider::groq => "https://api.groq.com/openai/v1/chat/completions".to_string(),
        LlmProvider::together => "https://api.together.xyz/v1/chat/completions".to_string(),
        // `other` assumes a locally hosted OpenAI-compatible server.
        LlmProvider::other => "http://localhost:8000/v1/chat/completions".to_string(),
    }
}
fn from_str(slug: &str) -> Self {
match slug.to_lowercase().as_str() {
"ollama" => LlmProvider::ollama,
"openai" => LlmProvider::openai,
"anthropic" => LlmProvider::anthropic,
"google" => LlmProvider::google,
"xai" => LlmProvider::xai,
"mistral" => LlmProvider::mistral,
"deepseek" => LlmProvider::deepseek,
"groq" => LlmProvider::groq,
"together" => LlmProvider::together,
_ => LlmProvider::other,
}
}
}