#![allow(dead_code)]
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// A single message in an agency conversation transcript.
///
/// Chat-completion style entry: who spoke (`role`), what was said
/// (`content`), plus optional tool-call bookkeeping and free-form metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgencyMessage {
/// Unique identifier for this message.
pub id: String,
/// Author of the message (user / assistant / system / tool).
pub role: MessageRole,
/// Textual payload of the message.
pub content: String,
/// Tool invocations requested by this message; omitted from JSON when empty.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tool_calls: Vec<ToolCall>,
/// Result of a tool invocation, when this message carries one.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_result: Option<ToolResult>,
/// When the message was created.
/// NOTE(review): unlike `ToolCall::timestamp` this has no serde default,
/// so deserialization fails if the field is absent — confirm intended.
pub timestamp: DateTime<Utc>,
/// Token count attributed to this message, when known.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tokens: Option<u32>,
/// Name of the agent that produced the message, when applicable.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub agent_name: Option<String>,
/// Free-form extra data; omitted from JSON when empty.
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub metadata: HashMap<String, serde_json::Value>,
}
/// Author role of an [`AgencyMessage`], chat-completion style.
///
/// Serialized in lowercase (e.g. `"assistant"`), which matches this
/// type's `Display` output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MessageRole {
/// End-user input.
User,
/// Model/agent output.
Assistant,
/// System or instruction prompt.
System,
/// Output of a tool invocation.
Tool,
}
impl std::fmt::Display for MessageRole {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
MessageRole::User => write!(f, "user"),
MessageRole::Assistant => write!(f, "assistant"),
MessageRole::System => write!(f, "system"),
MessageRole::Tool => write!(f, "tool"),
}
}
}
/// A request to invoke a named tool, as emitted by an agent.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
/// Identifier correlating this call with its `ToolResult::call_id`.
pub id: String,
/// Name of the tool to invoke.
pub name: String,
/// Tool arguments as arbitrary JSON.
pub arguments: serde_json::Value,
/// When the call was issued; defaults to "now" when absent in input.
#[serde(default = "Utc::now")]
pub timestamp: DateTime<Utc>,
}
/// Outcome of executing a [`ToolCall`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
/// Matches the `ToolCall::id` this result answers.
pub call_id: String,
/// Name of the tool that ran.
pub name: String,
/// Whether execution succeeded.
pub success: bool,
/// Textual output (or error text) of the tool.
pub content: String,
/// Wall-clock execution time in milliseconds; 0 when absent in input.
#[serde(default)]
pub duration_ms: u64,
/// Optional structured payload accompanying `content`.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub data: Option<serde_json::Value>,
}
/// A lifecycle/progress event emitted by an agent, task, or swarm.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgencyEvent {
/// Kind of event (see [`EventType`]).
pub event_type: EventType,
/// Which agent emitted the event.
pub agent_name: String,
/// Event-specific payload as arbitrary JSON.
pub data: serde_json::Value,
/// When the event occurred.
/// NOTE(review): no serde default here, unlike `ToolCall::timestamp` —
/// confirm required-on-deserialize is intended.
pub timestamp: DateTime<Utc>,
/// Session this event belongs to, when known.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub session_id: Option<String>,
}
/// Kind tag for an [`AgencyEvent`].
///
/// Serialized in snake_case (e.g. `"agent_started"`), which matches this
/// type's `Display` output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum EventType {
// Agent lifecycle.
AgentStarted,
AgentThinking,
AgentExecuting,
AgentCompleted,
AgentFailed,
// Tool-call lifecycle.
ToolCallStarted,
ToolCallCompleted,
ToolCallFailed,
// Message streaming.
MessageCreated,
MessageDelta,
// Task lifecycle.
TaskCreated,
TaskStarted,
TaskCompleted,
TaskFailed,
// Swarm lifecycle.
SwarmStarted,
SwarmAgentJoined,
SwarmCompleted,
SwarmFailed,
// Control transfer between agents.
Handoff,
// Generic failure notification.
Error,
}
impl std::fmt::Display for EventType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
EventType::AgentStarted => write!(f, "agent_started"),
EventType::AgentThinking => write!(f, "agent_thinking"),
EventType::AgentExecuting => write!(f, "agent_executing"),
EventType::AgentCompleted => write!(f, "agent_completed"),
EventType::AgentFailed => write!(f, "agent_failed"),
EventType::ToolCallStarted => write!(f, "tool_call_started"),
EventType::ToolCallCompleted => write!(f, "tool_call_completed"),
EventType::ToolCallFailed => write!(f, "tool_call_failed"),
EventType::MessageCreated => write!(f, "message_created"),
EventType::MessageDelta => write!(f, "message_delta"),
EventType::TaskCreated => write!(f, "task_created"),
EventType::TaskStarted => write!(f, "task_started"),
EventType::TaskCompleted => write!(f, "task_completed"),
EventType::TaskFailed => write!(f, "task_failed"),
EventType::SwarmStarted => write!(f, "swarm_started"),
EventType::SwarmAgentJoined => write!(f, "swarm_agent_joined"),
EventType::SwarmCompleted => write!(f, "swarm_completed"),
EventType::SwarmFailed => write!(f, "swarm_failed"),
EventType::Handoff => write!(f, "handoff"),
EventType::Error => write!(f, "error"),
}
}
}
/// Aggregated token accounting for one or more model calls.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct TokenUsage {
/// Tokens consumed by the prompt/input.
pub prompt_tokens: u32,
/// Tokens produced in the completion/output.
pub completion_tokens: u32,
/// Sum of prompt and completion tokens (maintained by `new`/`add`).
pub total_tokens: u32,
}
impl TokenUsage {
    /// Builds a usage record from prompt and completion counts.
    ///
    /// `total_tokens` is derived as their sum. Saturating arithmetic is
    /// used so values near `u32::MAX` clamp instead of panicking in debug
    /// builds (or silently wrapping in release builds).
    pub fn new(prompt: u32, completion: u32) -> Self {
        Self {
            prompt_tokens: prompt,
            completion_tokens: completion,
            total_tokens: prompt.saturating_add(completion),
        }
    }

    /// Accumulates another usage record into this one.
    ///
    /// Saturating addition keeps long-running accumulation from
    /// overflow-panicking in debug builds.
    pub fn add(&mut self, other: &TokenUsage) {
        self.prompt_tokens = self.prompt_tokens.saturating_add(other.prompt_tokens);
        self.completion_tokens = self.completion_tokens.saturating_add(other.completion_tokens);
        self.total_tokens = self.total_tokens.saturating_add(other.total_tokens);
    }
}
/// Configuration describing which LLM to call and how.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
/// Model identifier, e.g. "gemini-2.5-flash".
pub model: String,
/// Hosting provider; defaults to `ModelProvider::Google`.
#[serde(default)]
pub provider: ModelProvider,
/// Base-URL override. When `None`, presumably the consumer falls back to
/// `ModelProvider::default_endpoint` — TODO confirm against callers.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub endpoint: Option<String>,
/// API key. NOTE(review): serialized in plain text when present —
/// confirm configs containing it are never logged or persisted insecurely.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub api_key: Option<String>,
/// Sampling temperature; serde default is 0.7 (see `default_temperature`).
#[serde(default = "default_temperature")]
pub temperature: f32,
/// Maximum tokens to generate, if capped.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<u32>,
/// Nucleus-sampling parameter, if overridden.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub top_p: Option<f32>,
}
/// Serde default for `ModelConfig::temperature`.
fn default_temperature() -> f32 {
    0.7
}
impl Default for ModelConfig {
fn default() -> Self {
Self {
model: "gemini-2.5-flash".to_string(),
provider: ModelProvider::Google,
endpoint: None,
api_key: None,
temperature: 0.7,
max_tokens: None,
top_p: None,
}
}
}
/// Supported LLM backends, both hosted and local.
///
/// NOTE(review): serde's `lowercase` renders `OpenAICompatible` as
/// "openaicompatible", while the `Display` impl prints "openai_compatible".
/// Confirm the mismatch is intentional before treating Display output as a
/// serde-compatible key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ModelProvider {
/// Default provider.
#[default]
Google,
// Hosted cloud providers.
OpenAI,
Anthropic,
Azure,
Groq,
Together,
Fireworks,
DeepSeek,
Mistral,
Cohere,
Perplexity,
// Local inference runtimes (see `is_local`).
Ollama,
LMStudio,
Jan,
GPT4All,
LocalAI,
Llamafile,
TextGenWebUI,
VLLM,
KoboldCpp,
TabbyML,
Exo,
// Escape hatches for user-supplied endpoints.
OpenAICompatible,
Custom,
}
impl ModelProvider {
pub fn default_endpoint(&self) -> Option<&'static str> {
match self {
ModelProvider::Google => Some("https://generativelanguage.googleapis.com/v1"),
ModelProvider::OpenAI => Some("https://api.openai.com/v1"),
ModelProvider::Anthropic => Some("https://api.anthropic.com/v1"),
ModelProvider::Azure => None, ModelProvider::Groq => Some("https://api.groq.com/openai/v1"),
ModelProvider::Together => Some("https://api.together.xyz/v1"),
ModelProvider::Fireworks => Some("https://api.fireworks.ai/inference/v1"),
ModelProvider::DeepSeek => Some("https://api.deepseek.com/v1"),
ModelProvider::Mistral => Some("https://api.mistral.ai/v1"),
ModelProvider::Cohere => Some("https://api.cohere.ai/v1"),
ModelProvider::Perplexity => Some("https://api.perplexity.ai"),
ModelProvider::Ollama => Some("http://localhost:11434"),
ModelProvider::LMStudio => Some("http://localhost:1234/v1"),
ModelProvider::Jan => Some("http://localhost:1337/v1"),
ModelProvider::GPT4All => Some("http://localhost:4891/v1"),
ModelProvider::LocalAI => Some("http://localhost:8080/v1"),
ModelProvider::Llamafile => Some("http://localhost:8080/v1"),
ModelProvider::TextGenWebUI => Some("http://localhost:5000/v1"),
ModelProvider::VLLM => Some("http://localhost:8000/v1"),
ModelProvider::KoboldCpp => Some("http://localhost:5001/v1"),
ModelProvider::TabbyML => Some("http://localhost:8080/v1"),
ModelProvider::Exo => Some("http://localhost:52415/v1"),
ModelProvider::OpenAICompatible => None, ModelProvider::Custom => None,
}
}
pub fn is_local(&self) -> bool {
matches!(
self,
ModelProvider::Ollama
| ModelProvider::LMStudio
| ModelProvider::Jan
| ModelProvider::GPT4All
| ModelProvider::LocalAI
| ModelProvider::Llamafile
| ModelProvider::TextGenWebUI
| ModelProvider::VLLM
| ModelProvider::KoboldCpp
| ModelProvider::TabbyML
| ModelProvider::Exo
)
}
pub fn is_openai_compatible(&self) -> bool {
matches!(
self,
ModelProvider::OpenAI
| ModelProvider::Azure
| ModelProvider::Groq
| ModelProvider::Together
| ModelProvider::Fireworks
| ModelProvider::DeepSeek
| ModelProvider::Mistral
| ModelProvider::Perplexity
| ModelProvider::LMStudio
| ModelProvider::Jan
| ModelProvider::GPT4All
| ModelProvider::LocalAI
| ModelProvider::Llamafile
| ModelProvider::TextGenWebUI
| ModelProvider::VLLM
| ModelProvider::KoboldCpp
| ModelProvider::TabbyML
| ModelProvider::Exo
| ModelProvider::OpenAICompatible
)
}
}
impl std::fmt::Display for ModelProvider {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ModelProvider::Google => write!(f, "google"),
ModelProvider::OpenAI => write!(f, "openai"),
ModelProvider::Anthropic => write!(f, "anthropic"),
ModelProvider::Azure => write!(f, "azure"),
ModelProvider::Groq => write!(f, "groq"),
ModelProvider::Together => write!(f, "together"),
ModelProvider::Fireworks => write!(f, "fireworks"),
ModelProvider::DeepSeek => write!(f, "deepseek"),
ModelProvider::Mistral => write!(f, "mistral"),
ModelProvider::Cohere => write!(f, "cohere"),
ModelProvider::Perplexity => write!(f, "perplexity"),
ModelProvider::Ollama => write!(f, "ollama"),
ModelProvider::LMStudio => write!(f, "lmstudio"),
ModelProvider::Jan => write!(f, "jan"),
ModelProvider::GPT4All => write!(f, "gpt4all"),
ModelProvider::LocalAI => write!(f, "localai"),
ModelProvider::Llamafile => write!(f, "llamafile"),
ModelProvider::TextGenWebUI => write!(f, "textgenwebui"),
ModelProvider::VLLM => write!(f, "vllm"),
ModelProvider::KoboldCpp => write!(f, "koboldcpp"),
ModelProvider::TabbyML => write!(f, "tabbyml"),
ModelProvider::Exo => write!(f, "exo"),
ModelProvider::OpenAICompatible => write!(f, "openai_compatible"),
ModelProvider::Custom => write!(f, "custom"),
}
}
}