use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
/// A source citation attached to a research report.
///
/// `start_index`/`end_index` are byte offsets into the report text that the
/// citation covers; both default to 0 when absent from serialized input.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Citation {
/// Human-readable title of the cited source.
pub title: String,
/// URL of the cited source.
pub url: String,
/// Start byte offset of the cited span in the report (0 when unknown).
#[serde(default)]
pub start_index: usize,
/// End byte offset (exclusive) of the cited span (0 when unknown).
#[serde(default)]
pub end_index: usize,
}
impl Citation {
pub fn new(title: impl Into<String>, url: impl Into<String>) -> Self {
Self {
title: title.into(),
url: url.into(),
start_index: 0,
end_index: 0,
}
}
}
/// A single step in a model's reasoning trace.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReasoningStep {
/// Text content of the step.
pub text: String,
/// Kind of step; defaults to "reasoning" when absent from serialized input.
#[serde(default = "default_reasoning_type")]
pub step_type: String,
}
/// Serde default for [`ReasoningStep::step_type`].
fn default_reasoning_type() -> String {
    String::from("reasoning")
}
impl ReasoningStep {
pub fn new(text: impl Into<String>) -> Self {
Self {
text: text.into(),
step_type: "reasoning".to_string(),
}
}
}
/// Record of a web-search tool invocation made during research.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSearchCall {
/// The search query that was issued.
pub query: String,
/// Status string reported for the call (e.g. as received from the provider).
pub status: String,
}
impl WebSearchCall {
pub fn new(query: impl Into<String>, status: impl Into<String>) -> Self {
Self {
query: query.into(),
status: status.into(),
}
}
}
/// Record of a code-execution tool step: the code that ran and, when
/// available, its captured output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CodeExecutionStep {
/// The code that was executed.
pub input_code: String,
/// Captured output, if any was produced/recorded.
pub output: Option<String>,
}
impl CodeExecutionStep {
pub fn new(input_code: impl Into<String>) -> Self {
Self {
input_code: input_code.into(),
output: None,
}
}
pub fn with_output(input_code: impl Into<String>, output: impl Into<String>) -> Self {
Self {
input_code: input_code.into(),
output: Some(output.into()),
}
}
}
/// Record of a file-search tool invocation over named stores.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileSearchCall {
/// Names of the vector/file stores that were searched.
pub store_names: Vec<String>,
}
impl FileSearchCall {
pub fn new(store_names: Vec<String>) -> Self {
Self { store_names }
}
}
/// Backend LLM provider for a deep-research response.
///
/// Serialized in lowercase ("openai", "gemini", "litellm").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Provider {
OpenAI,
Gemini,
LiteLLM,
}
impl Default for Provider {
/// OpenAI is the default provider.
fn default() -> Self {
Self::OpenAI
}
}
impl std::fmt::Display for Provider {
    /// Formats the provider as its lowercase wire name, matching the
    /// serde `rename_all = "lowercase"` representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::OpenAI => "openai",
            Self::Gemini => "gemini",
            Self::LiteLLM => "litellm",
        };
        f.write_str(name)
    }
}
/// Aggregated result of a deep-research run: the final report plus the
/// intermediate artifacts (citations, reasoning, tool calls) that produced it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepResearchResponse {
/// The final report text.
pub report: String,
/// Citations referencing spans of `report`.
#[serde(default)]
pub citations: Vec<Citation>,
/// Reasoning steps recorded while producing the report.
#[serde(default)]
pub reasoning_steps: Vec<ReasoningStep>,
/// Web searches performed during research.
#[serde(default)]
pub web_searches: Vec<WebSearchCall>,
/// Code-execution steps performed during research.
#[serde(default)]
pub code_executions: Vec<CodeExecutionStep>,
/// File-store searches performed during research.
#[serde(default)]
pub file_searches: Vec<FileSearchCall>,
/// Provider that produced the response; defaults to OpenAI.
#[serde(default)]
pub provider: Provider,
/// Provider-side interaction/response identifier, if one was returned.
pub interaction_id: Option<String>,
}
impl DeepResearchResponse {
    /// Creates a response containing only the report text, with all artifact
    /// lists empty, the default provider, and no interaction id.
    pub fn new(report: impl Into<String>) -> Self {
        Self {
            report: report.into(),
            citations: Vec::new(),
            reasoning_steps: Vec::new(),
            web_searches: Vec::new(),
            code_executions: Vec::new(),
            file_searches: Vec::new(),
            provider: Provider::default(),
            interaction_id: None,
        }
    }

    /// Returns the slice of the report covered by `citation`, or `""` when
    /// the range is empty, out of bounds, or does not fall on UTF-8 character
    /// boundaries.
    pub fn get_citation_text(&self, citation: &Citation) -> &str {
        if citation.start_index >= citation.end_index {
            return "";
        }
        // `str::get` performs the bounds AND char-boundary checks; direct
        // slicing would panic if a citation index landed inside a multi-byte
        // character.
        self.report
            .get(citation.start_index..citation.end_index)
            .unwrap_or("")
    }

    /// Returns each unique citation URL once, as a `{"title", "url"}` map,
    /// preserving first-seen order.
    pub fn get_all_sources(&self) -> Vec<HashMap<String, String>> {
        let mut seen = std::collections::HashSet::new();
        let mut sources = Vec::new();
        for c in &self.citations {
            // `insert` returns false for duplicates, avoiding a separate
            // `contains` lookup; storing `&str` avoids cloning every URL.
            if seen.insert(c.url.as_str()) {
                let mut source = HashMap::new();
                source.insert("title".to_string(), c.title.clone());
                source.insert("url".to_string(), c.url.clone());
                sources.push(source);
            }
        }
        sources
    }
}
/// Alias for [`Citation`], kept for backward compatibility.
pub type RAGCitation = Citation;
/// When retrieval should run for a query; serialized in snake_case.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum RetrievalPolicy {
/// Always retrieve (the default).
#[default]
Always,
/// Never retrieve.
Never,
/// Retrieve only when explicitly requested.
OnDemand,
/// Retrieve based on a threshold decision.
Threshold,
}
/// Alias for [`RetrievalPolicy`], kept for backward compatibility.
pub type RagRetrievalPolicy = RetrievalPolicy;
/// An LLM-evaluated guardrail: a prompt template run against a model to
/// check content, optionally blocking on failure.
#[derive(Debug, Clone)]
pub struct LLMGuardrail {
/// Guardrail identifier.
pub name: String,
/// Human-readable description of what the guardrail checks.
pub description: String,
/// Prompt template used to perform the check.
pub prompt_template: String,
/// Model used to evaluate the guardrail (defaults to "gpt-4o-mini").
pub model: String,
/// Whether a failed check should block the response.
pub block_on_failure: bool,
}
impl LLMGuardrail {
pub fn new(name: impl Into<String>, description: impl Into<String>) -> Self {
Self {
name: name.into(),
description: description.into(),
prompt_template: String::new(),
model: "gpt-4o-mini".to_string(),
block_on_failure: true,
}
}
pub fn with_prompt(mut self, prompt: impl Into<String>) -> Self {
self.prompt_template = prompt.into();
self
}
pub fn with_model(mut self, model: impl Into<String>) -> Self {
self.model = model.into();
self
}
pub fn block_on_failure(mut self, block: bool) -> Self {
self.block_on_failure = block;
self
}
}
/// Errors raised while handing off work between agents.
#[derive(Debug, Clone, thiserror::Error)]
pub enum HandoffError {
/// A cycle was detected in the handoff chain.
#[error("Handoff cycle detected: {agents:?}")]
Cycle { agents: Vec<String> },
/// The handoff chain exceeded the configured depth limit.
#[error("Maximum handoff depth {max_depth} exceeded")]
DepthExceeded { max_depth: usize },
/// The handoff did not complete within the timeout.
#[error("Handoff timed out after {timeout_ms}ms")]
Timeout { timeout_ms: u64 },
/// The target agent does not exist.
#[error("Agent not found: {name}")]
AgentNotFound { name: String },
/// The handoff configuration itself is invalid.
#[error("Invalid handoff configuration: {message}")]
InvalidConfig { message: String },
}
/// Aliases for [`HandoffError`], kept for backward compatibility with
/// call sites that named each failure mode separately.
pub type HandoffCycleError = HandoffError;
pub type HandoffDepthError = HandoffError;
pub type HandoffTimeoutError = HandoffError;
/// Minimal lifecycle contract for an agent application.
pub trait AgentAppProtocol: Send + Sync {
/// Application name.
fn name(&self) -> &str;
/// Application version string.
fn version(&self) -> &str;
/// Starts the application.
fn start(&self) -> crate::error::Result<()>;
/// Stops the application.
fn stop(&self) -> crate::error::Result<()>;
}
/// Marker trait for OS-level agent applications; adds no methods beyond
/// [`AgentAppProtocol`].
pub trait AgentOSProtocol: AgentAppProtocol {}
/// Configuration for serving an agent application.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AgentAppConfig {
/// Application name.
pub name: String,
/// Application version (defaults to "1.0.0" via [`AgentAppConfig::new`]).
pub version: String,
/// Bind host (defaults to "127.0.0.1" via [`AgentAppConfig::new`]).
pub host: String,
/// Bind port (defaults to 8000 via [`AgentAppConfig::new`]).
pub port: u16,
/// Whether debug mode is enabled.
pub debug: bool,
}
impl AgentAppConfig {
pub fn new(name: impl Into<String>) -> Self {
Self {
name: name.into(),
version: "1.0.0".to_string(),
host: "127.0.0.1".to_string(),
port: 8000,
debug: false,
}
}
}
/// Sandbox policy controlling what executed code may access.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SecurityPolicy {
/// Allow outbound network access.
pub allow_network: bool,
/// Allow filesystem access.
pub allow_filesystem: bool,
/// Allow spawning subprocesses.
pub allow_subprocess: bool,
/// Domains reachable when network access is allowed (empty = unrestricted;
/// presumably enforced by the executor — confirm).
pub allowed_domains: Vec<String>,
/// Paths accessible when filesystem access is allowed.
pub allowed_paths: Vec<String>,
/// Maximum execution time, in seconds.
pub max_execution_time: u64,
/// Maximum memory, in bytes.
pub max_memory: u64,
}
impl SecurityPolicy {
    /// Locked-down policy: no network, filesystem, or subprocess access,
    /// a 30-second budget, and a 256 MiB memory cap.
    pub fn restrictive() -> Self {
        // `Default` already yields all-false flags and empty allow-lists;
        // only the resource limits need explicit values.
        Self {
            max_execution_time: 30,
            max_memory: 256 * 1024 * 1024, // 256 MiB
            ..Self::default()
        }
    }

    /// Permissive policy: network, filesystem, and subprocess access all
    /// allowed, with a 5-minute budget and a 1 GiB memory cap.
    pub fn permissive() -> Self {
        Self {
            allow_network: true,
            allow_filesystem: true,
            allow_subprocess: true,
            allowed_domains: Vec::new(),
            allowed_paths: Vec::new(),
            max_execution_time: 300,
            max_memory: 1024 * 1024 * 1024, // 1 GiB
        }
    }
}
/// Result of a self-reflection pass over a model output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReflectionOutput {
/// The original output that was reflected upon.
pub original: String,
/// The reflection/critique text.
pub reflection: String,
/// Improved output, when the reflection produced one.
pub improved: Option<String>,
/// Confidence in the result (constructors set 1.0).
pub confidence: f64,
/// Whether the output was modified by reflection.
pub was_modified: bool,
}
impl ReflectionOutput {
pub fn new(original: impl Into<String>, reflection: impl Into<String>) -> Self {
Self {
original: original.into(),
reflection: reflection.into(),
improved: None,
confidence: 1.0,
was_modified: false,
}
}
pub fn with_improvement(
original: impl Into<String>,
reflection: impl Into<String>,
improved: impl Into<String>,
) -> Self {
Self {
original: original.into(),
reflection: reflection.into(),
improved: Some(improved.into()),
confidence: 1.0,
was_modified: true,
}
}
}
/// An embedding vector together with the model that produced it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingResult {
/// The embedding vector.
pub embedding: Vec<f32>,
/// Name of the model that produced the embedding.
pub model: String,
/// Token usage for the request, when reported.
pub usage: Option<EmbeddingUsage>,
}
/// Token accounting for an embedding request.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingUsage {
/// Tokens consumed by the prompt.
pub prompt_tokens: u32,
/// Total tokens consumed.
pub total_tokens: u32,
}
/// Returns a placeholder embedding for `text`.
///
/// This is a stub: the input text and model are currently ignored and a
/// zero vector of length 1536 is returned. `model` defaults to
/// "text-embedding-3-small" when `None`.
pub fn embed(text: &str, model: Option<&str>) -> crate::error::Result<Vec<f32>> {
    // Resolve the default model name the way a real backend would.
    let _model = model.unwrap_or("text-embedding-3-small");
    // Explicitly discard `text` until a real backend is wired up; this also
    // silences the unused-variable warning the original produced.
    let _ = text;
    Ok(vec![0.0; 1536])
}
/// Alias for [`embed`], kept for API parity.
pub fn embedding(text: &str, model: Option<&str>) -> crate::error::Result<Vec<f32>> {
embed(text, model)
}
/// Embeds each text in `texts`, stopping at the first error.
pub fn embeddings(texts: &[&str], model: Option<&str>) -> crate::error::Result<Vec<Vec<f32>>> {
    let mut collected = Vec::with_capacity(texts.len());
    for &text in texts {
        collected.push(embed(text, model)?);
    }
    Ok(collected)
}
/// Async form of [`embed`]; currently delegates to the synchronous stub
/// without awaiting anything.
pub async fn aembed(text: &str, model: Option<&str>) -> crate::error::Result<Vec<f32>> {
embed(text, model)
}
/// Alias for [`aembed`], kept for API parity.
pub async fn aembedding(text: &str, model: Option<&str>) -> crate::error::Result<Vec<f32>> {
aembed(text, model).await
}
/// Async form of [`embeddings`]: embeds each text sequentially, stopping
/// at the first error.
pub async fn aembeddings(texts: &[&str], model: Option<&str>) -> crate::error::Result<Vec<Vec<f32>>> {
    let mut collected = Vec::with_capacity(texts.len());
    for &text in texts {
        let vector = aembed(text, model).await?;
        collected.push(vector);
    }
    Ok(collected)
}
// Process-wide registries for display callbacks and captured error logs.
// NOTE(review): nothing in this chunk writes to these — presumably other
// modules register callbacks/logs; confirm.
lazy_static::lazy_static! {
static ref SYNC_DISPLAY_CALLBACKS: RwLock<Vec<Arc<dyn Fn(&str) + Send + Sync>>> = RwLock::new(Vec::new());
static ref ASYNC_DISPLAY_CALLBACKS: RwLock<Vec<Arc<dyn Fn(&str) + Send + Sync>>> = RwLock::new(Vec::new());
static ref ERROR_LOGS: RwLock<Vec<String>> = RwLock::new(Vec::new());
}
/// Returns a snapshot of the registered synchronous display callbacks.
/// Panics if the registry lock is poisoned.
pub fn sync_display_callbacks() -> Vec<Arc<dyn Fn(&str) + Send + Sync>> {
SYNC_DISPLAY_CALLBACKS.read().unwrap().clone()
}
/// Returns a snapshot of the registered asynchronous display callbacks.
/// Panics if the registry lock is poisoned.
pub fn async_display_callbacks() -> Vec<Arc<dyn Fn(&str) + Send + Sync>> {
ASYNC_DISPLAY_CALLBACKS.read().unwrap().clone()
}
/// Returns a snapshot of the captured error log messages.
/// Panics if the registry lock is poisoned.
pub fn error_logs() -> Vec<String> {
ERROR_LOGS.read().unwrap().clone()
}
lazy_static::lazy_static! {
    /// Named autonomy presets ("full", "supervised", "minimal"), each a map
    /// of `level` / `require_approval` / `max_iterations` settings.
    pub static ref AUTONOMY_PRESETS: HashMap<String, HashMap<String, serde_json::Value>> = {
        // Helper producing one preset map from its three settings.
        let preset = |level: &str, require_approval: bool, max_iterations: i64| {
            let mut settings = HashMap::new();
            settings.insert("level".to_string(), serde_json::json!(level));
            settings.insert("require_approval".to_string(), serde_json::json!(require_approval));
            settings.insert("max_iterations".to_string(), serde_json::json!(max_iterations));
            settings
        };
        let mut presets = HashMap::new();
        presets.insert("full".to_string(), preset("full", false, 100));
        presets.insert("supervised".to_string(), preset("supervised", true, 10));
        presets.insert("minimal".to_string(), preset("minimal", true, 1));
        presets
    };
}
/// Suggested system-prompt prefix for agents that can hand off tasks to
/// other specialized agents.
pub const RECOMMENDED_PROMPT_PREFIX: &str = r#"You are a helpful assistant that can hand off tasks to other specialized agents.
When you need to hand off a task:
1. Clearly state which agent you're handing off to
2. Provide all relevant context
3. Specify what you expect back
"#;
/// Resolves the effective guardrail policy names.
///
/// An explicit `policies` slice wins outright (even if empty); otherwise the
/// string entries of `config["guardrail_policies"]` are used; otherwise the
/// result is empty. Non-string array entries are skipped.
pub fn resolve_guardrail_policies(
    policies: Option<&[&str]>,
    config: Option<&serde_json::Value>,
) -> Vec<String> {
    match policies {
        Some(explicit) => explicit.iter().map(|s| (*s).to_string()).collect(),
        None => config
            .and_then(|c| c.get("guardrail_policies"))
            .and_then(|v| v.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str().map(String::from))
                    .collect()
            })
            .unwrap_or_default(),
    }
}
/// Tracing context: trace/span identifiers plus free-form attributes.
#[derive(Debug, Clone)]
pub struct TraceContextData {
/// Identifier of the overall trace.
pub trace_id: String,
/// Identifier of the current span.
pub span_id: String,
/// Identifier of the parent span, if any.
pub parent_span_id: Option<String>,
/// Arbitrary JSON attributes attached to the span.
pub attributes: HashMap<String, serde_json::Value>,
}
impl TraceContextData {
pub fn new() -> Self {
Self {
trace_id: uuid::Uuid::new_v4().to_string(),
span_id: uuid::Uuid::new_v4().to_string(),
parent_span_id: None,
attributes: HashMap::new(),
}
}
}
impl Default for TraceContextData {
/// Equivalent to [`TraceContextData::new`]: a fresh root context.
fn default() -> Self {
Self::new()
}
}
/// Returns a fresh root trace context.
pub fn trace_context() -> TraceContextData {
TraceContextData::new()
}
/// Creates a trace context for a named workflow, recording the name in the
/// `workflow_name` attribute. The parent context is currently ignored.
pub fn track_workflow(name: &str, _context: Option<&TraceContextData>) -> TraceContextData {
    let mut workflow_ctx = TraceContextData::new();
    workflow_ctx
        .attributes
        .insert("workflow_name".to_string(), serde_json::json!(name));
    workflow_ctx
}
/// Logs a plugin-load request.
// NOTE(review): this is a stub — it only logs and always returns Ok without
// actually loading anything from `path`; confirm intended.
pub fn load_plugin(path: &str) -> crate::error::Result<()> {
tracing::info!("Loading plugin from: {}", path);
Ok(())
}
/// Re-exports of the crate's configuration loading and parameter
/// resolution APIs under a stable path.
pub mod config {
pub use super::super::config_loader::*;
pub use super::super::param_resolver::*;
}
/// Intentionally empty placeholder module.
// NOTE(review): presumably reserved for namespace/API parity — confirm.
pub mod memory {
}
/// Intentionally empty placeholder module.
// NOTE(review): presumably reserved for namespace/API parity — confirm.
pub mod tools {
}
/// Re-exports of the crate's workflow aliases under a stable path.
pub mod workflows {
pub use super::super::workflow_aliases::*;
}
/// Minimal key-value persistence abstraction.
pub mod db {
/// Adapter over a JSON key-value store.
pub trait DbAdapter: Send + Sync {
/// Stores `value` under `key`.
fn store(&self, key: &str, value: &serde_json::Value) -> crate::error::Result<()>;
/// Retrieves the value stored under `key`, if present.
fn get(&self, key: &str) -> crate::error::Result<Option<serde_json::Value>>;
/// Removes the value stored under `key`.
fn delete(&self, key: &str) -> crate::error::Result<()>;
}
}
/// Observability abstraction.
pub mod obs {
/// Collector for observability events.
pub trait ObsCollector: Send + Sync {
/// Records a single event with its JSON payload.
fn record(&self, event: &str, data: &serde_json::Value);
/// Flushes any buffered events.
fn flush(&self);
}
}
// Unit tests for the compatibility types and helpers defined above.
#[cfg(test)]
mod tests {
use super::*;
// Constructor stores title and URL verbatim.
#[test]
fn test_citation() {
let citation = Citation::new("Test Title", "https://example.com");
assert_eq!(citation.title, "Test Title");
assert_eq!(citation.url, "https://example.com");
}
// Constructor applies the default step type.
#[test]
fn test_reasoning_step() {
let step = ReasoningStep::new("This is a reasoning step");
assert_eq!(step.step_type, "reasoning");
}
#[test]
fn test_web_search_call() {
let call = WebSearchCall::new("test query", "completed");
assert_eq!(call.query, "test query");
assert_eq!(call.status, "completed");
}
// `new` leaves output empty; `with_output` fills it.
#[test]
fn test_code_execution_step() {
let step = CodeExecutionStep::new("print('hello')");
assert!(step.output.is_none());
let step_with_output = CodeExecutionStep::with_output("print('hello')", "hello");
assert_eq!(step_with_output.output, Some("hello".to_string()));
}
// Default variant and lowercase Display output.
#[test]
fn test_provider() {
assert_eq!(Provider::default(), Provider::OpenAI);
assert_eq!(format!("{}", Provider::Gemini), "gemini");
}
// Citation byte range [0, 4) over "Test report content" yields "Test";
// duplicate-free source listing contains the single citation.
#[test]
fn test_deep_research_response() {
let mut response = DeepResearchResponse::new("Test report content");
response.citations.push(Citation {
title: "Source".to_string(),
url: "https://example.com".to_string(),
start_index: 0,
end_index: 4,
});
let text = response.get_citation_text(&response.citations[0]);
assert_eq!(text, "Test");
let sources = response.get_all_sources();
assert_eq!(sources.len(), 1);
}
#[test]
fn test_retrieval_policy() {
assert_eq!(RetrievalPolicy::default(), RetrievalPolicy::Always);
}
// Builder methods override the model and blocking flag.
#[test]
fn test_llm_guardrail() {
let guardrail = LLMGuardrail::new("content_filter", "Filters inappropriate content")
.with_model("gpt-4o")
.block_on_failure(true);
assert_eq!(guardrail.name, "content_filter");
assert_eq!(guardrail.model, "gpt-4o");
assert!(guardrail.block_on_failure);
}
// thiserror-generated Display messages include the relevant details.
#[test]
fn test_handoff_error() {
let err = HandoffError::Cycle { agents: vec!["a".to_string(), "b".to_string()] };
assert!(err.to_string().contains("cycle"));
let err = HandoffError::DepthExceeded { max_depth: 10 };
assert!(err.to_string().contains("10"));
}
#[test]
fn test_agent_app_config() {
let config = AgentAppConfig::new("test_app");
assert_eq!(config.name, "test_app");
assert_eq!(config.port, 8000);
}
#[test]
fn test_security_policy() {
let restrictive = SecurityPolicy::restrictive();
assert!(!restrictive.allow_network);
let permissive = SecurityPolicy::permissive();
assert!(permissive.allow_network);
}
// `with_improvement` marks the output as modified; `new` does not.
#[test]
fn test_reflection_output() {
let output = ReflectionOutput::new("original", "reflection");
assert!(!output.was_modified);
let improved = ReflectionOutput::with_improvement("original", "reflection", "improved");
assert!(improved.was_modified);
}
// The embedding stub returns a 1536-dimensional vector.
#[test]
fn test_embed() {
let result = embed("test", None);
assert!(result.is_ok());
assert_eq!(result.unwrap().len(), 1536);
}
#[test]
fn test_autonomy_presets() {
assert!(AUTONOMY_PRESETS.contains_key("full"));
assert!(AUTONOMY_PRESETS.contains_key("supervised"));
assert!(AUTONOMY_PRESETS.contains_key("minimal"));
}
#[test]
fn test_recommended_prompt_prefix() {
assert!(RECOMMENDED_PROMPT_PREFIX.contains("hand off"));
}
// Explicit policies win; no policies and no config yields empty.
#[test]
fn test_resolve_guardrail_policies() {
let policies = resolve_guardrail_policies(Some(&["policy1", "policy2"]), None);
assert_eq!(policies.len(), 2);
let empty = resolve_guardrail_policies(None, None);
assert!(empty.is_empty());
}
#[test]
fn test_trace_context() {
let ctx = trace_context();
assert!(!ctx.trace_id.is_empty());
assert!(!ctx.span_id.is_empty());
}
#[test]
fn test_track_workflow() {
let ctx = track_workflow("test_workflow", None);
assert!(ctx.attributes.contains_key("workflow_name"));
}
}