pub mod anthropic;
pub mod bedrock;
pub mod handler;
pub mod ollama;
pub mod openai;
pub mod redact;
use serde::{Deserialize, Serialize};
/// A single message in a chat transcript, in the provider-agnostic shape
/// shared by all backends (openai / anthropic / bedrock / ollama).
///
/// Serialized with serde so it can pass through provider wire formats.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ChatMessage {
    /// Speaker role, e.g. "user" or "assistant" (provider conventions vary —
    /// the per-provider modules are responsible for mapping it).
    pub role: String,
    /// Plain-text message body.
    pub content: String,
}
/// Result of one model invocation, normalized across providers.
///
/// Derives `Debug`/`Clone`/`PartialEq`/`Eq` so callers can log, duplicate,
/// and compare responses (public types should be `Debug` at minimum).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LlmResponse {
    /// The model's textual reply.
    pub text: String,
    /// True when the model invoked the close-thread tool, signalling the
    /// conversation is complete.
    pub close_thread: bool,
}
// Name of the tool a model can call to end the conversation; each provider
// module advertises it when `include_close_tool` is set.
pub const CLOSE_THREAD_TOOL_NAME: &str = "close_thread";
// Human-readable description of the close-thread tool, passed to the model.
pub const CLOSE_THREAD_TOOL_DESC: &str = "End the conversation when it is naturally complete";
// Fallback system prompt used when the caller supplies none.
pub const DEFAULT_SYSTEM_PROMPT: &str =
"You are an AI agent communicating via the toq protocol. Respond helpfully and concisely.";
// Default cap on conversation turns before the loop is forced to stop.
pub const DEFAULT_MAX_TURNS: usize = 10;
/// Dispatches a chat completion request to the backend named by `provider`.
///
/// `model`, `system_prompt`, `messages`, and `include_close_tool` are
/// forwarded unchanged to the selected provider module.
///
/// # Errors
///
/// Returns `Err` with a descriptive string when `provider` is not one of
/// the supported backends, or when the backend itself fails.
pub async fn call(
    provider: &str,
    model: &str,
    system_prompt: &str,
    messages: &[ChatMessage],
    include_close_tool: bool,
) -> Result<LlmResponse, String> {
    // Arms are listed alphabetically; each backend shares the same signature.
    match provider {
        "anthropic" => anthropic::call(model, system_prompt, messages, include_close_tool).await,
        "bedrock" => bedrock::call(model, system_prompt, messages, include_close_tool).await,
        "ollama" => ollama::call(model, system_prompt, messages, include_close_tool).await,
        "openai" => openai::call(model, system_prompt, messages, include_close_tool).await,
        unsupported => Err(format!("unknown provider: {unsupported}")),
    }
}