use anyhow::Result;
use async_trait::async_trait;
use serde::Deserialize;
use std::io::Write;
use tokio_stream::StreamExt;
use ambi::agent::tool::ToolErr;
use ambi::llm::chat_template::ChatTemplateType;
use ambi::{Agent, Tool, ToolDefinition};
use ambi::{EngineConfig, OpenAIEngineConfig};
/// Argument payload for [`DatePumpTool`]. Deliberately empty: the tool
/// takes no inputs, but the agent framework still deserializes an
/// arguments object for every call.
#[derive(Deserialize)]
pub struct PumpArgs {}
pub struct DatePumpTool;
#[async_trait]
impl Tool for DatePumpTool {
    const NAME: &'static str = "get_date";
    type Args = PumpArgs;
    type Output = String;

    /// Describe the tool to the LLM: no parameters, a 10s timeout and up
    /// to 3 retries per invocation.
    fn definition(&self) -> ToolDefinition {
        // JSON Schema for the (empty) argument object.
        let parameters = serde_json::json!({
            "type": "object",
            "properties": {},
            "required": []
        });
        ToolDefinition {
            name: Self::NAME.to_string(),
            description: "Get the current local date and time.".to_string(),
            parameters,
            timeout_secs: Some(10),
            max_retries: Some(3),
        }
    }

    /// Return the current local timestamp formatted as `YYYY-MM-DD HH:MM:SS`.
    async fn call(&self, _arg: Self::Args) -> Result<Self::Output, ToolErr> {
        println!("\n[System] Tool 'DatePumpTool' invoked by the LLM...\n");
        let stamp = chrono::Local::now().format("%Y-%m-%d %H:%M:%S");
        Ok(stamp.to_string())
    }
}
/// Install a terminal logger at `Info` level. Initialization failure
/// (e.g. a global logger was already set) is deliberately ignored.
fn init_logger() {
    use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};

    let _ = TermLogger::init(
        LevelFilter::Info,
        Config::default(),
        TerminalMode::Mixed,
        ColorChoice::Auto,
    );
}
/// Example entry point: build an OpenAI-backed agent with the date tool
/// registered, ask it for the current time, and stream the reply to stdout.
#[tokio::main]
async fn main() -> Result<()> {
    init_logger();

    let system_prompt = "You are a helpful AI assistant with tool-calling capabilities.";

    // Fall back to a placeholder key so the example still compiles and starts
    // without the environment variable set (the request itself will then fail).
    let api_key = std::env::var("OPENAI_API_KEY").unwrap_or_else(|_| "test-key".to_string());

    let engine_config = EngineConfig::OpenAI(OpenAIEngineConfig {
        api_key,
        base_url: "https://api.openai.com/v1".to_string(),
        model_name: "gpt-4o-mini".to_string(),
        temp: 0.7,
        top_p: 0.9,
    });

    let mut agent = Agent::make(engine_config)?
        .template(ChatTemplateType::Chatml)
        .preamble(system_prompt)
        .tool(DatePumpTool)?;

    let mut res_stream = agent
        .chat_stream("What is the current local date and time?")
        .await
        .map_err(|_| anyhow::anyhow!("Failed to create chat stream"))?;

    // Print tokens as they arrive. Error chunks are reported instead of being
    // silently dropped (the original `if let Ok(...)` swallowed them), but the
    // loop keeps draining the stream so a transient error doesn't cut the
    // response short.
    while let Some(chunk) = res_stream.next().await {
        match chunk {
            Ok(text) => {
                print!("{}", text);
                // Flush so partial lines show up immediately; a failed flush
                // is not fatal for this demo.
                let _ = std::io::stdout().flush();
            }
            // NOTE(review): the stream error type's trait bounds aren't
            // visible here, so we emit a generic message rather than
            // formatting the error value itself — confirm whether it
            // implements Display/Debug and include it if so.
            Err(_) => eprintln!("\n[System] Error while receiving a stream chunk"),
        }
    }
    println!();
    Ok(())
}