use anyhow::Result;
use async_trait::async_trait;
use serde::Deserialize;
use std::sync::{Arc, Mutex};
use ambi::agent::tool::ToolErr;
use ambi::agent::{Tool, ToolDefinition};
use ambi::llm::ChatTemplateType;
use ambi::types::config::OpenAIEngineConfig;
use ambi::{Agent, AgentState};
use ambi::{ChatRunner, LLMEngineConfig};
/// Argument payload for [`DatePumpTool`].
///
/// The tool takes no arguments, so this deserializes from an empty
/// JSON object (`{}`), matching the empty `properties` schema the
/// tool advertises in its definition.
#[derive(Debug, Deserialize)]
pub struct PumpArgs {}
pub struct DatePumpTool;
#[async_trait]
impl Tool for DatePumpTool {
    const NAME: &'static str = "get_date";
    type Args = PumpArgs;
    type Output = String;

    /// Describes the tool to the LLM: a no-argument call (empty object
    /// schema), a 10-second timeout, up to 3 retries, and not marked
    /// idempotent — the clock advances, so repeated calls differ.
    fn definition(&self) -> ToolDefinition {
        // Empty-object schema: the tool accepts no parameters.
        let schema = serde_json::json!({
            "type": "object",
            "properties": {},
            "required": []
        });
        ToolDefinition {
            name: String::from(Self::NAME),
            description: String::from("Get the current local date and time."),
            parameters: schema,
            timeout_secs: Some(10),
            max_retries: Some(3),
            is_idempotent: false,
        }
    }

    /// Returns the current local time formatted as `YYYY-MM-DD HH:MM:SS`.
    async fn call(&self, _args: Self::Args) -> Result<Self::Output, ToolErr> {
        println!("\n[System] Tool 'DatePumpTool' invoked by the LLM...\n");
        let now = chrono::Local::now();
        Ok(format!("{}", now.format("%Y-%m-%d %H:%M:%S")))
    }
}
/// Example entry point: builds an OpenAI-backed agent with a single
/// date/time tool and runs one chat turn against it.
#[tokio::main]
async fn main() -> Result<()> {
    let system_prompt = "You are a helpful AI assistant with tool-calling capabilities.";

    // Fall back to a dummy key so the example still configures without
    // the environment variable set (the API request itself would fail).
    let api_key = std::env::var("OPENAI_API_KEY").unwrap_or_else(|_| "test-key".to_string());

    let engine_config = LLMEngineConfig::OpenAI(OpenAIEngineConfig {
        api_key,
        base_url: "https://api.openai.com/v1".to_string(),
        model_name: "gpt-4o-mini".to_string(),
        temp: 0.7,
        top_p: 0.9,
    });

    let agent = Agent::make(engine_config)
        .await?
        .template(ChatTemplateType::Chatml)
        .preamble(system_prompt)
        .tool(DatePumpTool)?;

    // Shared, lockable conversation state for the runner.
    // NOTE(review): std::sync::Mutex in an async context is fine only if
    // the runner never holds the guard across an await — assumed to be
    // the ambi API's contract; confirm upstream.
    let agent_state = Arc::new(Mutex::new(AgentState::new()));

    // Idiomatic method-call syntax instead of the original UFCS form
    // `ChatRunner::chat(&chat_runner, ...)`.
    let chat_runner = ChatRunner;
    let res = chat_runner
        .chat(&agent, &agent_state, "What is the current local date and time?")
        .await?;

    // println! (not print!) so the output ends with a newline and stdout
    // is not left with an unflushed partial line at process exit.
    println!("{}", res);
    Ok(())
}