use agentix::{LlmEvent, Request, tool, Tool};
use futures::StreamExt;
use std::env;
/// Tool exposed to the model that returns the sum of two 64-bit integers.
///
/// The `#[agentix::tool]` attribute macro generates the schema/plumbing
/// needed to advertise this function as a callable tool.
#[agentix::tool]
async fn add(a: i64, b: i64) -> i64 {
    // Integer addition is commutative, so operand order is irrelevant.
    b + a
}
/// A set of arithmetic tools exposed to the model as a group.
struct Calculator;

#[tool]
impl agentix::Tool for Calculator {
    /// Returns the product of two 64-bit integers.
    async fn multiply(&self, a: i64, b: i64) -> i64 {
        // Multiplication is commutative; operand order is irrelevant.
        b * a
    }

    /// Returns `a / b`, or an error message when the divisor is zero.
    ///
    /// Note: `b != 0.0` is true for NaN divisors, so NaN inputs fall
    /// through to the division (yielding NaN), matching IEEE semantics.
    async fn divide(&self, a: f64, b: f64) -> Result<f64, String> {
        if b != 0.0 {
            Ok(a / b)
        } else {
            Err(String::from("division by zero"))
        }
    }
}
/// Entry point: registers the calculator tools, sends a math prompt to
/// OpenAI, and prints the streamed response events to stdout.
///
/// # Errors
///
/// Returns an error if `OPENAI_API_KEY` is not set or if the request
/// stream cannot be established.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `main` already returns Result, so propagate a descriptive error with
    // `?` instead of panicking via `expect` on a user-controlled env var.
    let api_key = env::var("OPENAI_API_KEY")
        .map_err(|_| "OPENAI_API_KEY must be set in your environment variables")?;
    let http = reqwest::Client::new();

    // Combine the free-function tool and the Calculator tool set into one
    // bundle (operator overloading provided by the agentix crate).
    let bundle = add + Calculator;

    println!("Sending request to OpenAI with calculator tools...");
    let mut stream = Request::openai(api_key)
        .model("gpt-4o")
        .system_prompt("You are a math assistant. You MUST use your tools to perform calculations.")
        .user("What is 1234 multiplied by 5678, then divided by 3?")
        .tools(bundle.raw_tools())
        .stream(&http)
        .await?;

    println!("\nResponse stream:");
    while let Some(event) = stream.next().await {
        match event {
            LlmEvent::Token(t) => {
                print!("{t}");
            }
            LlmEvent::ToolCall(tc) => {
                println!("\n\n[Model requested tool call]");
                println!("Tool Name: {}", tc.name);
                println!("Arguments: {}", tc.arguments);
            }
            LlmEvent::Done => {
                break;
            }
            // Best-effort: report stream errors but keep consuming events
            // until the stream ends or a Done event arrives.
            LlmEvent::Error(e) => {
                eprintln!("\nError: {e}");
            }
            // Catch-all for variants of LlmEvent we don't need to handle
            // here (the enum may be non-exhaustive or gain variants).
            _ => {}
        }
    }
    println!("\n");
    Ok(())
}