use serde::{Deserialize, Serialize};
use menta::{GenerateTextRequest, Tool, ToolChoice, ToolExecute, generate_text};
/// Input schema for the weather tool exposed to the model.
///
/// `Deserialize` lets the runtime parse the model's tool-call arguments into
/// this struct; the `Tool` derive (from `menta`) generates the tool's JSON
/// schema and metadata from the fields and attributes below.
/// NOTE(review): `main` requires the tool by the name `"weather"` — confirm
/// that matches the name the `Tool` derive assigns to this struct.
#[derive(Deserialize, Tool)]
#[tool(description = "Get the weather in a location")]
struct WeatherTool {
// Free-form location string supplied by the model (e.g. "Paris").
#[description = "The location to get the weather for"]
location: String,
}
/// Result returned from `WeatherTool::execute`, serialized back to the model.
#[derive(Serialize)]
struct WeatherOutput {
// Temperature value; units are not specified here — presumably Fahrenheit
// given the hard-coded 72 in `execute`, but confirm against consumers.
temperature: i32,
// Human-readable conditions description (e.g. "sunny in Paris").
conditions: String,
}
impl ToolExecute for WeatherTool {
    type Output = WeatherOutput;

    /// Produce a canned weather report for the requested location.
    ///
    /// This is a stub implementation: it always reports 72 degrees and sunny
    /// conditions, echoing back the requested location. Errors are reported
    /// as `String` per the `ToolExecute` contract, though this stub is
    /// infallible in practice.
    async fn execute(&self) -> std::result::Result<Self::Output, String> {
        let conditions = format!("sunny in {}", self.location);
        let report = WeatherOutput {
            temperature: 72,
            conditions,
        };
        Ok(report)
    }
}
#[tokio::main]
async fn main() {
    // Assemble the request separately so the generate_text call site stays
    // readable: gpt-4.1-mini, a single weather question, the weather tool
    // registered, tool use forced, and at most two generation steps (one
    // tool call plus one final answer).
    let request = GenerateTextRequest::new()
        .model("openai/gpt-4.1-mini")
        .prompt("What is the weather in Paris?")
        .tool::<WeatherTool>()
        .tool_choice(ToolChoice::Required("weather".into()))
        .max_steps(2);

    // Example binary: a hard failure here is acceptable, so expect() is fine.
    let response = generate_text(request)
        .await
        .expect("generate_text failed");

    println!("{}", response.text);
}