use agent::{Agent, Message, Result, Tool, ToolRegistry};
use agent::provider::openai::OpenAIProvider;
use async_trait::async_trait;
use serde_json::{json, Value};
use std::env;
use std::sync::Arc;
/// A mock weather tool that returns canned data for a requested city.
///
/// NOTE(review): the temperature/condition/humidity values are hard-coded
/// placeholders — a real implementation would query a weather service.
struct WeatherTool;

#[async_trait]
impl Tool for WeatherTool {
    fn name(&self) -> &str {
        "get_weather"
    }

    fn description(&self) -> &str {
        "获取指定城市的天气信息"
    }

    /// Read the `city` field from `params` and return a fixed weather report.
    ///
    /// Falls back to "未知城市" when the field is missing or not a string.
    async fn execute(&self, params: Value) -> Result<Value> {
        let requested_city = params
            .get("city")
            .and_then(|v| v.as_str())
            .unwrap_or("未知城市");
        let report = json!({
            "city": requested_city,
            "temperature": "22°C",
            "condition": "晴天",
            "humidity": "65%"
        });
        Ok(report)
    }
}
/// Example entry point: builds an agent backed by the OpenAI provider,
/// registers one tool, runs a single prompt, and prints the conversation
/// history.
///
/// Panics (via `expect`) when the `OPENAI_API_KEY` environment variable is
/// unset — acceptable for a demo binary, with a hint in the message.
#[tokio::main]
async fn main() -> Result<()> {
    println!("=== Agent 集成 Provider 示例 ===\n");

    // The API key must come from the environment; never hard-code secrets.
    let api_key = env::var("OPENAI_API_KEY").expect("请设置 OPENAI_API_KEY 环境变量");

    // Wire up the LLM provider and the tool registry.
    let provider = Arc::new(OpenAIProvider::new(api_key));
    let tools = ToolRegistry::new().register(Arc::new(WeatherTool));

    // Assemble the agent from its configuration pieces.
    let mut agent = Agent::builder()
        .name("智能助手")
        .description("一个集成了 OpenAI 的智能助手")
        .system_prompt("你是一个友好且专业的 AI 助手,可以帮助用户解决各种问题。")
        .model("gpt-3.5-turbo")
        .temperature(0.7)
        .max_tokens(1000)
        .max_iterations(3)
        .tools(tools)
        .provider(provider)
        .build();

    // Run one round-trip through the agent; the question is printed and
    // sent from the same binding so the transcript stays consistent.
    let question = "你好,请介绍一下你自己";
    println!("用户: {}", question);
    let response = agent.run(question).await?;
    println!("Agent: {}\n", response);

    // Dump the accumulated conversation, numbered from 1.
    println!("=== 对话历史 ===");
    let history = agent.get_history().await?;
    for (index, msg) in (1..).zip(history.iter()) {
        println!("{}. {:?}: {}", index, msg.role, msg.content);
    }

    Ok(())
}