use rucora::agent::ChatAgent;
use rucora::prelude::Agent;
use rucora::provider::OpenAiProvider;
use tokio::io::{self, AsyncBufReadExt, BufReader};
use tracing::{Level, info};
use tracing_subscriber::FmtSubscriber;
/// Basic chat example: a REPL-style loop driving a `ChatAgent` with
/// bounded conversation history over an OpenAI-compatible provider.
///
/// Configuration comes from environment variables (optionally loaded from a
/// `.env` file): `OPENAI_API_KEY` or `OPENAI_BASE_URL` selects the provider,
/// and `MODEL_NAME` selects the model. Missing configuration prints setup
/// guidance and exits cleanly instead of failing.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Best-effort: a missing .env file is not an error.
    dotenv::dotenv().ok();

    // Plain INFO-level log output without target prefixes, for readability.
    let subscriber = FmtSubscriber::builder()
        .with_max_level(Level::INFO)
        .with_target(false)
        .finish();
    tracing::subscriber::set_global_default(subscriber)?;

    info!("╔════════════════════════════════════════╗");
    info!("║ rucora 基础聊天示例 ║");
    info!("╚════════════════════════════════════════╝\n");

    // No provider configuration at all: print guidance and exit cleanly.
    if std::env::var("OPENAI_API_KEY").is_err() && std::env::var("OPENAI_BASE_URL").is_err() {
        info!("⚠ 未设置 API 配置");
        info!(" 使用 OpenAI: export OPENAI_API_KEY=sk-your-key");
        info!(" 使用 Ollama: export OPENAI_BASE_URL=http://localhost:11434");
        return Ok(());
    }

    // Fix: previously this `expect`ed and panicked on a missing env var.
    // Mirror the API-key check above: print guidance and exit gracefully.
    let Ok(model_name) = std::env::var("MODEL_NAME") else {
        info!("⚠ 未设置环境变量 MODEL_NAME");
        info!(" 设置模型: export MODEL_NAME=your-model-name");
        return Ok(());
    };

    info!("1. 创建 Provider...");
    let provider = OpenAiProvider::from_env()?;
    info!("✓ Provider 创建成功\n");

    info!("2. 创建 ChatAgent(带对话历史)...");
    // Keep at most 20 history messages so multi-turn context stays bounded.
    let agent = ChatAgent::builder()
        .provider(provider)
        .model(model_name)
        .system_prompt("你是友好的 AI 助手。请记住对话历史,以便进行连贯的多轮对话。")
        .with_conversation(true)
        .max_history_messages(20)
        .build();
    info!("✓ ChatAgent 创建成功\n");

    info!("═══════════════════════════════════════");
    info!("开始聊天(输入 'quit' 退出)");
    info!("═══════════════════════════════════════\n");

    let stdin = BufReader::new(io::stdin());
    let mut lines = stdin.lines();

    loop {
        info!("你:");

        // Fix: the original `if let Ok(Some(..))` silently ignored EOF
        // (`Ok(None)`) and read errors, busy-looping forever on a closed
        // stdin (piped input or Ctrl-D). Break out of the loop in both cases.
        let line = match lines.next_line().await {
            Ok(Some(line)) => line,
            Ok(None) => {
                info!("再见!");
                break;
            }
            Err(e) => {
                info!("错误:{}\n", e);
                break;
            }
        };

        let input = line.trim();
        if input.is_empty() {
            // Skip blank lines without calling the model.
            continue;
        }
        // Case-insensitive exit commands.
        if input.eq_ignore_ascii_case("quit") || input.eq_ignore_ascii_case("exit") {
            info!("再见!");
            break;
        }

        // Run one chat turn; errors are reported but do not end the session.
        match agent.run(input.into()).await {
            Ok(output) => {
                if let Some(text) = output.text() {
                    info!("助手:{}\n", text);
                }
            }
            Err(e) => {
                info!("错误:{}\n", e);
            }
        }
    }

    Ok(())
}