// j_cli/command/chat/mod.rs
mod.rs1pub mod api;
2pub mod app;
3pub mod handler;
4pub mod markdown;
5pub mod model;
6pub mod render;
7pub mod ui;
8
9use crate::config::YamlConfig;
10use crate::{error, info};
11use api::call_openai_stream;
12use handler::run_chat_tui;
13use model::{
14 AgentConfig, ChatMessage, ModelProvider, agent_config_path, load_agent_config,
15 save_agent_config,
16};
17use std::io::{self, Write};
18
19pub fn handle_chat(content: &[String], _config: &YamlConfig) {
20 let agent_config = load_agent_config();
21
22 if agent_config.providers.is_empty() {
23 info!("⚠️ 尚未配置 LLM 模型提供方。");
24 info!("📁 请编辑配置文件: {}", agent_config_path().display());
25 info!("📝 配置示例:");
26 let example = AgentConfig {
27 providers: vec![ModelProvider {
28 name: "GPT-4o".to_string(),
29 api_base: "https://api.openai.com/v1".to_string(),
30 api_key: "sk-your-api-key".to_string(),
31 model: "gpt-4o".to_string(),
32 }],
33 active_index: 0,
34 system_prompt: Some("你是一个有用的助手。".to_string()),
35 stream_mode: true,
36 max_history_messages: 20,
37 };
38 if let Ok(json) = serde_json::to_string_pretty(&example) {
39 println!("{}", json);
40 }
41 if !agent_config_path().exists() {
43 let _ = save_agent_config(&example);
44 info!(
45 "✅ 已自动创建示例配置文件: {}",
46 agent_config_path().display()
47 );
48 info!("📌 请修改其中的 api_key 和其他配置后重新运行 chat 命令");
49 }
50 return;
51 }
52
53 if content.is_empty() {
54 run_chat_tui();
56 return;
57 }
58
59 let message = content.join(" ");
61 let message = message.trim().to_string();
62 if message.is_empty() {
63 error!("⚠️ 消息内容为空");
64 return;
65 }
66
67 let idx = agent_config
68 .active_index
69 .min(agent_config.providers.len() - 1);
70 let provider = &agent_config.providers[idx];
71
72 info!("🤖 [{}] 思考中...", provider.name);
73
74 let mut messages = Vec::new();
75 if let Some(sys) = &agent_config.system_prompt {
76 messages.push(ChatMessage {
77 role: "system".to_string(),
78 content: sys.clone(),
79 });
80 }
81 messages.push(ChatMessage {
82 role: "user".to_string(),
83 content: message,
84 });
85
86 match call_openai_stream(provider, &messages, &mut |chunk| {
87 print!("{}", chunk);
88 let _ = io::stdout().flush();
89 }) {
90 Ok(_) => {
91 println!(); }
93 Err(e) => {
94 error!("\n❌ {}", e);
95 }
96 }
97}