pub mod api;
pub mod app;
pub mod archive;
pub mod handler;
pub mod markdown;
pub mod model;
pub mod render;
pub mod theme;
pub mod ui;

use crate::command::chat::theme::ThemeName;
use crate::config::YamlConfig;
use crate::{error, info};
use api::call_openai_stream;
use handler::run_chat_tui;
use model::{
    AgentConfig, ChatMessage, ModelProvider, agent_config_path, load_agent_config,
    save_agent_config,
};
use std::io::{self, Write};

22pub fn handle_chat(content: &[String], _config: &YamlConfig) {
23 let agent_config = load_agent_config();
24
25 if agent_config.providers.is_empty() {
26 info!("⚠️ 尚未配置 LLM 模型提供方。");
27 info!("📁 请编辑配置文件: {}", agent_config_path().display());
28 info!("📝 配置示例:");
29 let example = AgentConfig {
30 providers: vec![ModelProvider {
31 name: "GPT-4o".to_string(),
32 api_base: "https://api.openai.com/v1".to_string(),
33 api_key: "sk-your-api-key".to_string(),
34 model: "gpt-4o".to_string(),
35 }],
36 active_index: 0,
37 system_prompt: Some("你是一个有用的助手。".to_string()),
38 stream_mode: true,
39 max_history_messages: 20,
40 theme: ThemeName::default(),
41 };
42 if let Ok(json) = serde_json::to_string_pretty(&example) {
43 println!("{}", json);
44 }
45 if !agent_config_path().exists() {
47 let _ = save_agent_config(&example);
48 info!(
49 "✅ 已自动创建示例配置文件: {}",
50 agent_config_path().display()
51 );
52 info!("📌 请修改其中的 api_key 和其他配置后重新运行 chat 命令");
53 }
54 return;
55 }
56
57 if content.is_empty() {
58 run_chat_tui();
60 return;
61 }
62
63 let message = content.join(" ");
65 let message = message.trim().to_string();
66 if message.is_empty() {
67 error!("⚠️ 消息内容为空");
68 return;
69 }
70
71 let idx = agent_config
72 .active_index
73 .min(agent_config.providers.len() - 1);
74 let provider = &agent_config.providers[idx];
75
76 info!("🤖 [{}] 思考中...", provider.name);
77
78 let mut messages = Vec::new();
79 if let Some(sys) = &agent_config.system_prompt {
80 messages.push(ChatMessage {
81 role: "system".to_string(),
82 content: sys.clone(),
83 });
84 }
85 messages.push(ChatMessage {
86 role: "user".to_string(),
87 content: message,
88 });
89
90 match call_openai_stream(provider, &messages, &mut |chunk| {
91 print!("{}", chunk);
92 let _ = io::stdout().flush();
93 }) {
94 Ok(_) => {
95 println!(); }
97 Err(e) => {
98 error!("\n❌ {}", e);
99 }
100 }
101}