1use super::api::{create_openai_client, to_openai_messages};
2use super::model::{
3 AgentConfig, ChatMessage, ChatSession, ModelProvider, load_agent_config, load_chat_session,
4 save_agent_config, save_chat_session,
5};
6use crate::util::log::write_error_log;
7use async_openai::types::chat::CreateChatCompletionRequestArgs;
8use futures::StreamExt;
9use ratatui::text::Line;
10use ratatui::widgets::ListState;
11use std::sync::{Arc, Mutex, mpsc};
12
/// Events sent from the background request thread to the UI thread
/// over the `mpsc` channel held in `ChatApp::stream_rx`.
pub enum StreamMsg {
 /// New content was appended to the shared `streaming_content` buffer.
 Chunk,
 /// The request finished successfully.
 Done,
 /// The request failed; payload is a human-readable error message.
 Error(String),
}
24
/// Top-level state of the chat TUI: persisted configuration and session,
/// input editing, render caches, and the streaming-request channel.
pub struct ChatApp {
 /// Provider list and global settings loaded from disk.
 pub agent_config: AgentConfig,
 /// Current conversation, persisted via `save_chat_session`.
 pub session: ChatSession,
 /// Text currently typed in the input box.
 pub input: String,
 /// Cursor position within `input`.
 pub cursor_pos: usize,
 /// Which UI screen is active.
 pub mode: ChatMode,
 /// Vertical scroll offset of the message view; `u16::MAX` is used as a
 /// "stick to bottom" sentinel (see `new`, `send_message`, `poll_stream`).
 pub scroll_offset: u16,
 /// True while a background request thread is running.
 pub is_loading: bool,
 /// Selection state of the model-picker list.
 pub model_list_state: ListState,
 /// Transient notification: (message, is_error, created_at).
 pub toast: Option<(String, bool, std::time::Instant)>,
 /// Receiver for events from the background request thread.
 pub stream_rx: Option<mpsc::Receiver<StreamMsg>>,
 /// Shared buffer the worker thread appends streamed content into.
 pub streaming_content: Arc<Mutex<String>>,
 /// Cached rendered lines; `None` forces the renderer to rebuild.
 pub msg_lines_cache: Option<MsgLinesCache>,
 // Presumably the index of the message highlighted in Browse mode —
 // not read in this module; confirm against the renderer/input handler.
 pub browse_msg_index: usize,
 /// Length of `streaming_content` at the last render (reset on send/finish).
 pub last_rendered_streaming_len: usize,
 /// Instant of the last streaming render (reset when a request starts).
 pub last_stream_render_time: std::time::Instant,
 /// Provider row selected in the Config view.
 pub config_provider_idx: usize,
 /// Field row selected in the Config view.
 pub config_field_idx: usize,
 /// True while a config field is being edited inline.
 pub config_editing: bool,
 /// Edit buffer for the config field being edited.
 pub config_edit_buf: String,
 /// Cursor position within `config_edit_buf`.
 pub config_edit_cursor: usize,
 /// When true, the view is kept pinned to the newest message.
 pub auto_scroll: bool,
}
71
/// Cached result of laying chat messages out into terminal lines.
/// The scalar fields record the inputs the cache was built from so the
/// renderer can detect staleness; this module only invalidates the cache
/// wholesale by setting `ChatApp::msg_lines_cache = None`.
pub struct MsgLinesCache {
 /// Message count the cache was built for.
 pub msg_count: usize,
 /// Length of the last message at build time.
 pub last_msg_len: usize,
 /// Length of the streaming buffer at build time.
 pub streaming_len: usize,
 /// Loading flag at build time.
 pub is_loading: bool,
 /// Bubble width the layout was computed for.
 pub bubble_max_width: usize,
 /// Browse selection at build time, if any.
 pub browse_index: Option<usize>,
 /// The fully rendered lines.
 pub lines: Vec<Line<'static>>,
 // `msg_start_lines`: per-message (index, start-line) pairs into `lines`;
 // `per_msg_lines`: per-message cached line runs. NOTE(review): neither is
 // used in this module — confirm the tuple meaning against the renderer.
 pub msg_start_lines: Vec<(usize, usize)>, pub per_msg_lines: Vec<PerMsgCache>,
 // Presumably lines of the in-flight streaming message that are already
 // final and need not be re-wrapped — verify against the renderer.
 pub streaming_stable_lines: Vec<Line<'static>>,
 /// Offset into the streaming content covered by the stable lines.
 pub streaming_stable_offset: usize,
}
97
/// Cached rendered lines for a single message.
pub struct PerMsgCache {
 /// Content length when `lines` were produced (staleness check).
 pub content_len: usize,
 /// Rendered lines for this message.
 pub lines: Vec<Line<'static>>,
 /// Index of the message within the session.
 pub msg_index: usize,
}
107
108pub const TOAST_DURATION_SECS: u64 = 4;
110
/// Top-level UI modes of the chat screen.
#[derive(PartialEq)]
pub enum ChatMode {
 /// Normal chat / input mode (the default).
 Chat,
 /// Model-provider selection list.
 SelectModel,
 /// Browse message history.
 Browse,
 /// Help overlay.
 Help,
 /// Configuration editor.
 Config,
}
124
/// Per-provider fields editable in the Config view.
pub const CONFIG_FIELDS: &[&str] = &["name", "api_base", "api_key", "model"];
/// Global (provider-independent) fields editable in the Config view.
pub const CONFIG_GLOBAL_FIELDS: &[&str] = &["system_prompt", "stream_mode", "max_history_messages"];

/// Total number of editable rows in the Config view: provider fields
/// plus global fields.
pub fn config_total_fields() -> usize {
    [CONFIG_FIELDS, CONFIG_GLOBAL_FIELDS]
        .iter()
        .map(|fields| fields.len())
        .sum()
}
133
134impl ChatApp {
135 pub fn new() -> Self {
136 let agent_config = load_agent_config();
137 let session = load_chat_session();
138 let mut model_list_state = ListState::default();
139 if !agent_config.providers.is_empty() {
140 model_list_state.select(Some(agent_config.active_index));
141 }
142 Self {
143 agent_config,
144 session,
145 input: String::new(),
146 cursor_pos: 0,
147 mode: ChatMode::Chat,
148 scroll_offset: u16::MAX, is_loading: false,
150 model_list_state,
151 toast: None,
152 stream_rx: None,
153 streaming_content: Arc::new(Mutex::new(String::new())),
154 msg_lines_cache: None,
155 browse_msg_index: 0,
156 last_rendered_streaming_len: 0,
157 last_stream_render_time: std::time::Instant::now(),
158 config_provider_idx: 0,
159 config_field_idx: 0,
160 config_editing: false,
161 config_edit_buf: String::new(),
162 config_edit_cursor: 0,
163 auto_scroll: true,
164 }
165 }
166
167 pub fn show_toast(&mut self, msg: impl Into<String>, is_error: bool) {
169 self.toast = Some((msg.into(), is_error, std::time::Instant::now()));
170 }
171
172 pub fn tick_toast(&mut self) {
174 if let Some((_, _, created)) = &self.toast {
175 if created.elapsed().as_secs() >= TOAST_DURATION_SECS {
176 self.toast = None;
177 }
178 }
179 }
180
181 pub fn active_provider(&self) -> Option<&ModelProvider> {
183 if self.agent_config.providers.is_empty() {
184 return None;
185 }
186 let idx = self
187 .agent_config
188 .active_index
189 .min(self.agent_config.providers.len() - 1);
190 Some(&self.agent_config.providers[idx])
191 }
192
193 pub fn active_model_name(&self) -> String {
195 self.active_provider()
196 .map(|p| p.name.clone())
197 .unwrap_or_else(|| "未配置".to_string())
198 }
199
200 pub fn build_api_messages(&self) -> Vec<ChatMessage> {
202 let mut messages = Vec::new();
203 if let Some(sys) = &self.agent_config.system_prompt {
204 messages.push(ChatMessage {
205 role: "system".to_string(),
206 content: sys.clone(),
207 });
208 }
209
210 let max_history = self.agent_config.max_history_messages;
212 let history_messages: Vec<_> = if self.session.messages.len() > max_history {
213 self.session.messages[self.session.messages.len() - max_history..].to_vec()
214 } else {
215 self.session.messages.clone()
216 };
217
218 for msg in history_messages {
219 messages.push(msg);
220 }
221 messages
222 }
223
224 pub fn send_message(&mut self) {
226 let text = self.input.trim().to_string();
227 if text.is_empty() {
228 return;
229 }
230
231 self.session.messages.push(ChatMessage {
233 role: "user".to_string(),
234 content: text,
235 });
236 self.input.clear();
237 self.cursor_pos = 0;
238 self.auto_scroll = true;
240 self.scroll_offset = u16::MAX;
241
242 let provider = match self.active_provider() {
244 Some(p) => p.clone(),
245 None => {
246 self.show_toast("未配置模型提供方,请先编辑配置文件", true);
247 return;
248 }
249 };
250
251 self.is_loading = true;
252 self.last_rendered_streaming_len = 0;
254 self.last_stream_render_time = std::time::Instant::now();
255 self.msg_lines_cache = None;
256
257 let api_messages = self.build_api_messages();
258
259 {
261 let mut sc = self.streaming_content.lock().unwrap();
262 sc.clear();
263 }
264
265 let (tx, rx) = mpsc::channel::<StreamMsg>();
267 self.stream_rx = Some(rx);
268
269 let streaming_content = Arc::clone(&self.streaming_content);
270
271 let use_stream = self.agent_config.stream_mode;
272
273 std::thread::spawn(move || {
275 let rt = match tokio::runtime::Runtime::new() {
276 Ok(rt) => rt,
277 Err(e) => {
278 let _ = tx.send(StreamMsg::Error(format!("创建异步运行时失败: {}", e)));
279 return;
280 }
281 };
282
283 rt.block_on(async {
284 let client = create_openai_client(&provider);
285 let openai_messages = to_openai_messages(&api_messages);
286
287 let request = match CreateChatCompletionRequestArgs::default()
288 .model(&provider.model)
289 .messages(openai_messages)
290 .build()
291 {
292 Ok(req) => req,
293 Err(e) => {
294 let _ = tx.send(StreamMsg::Error(format!("构建请求失败: {}", e)));
295 return;
296 }
297 };
298
299 if use_stream {
300 let mut stream = match client.chat().create_stream(request).await {
302 Ok(s) => s,
303 Err(e) => {
304 let error_msg = format!("API 请求失败: {}", e);
305 write_error_log("Chat API 流式请求创建", &error_msg);
306 let _ = tx.send(StreamMsg::Error(error_msg));
307 return;
308 }
309 };
310
311 while let Some(result) = stream.next().await {
312 match result {
313 Ok(response) => {
314 for choice in &response.choices {
315 if let Some(ref content) = choice.delta.content {
316 {
318 let mut sc = streaming_content.lock().unwrap();
319 sc.push_str(content);
320 }
321 let _ = tx.send(StreamMsg::Chunk);
322 }
323 }
324 }
325 Err(e) => {
326 let error_str = format!("{}", e);
327 write_error_log("Chat API 流式响应", &error_str);
328 let _ = tx.send(StreamMsg::Error(error_str));
329 return;
330 }
331 }
332 }
333 } else {
334 match client.chat().create(request).await {
336 Ok(response) => {
337 if let Some(choice) = response.choices.first() {
338 if let Some(ref content) = choice.message.content {
339 {
340 let mut sc = streaming_content.lock().unwrap();
341 sc.push_str(content);
342 }
343 let _ = tx.send(StreamMsg::Chunk);
344 }
345 }
346 }
347 Err(e) => {
348 let error_msg = format!("API 请求失败: {}", e);
349 write_error_log("Chat API 非流式请求", &error_msg);
350 let _ = tx.send(StreamMsg::Error(error_msg));
351 return;
352 }
353 }
354 }
355
356 let _ = tx.send(StreamMsg::Done);
357
358 let _ = tx.send(StreamMsg::Done);
359 });
360 });
361 }
362
363 pub fn poll_stream(&mut self) {
365 if self.stream_rx.is_none() {
366 return;
367 }
368
369 let mut finished = false;
370 let mut had_error = false;
371
372 if let Some(ref rx) = self.stream_rx {
374 loop {
375 match rx.try_recv() {
376 Ok(StreamMsg::Chunk) => {
377 if self.auto_scroll {
380 self.scroll_offset = u16::MAX;
381 }
382 }
383 Ok(StreamMsg::Done) => {
384 finished = true;
385 break;
386 }
387 Ok(StreamMsg::Error(e)) => {
388 self.show_toast(format!("请求失败: {}", e), true);
389 had_error = true;
390 finished = true;
391 break;
392 }
393 Err(mpsc::TryRecvError::Empty) => break,
394 Err(mpsc::TryRecvError::Disconnected) => {
395 finished = true;
396 break;
397 }
398 }
399 }
400 }
401
402 if finished {
403 self.stream_rx = None;
404 self.is_loading = false;
405 self.last_rendered_streaming_len = 0;
407 self.msg_lines_cache = None;
409
410 if !had_error {
411 let content = {
413 let sc = self.streaming_content.lock().unwrap();
414 sc.clone()
415 };
416 if !content.is_empty() {
417 self.session.messages.push(ChatMessage {
418 role: "assistant".to_string(),
419 content,
420 });
421 self.streaming_content.lock().unwrap().clear();
423 self.show_toast("回复完成 ✓", false);
424 }
425 if self.auto_scroll {
426 self.scroll_offset = u16::MAX;
427 }
428 } else {
429 self.streaming_content.lock().unwrap().clear();
431 }
432
433 let _ = save_chat_session(&self.session);
435 }
436 }
437
438 pub fn clear_session(&mut self) {
440 self.session.messages.clear();
441 self.scroll_offset = 0;
442 self.msg_lines_cache = None; let _ = save_chat_session(&self.session);
444 self.show_toast("对话已清空", false);
445 }
446
447 pub fn switch_model(&mut self) {
449 if let Some(sel) = self.model_list_state.selected() {
450 self.agent_config.active_index = sel;
451 let _ = save_agent_config(&self.agent_config);
452 let name = self.active_model_name();
453 self.show_toast(format!("已切换到: {}", name), false);
454 }
455 self.mode = ChatMode::Chat;
456 }
457
458 pub fn scroll_up(&mut self) {
460 self.scroll_offset = self.scroll_offset.saturating_sub(3);
461 self.auto_scroll = false;
463 }
464
465 pub fn scroll_down(&mut self) {
467 self.scroll_offset = self.scroll_offset.saturating_add(3);
468 }
471}