1use super::api::{create_openai_client, to_openai_messages};
2use super::model::{
3 AgentConfig, ChatMessage, ChatSession, ModelProvider, load_agent_config, load_chat_session,
4 save_agent_config, save_chat_session,
5};
6use super::theme::Theme;
7use crate::util::log::write_error_log;
8use async_openai::types::chat::CreateChatCompletionRequestArgs;
9use futures::StreamExt;
10use ratatui::text::Line;
11use ratatui::widgets::ListState;
12use std::sync::{Arc, Mutex, mpsc};
13
/// Messages sent from the background request thread to the UI thread.
pub enum StreamMsg {
    /// A new content fragment was appended to the shared `streaming_content` buffer.
    Chunk,
    /// The request completed successfully.
    Done,
    /// The request failed; payload is a human-readable error message.
    Error(String),
}
25
/// Top-level state for the chat TUI: configuration, session history,
/// input editing, streaming state, and render caches.
pub struct ChatApp {
    /// Provider/model configuration (persisted via `save_agent_config`).
    pub agent_config: AgentConfig,
    /// Current conversation (persisted via `save_chat_session`).
    pub session: ChatSession,
    /// Text currently typed in the input box.
    pub input: String,
    /// Cursor position inside `input`.
    pub cursor_pos: usize,
    /// Which screen/input mode is active.
    pub mode: ChatMode,
    /// Vertical scroll position; `u16::MAX` is used as a "stick to bottom"
    /// sentinel (presumably clamped by the renderer — TODO confirm).
    pub scroll_offset: u16,
    /// True while a request thread is running.
    pub is_loading: bool,
    /// Selection state for the model-picker list.
    pub model_list_state: ListState,
    /// Transient notification: (message, is_error, created-at).
    pub toast: Option<(String, bool, std::time::Instant)>,
    /// Receiver for worker-thread events; `Some` only while a request is active.
    pub stream_rx: Option<mpsc::Receiver<StreamMsg>>,
    /// Buffer the worker thread appends streamed content into.
    pub streaming_content: Arc<Mutex<String>>,
    /// Cache of rendered message lines; `None` forces a full re-render.
    pub msg_lines_cache: Option<MsgLinesCache>,
    /// Index of the message highlighted in `ChatMode::Browse`.
    pub browse_msg_index: usize,
    /// Length of `streaming_content` at the last render (re-render throttling).
    pub last_rendered_streaming_len: usize,
    /// Time of the last streaming render (re-render throttling).
    pub last_stream_render_time: std::time::Instant,
    /// Provider row selected in the config screen.
    pub config_provider_idx: usize,
    /// Field row selected in the config screen.
    pub config_field_idx: usize,
    /// True while a config field is being edited.
    pub config_editing: bool,
    /// Edit buffer for the config field being edited.
    pub config_edit_buf: String,
    /// Cursor position inside `config_edit_buf`.
    pub config_edit_cursor: usize,
    /// When true, the view follows new content to the bottom.
    pub auto_scroll: bool,
    /// Active color theme, derived from `agent_config.theme`.
    pub theme: Theme,
}
74
/// Cache of the fully rendered chat transcript. The scalar fields record the
/// inputs the cache was built from, so the renderer can detect staleness.
pub struct MsgLinesCache {
    /// Number of session messages when the cache was built.
    pub msg_count: usize,
    /// Content length of the last message when the cache was built.
    pub last_msg_len: usize,
    /// Length of the streaming buffer when the cache was built.
    pub streaming_len: usize,
    /// `is_loading` flag when the cache was built.
    pub is_loading: bool,
    /// Bubble width the lines were wrapped to.
    pub bubble_max_width: usize,
    /// Browse highlight index the lines were rendered with, if any.
    pub browse_index: Option<usize>,
    /// The rendered lines for the whole transcript.
    pub lines: Vec<Line<'static>>,
    /// Per-message (start line, line count) positions within `lines`
    /// — presumably used for browse navigation; TODO confirm against renderer.
    pub msg_start_lines: Vec<(usize, usize)>,
    /// Per-message rendered-line caches.
    pub per_msg_lines: Vec<PerMsgCache>,
    /// Already-stable rendered lines of the in-progress streamed message.
    pub streaming_stable_lines: Vec<Line<'static>>,
    /// Byte offset into the streaming buffer covered by the stable lines.
    pub streaming_stable_offset: usize,
}
100
/// Cached rendered lines for a single message.
pub struct PerMsgCache {
    /// Content length of the message when `lines` was rendered (staleness check).
    pub content_len: usize,
    /// Rendered lines for this message.
    pub lines: Vec<Line<'static>>,
    /// Index of the message within the session.
    pub msg_index: usize,
}
110
/// How long a toast notification stays visible, in seconds.
pub const TOAST_DURATION_SECS: u64 = 4;
113
/// Which screen/input mode the TUI is currently in.
#[derive(PartialEq)]
pub enum ChatMode {
    /// Normal chat input.
    Chat,
    /// Model/provider selection list.
    SelectModel,
    /// Browsing individual history messages.
    Browse,
    /// Help overlay.
    Help,
    /// Configuration editor.
    Config,
}
127
/// Editable fields shown per provider in the config screen.
pub const CONFIG_FIELDS: &[&str] = &["name", "api_base", "api_key", "model"];
/// Global (provider-independent) editable fields in the config screen.
pub const CONFIG_GLOBAL_FIELDS: &[&str] = &[
    "system_prompt",
    "stream_mode",
    "max_history_messages",
    "theme",
];

/// Total number of editable rows: per-provider fields plus global fields.
pub fn config_total_fields() -> usize {
    [CONFIG_FIELDS, CONFIG_GLOBAL_FIELDS]
        .iter()
        .map(|fields| fields.len())
        .sum()
}
141
142impl ChatApp {
143 pub fn new() -> Self {
144 let agent_config = load_agent_config();
145 let session = load_chat_session();
146 let mut model_list_state = ListState::default();
147 if !agent_config.providers.is_empty() {
148 model_list_state.select(Some(agent_config.active_index));
149 }
150 let theme = Theme::from_name(&agent_config.theme);
151 Self {
152 agent_config,
153 session,
154 input: String::new(),
155 cursor_pos: 0,
156 mode: ChatMode::Chat,
157 scroll_offset: u16::MAX, is_loading: false,
159 model_list_state,
160 toast: None,
161 stream_rx: None,
162 streaming_content: Arc::new(Mutex::new(String::new())),
163 msg_lines_cache: None,
164 browse_msg_index: 0,
165 last_rendered_streaming_len: 0,
166 last_stream_render_time: std::time::Instant::now(),
167 config_provider_idx: 0,
168 config_field_idx: 0,
169 config_editing: false,
170 config_edit_buf: String::new(),
171 config_edit_cursor: 0,
172 auto_scroll: true,
173 theme,
174 }
175 }
176
177 pub fn switch_theme(&mut self) {
179 self.agent_config.theme = self.agent_config.theme.next();
180 self.theme = Theme::from_name(&self.agent_config.theme);
181 self.msg_lines_cache = None; }
183
184 pub fn show_toast(&mut self, msg: impl Into<String>, is_error: bool) {
186 self.toast = Some((msg.into(), is_error, std::time::Instant::now()));
187 }
188
189 pub fn tick_toast(&mut self) {
191 if let Some((_, _, created)) = &self.toast {
192 if created.elapsed().as_secs() >= TOAST_DURATION_SECS {
193 self.toast = None;
194 }
195 }
196 }
197
198 pub fn active_provider(&self) -> Option<&ModelProvider> {
200 if self.agent_config.providers.is_empty() {
201 return None;
202 }
203 let idx = self
204 .agent_config
205 .active_index
206 .min(self.agent_config.providers.len() - 1);
207 Some(&self.agent_config.providers[idx])
208 }
209
210 pub fn active_model_name(&self) -> String {
212 self.active_provider()
213 .map(|p| p.name.clone())
214 .unwrap_or_else(|| "未配置".to_string())
215 }
216
217 pub fn build_api_messages(&self) -> Vec<ChatMessage> {
219 let mut messages = Vec::new();
220 if let Some(sys) = &self.agent_config.system_prompt {
221 messages.push(ChatMessage {
222 role: "system".to_string(),
223 content: sys.clone(),
224 });
225 }
226
227 let max_history = self.agent_config.max_history_messages;
229 let history_messages: Vec<_> = if self.session.messages.len() > max_history {
230 self.session.messages[self.session.messages.len() - max_history..].to_vec()
231 } else {
232 self.session.messages.clone()
233 };
234
235 for msg in history_messages {
236 messages.push(msg);
237 }
238 messages
239 }
240
241 pub fn send_message(&mut self) {
243 let text = self.input.trim().to_string();
244 if text.is_empty() {
245 return;
246 }
247
248 self.session.messages.push(ChatMessage {
250 role: "user".to_string(),
251 content: text,
252 });
253 self.input.clear();
254 self.cursor_pos = 0;
255 self.auto_scroll = true;
257 self.scroll_offset = u16::MAX;
258
259 let provider = match self.active_provider() {
261 Some(p) => p.clone(),
262 None => {
263 self.show_toast("未配置模型提供方,请先编辑配置文件", true);
264 return;
265 }
266 };
267
268 self.is_loading = true;
269 self.last_rendered_streaming_len = 0;
271 self.last_stream_render_time = std::time::Instant::now();
272 self.msg_lines_cache = None;
273
274 let api_messages = self.build_api_messages();
275
276 {
278 let mut sc = self.streaming_content.lock().unwrap();
279 sc.clear();
280 }
281
282 let (tx, rx) = mpsc::channel::<StreamMsg>();
284 self.stream_rx = Some(rx);
285
286 let streaming_content = Arc::clone(&self.streaming_content);
287
288 let use_stream = self.agent_config.stream_mode;
289
290 std::thread::spawn(move || {
292 let rt = match tokio::runtime::Runtime::new() {
293 Ok(rt) => rt,
294 Err(e) => {
295 let _ = tx.send(StreamMsg::Error(format!("创建异步运行时失败: {}", e)));
296 return;
297 }
298 };
299
300 rt.block_on(async {
301 let client = create_openai_client(&provider);
302 let openai_messages = to_openai_messages(&api_messages);
303
304 let request = match CreateChatCompletionRequestArgs::default()
305 .model(&provider.model)
306 .messages(openai_messages)
307 .build()
308 {
309 Ok(req) => req,
310 Err(e) => {
311 let _ = tx.send(StreamMsg::Error(format!("构建请求失败: {}", e)));
312 return;
313 }
314 };
315
316 if use_stream {
317 let mut stream = match client.chat().create_stream(request).await {
319 Ok(s) => s,
320 Err(e) => {
321 let error_msg = format!("API 请求失败: {}", e);
322 write_error_log("Chat API 流式请求创建", &error_msg);
323 let _ = tx.send(StreamMsg::Error(error_msg));
324 return;
325 }
326 };
327
328 while let Some(result) = stream.next().await {
329 match result {
330 Ok(response) => {
331 for choice in &response.choices {
332 if let Some(ref content) = choice.delta.content {
333 {
335 let mut sc = streaming_content.lock().unwrap();
336 sc.push_str(content);
337 }
338 let _ = tx.send(StreamMsg::Chunk);
339 }
340 }
341 }
342 Err(e) => {
343 let error_str = format!("{}", e);
344 write_error_log("Chat API 流式响应", &error_str);
345 let _ = tx.send(StreamMsg::Error(error_str));
346 return;
347 }
348 }
349 }
350 } else {
351 match client.chat().create(request).await {
353 Ok(response) => {
354 if let Some(choice) = response.choices.first() {
355 if let Some(ref content) = choice.message.content {
356 {
357 let mut sc = streaming_content.lock().unwrap();
358 sc.push_str(content);
359 }
360 let _ = tx.send(StreamMsg::Chunk);
361 }
362 }
363 }
364 Err(e) => {
365 let error_msg = format!("API 请求失败: {}", e);
366 write_error_log("Chat API 非流式请求", &error_msg);
367 let _ = tx.send(StreamMsg::Error(error_msg));
368 return;
369 }
370 }
371 }
372
373 let _ = tx.send(StreamMsg::Done);
374
375 let _ = tx.send(StreamMsg::Done);
376 });
377 });
378 }
379
380 pub fn poll_stream(&mut self) {
382 if self.stream_rx.is_none() {
383 return;
384 }
385
386 let mut finished = false;
387 let mut had_error = false;
388
389 if let Some(ref rx) = self.stream_rx {
391 loop {
392 match rx.try_recv() {
393 Ok(StreamMsg::Chunk) => {
394 if self.auto_scroll {
397 self.scroll_offset = u16::MAX;
398 }
399 }
400 Ok(StreamMsg::Done) => {
401 finished = true;
402 break;
403 }
404 Ok(StreamMsg::Error(e)) => {
405 self.show_toast(format!("请求失败: {}", e), true);
406 had_error = true;
407 finished = true;
408 break;
409 }
410 Err(mpsc::TryRecvError::Empty) => break,
411 Err(mpsc::TryRecvError::Disconnected) => {
412 finished = true;
413 break;
414 }
415 }
416 }
417 }
418
419 if finished {
420 self.stream_rx = None;
421 self.is_loading = false;
422 self.last_rendered_streaming_len = 0;
424 self.msg_lines_cache = None;
426
427 if !had_error {
428 let content = {
430 let sc = self.streaming_content.lock().unwrap();
431 sc.clone()
432 };
433 if !content.is_empty() {
434 self.session.messages.push(ChatMessage {
435 role: "assistant".to_string(),
436 content,
437 });
438 self.streaming_content.lock().unwrap().clear();
440 self.show_toast("回复完成 ✓", false);
441 }
442 if self.auto_scroll {
443 self.scroll_offset = u16::MAX;
444 }
445 } else {
446 self.streaming_content.lock().unwrap().clear();
448 }
449
450 let _ = save_chat_session(&self.session);
452 }
453 }
454
455 pub fn clear_session(&mut self) {
457 self.session.messages.clear();
458 self.scroll_offset = 0;
459 self.msg_lines_cache = None; let _ = save_chat_session(&self.session);
461 self.show_toast("对话已清空", false);
462 }
463
464 pub fn switch_model(&mut self) {
466 if let Some(sel) = self.model_list_state.selected() {
467 self.agent_config.active_index = sel;
468 let _ = save_agent_config(&self.agent_config);
469 let name = self.active_model_name();
470 self.show_toast(format!("已切换到: {}", name), false);
471 }
472 self.mode = ChatMode::Chat;
473 }
474
475 pub fn scroll_up(&mut self) {
477 self.scroll_offset = self.scroll_offset.saturating_sub(3);
478 self.auto_scroll = false;
480 }
481
482 pub fn scroll_down(&mut self) {
484 self.scroll_offset = self.scroll_offset.saturating_add(3);
485 }
488}