1use super::api::{build_request_with_tools, create_openai_client};
2use super::model::{
3 AgentConfig, ChatMessage, ChatSession, ModelProvider, ToolCallItem, load_agent_config,
4 load_chat_session, load_style, load_system_prompt, save_agent_config, save_chat_session,
5 save_system_prompt, system_prompt_path,
6};
7use super::skill::{self, Skill};
8use super::theme::Theme;
9use super::tools::ToolRegistry;
10use crate::constants::{CONFIG_FIELDS, CONFIG_GLOBAL_FIELDS, TOAST_DURATION_SECS};
11use crate::util::log::{write_error_log, write_info_log};
12use async_openai::types::chat::ChatCompletionTools;
13use futures::StreamExt;
14use ratatui::text::Line;
15use ratatui::widgets::ListState;
16use std::sync::{Arc, Mutex, mpsc};
/// Messages sent from the background worker thread to the UI thread.
pub enum StreamMsg {
    /// New assistant text was appended to the shared `streaming_content` buffer.
    Chunk,
    /// The model requested tool calls; the UI must confirm and/or execute them.
    ToolCallRequest(Vec<ToolCallItem>),
    /// The request finished; any accumulated text can be committed.
    Done,
    /// The request failed; payload is a human-readable error message.
    Error(String),
}
30
/// Lifecycle of a single tool call as tracked by the UI.
#[allow(dead_code)]
pub enum ToolExecStatus {
    /// Waiting for the user to approve execution.
    PendingConfirm,
    /// Approved (or auto-approved) and about to run.
    Executing,
    /// Finished successfully; payload is a truncated output summary.
    Done(String),
    /// The user declined to run this tool.
    Rejected,
    /// Execution reported an error; payload is a truncated summary.
    Failed(String),
}
45
/// UI-side record of one tool call requested by the model.
pub struct ToolCallStatus {
    /// ID assigned by the model; echoed back with the result.
    pub tool_call_id: String,
    /// Name of the tool to invoke.
    pub tool_name: String,
    /// Argument string exactly as sent by the model.
    pub arguments: String,
    /// Text shown to the user when asking for confirmation.
    pub confirm_message: String,
    /// Current execution state.
    pub status: ToolExecStatus,
}
54
/// Result of one executed tool call, sent back to the worker thread.
pub struct ToolResultMsg {
    /// ID of the tool call this result belongs to.
    pub tool_call_id: String,
    /// Tool output (or an error / rejection message).
    pub result: String,
    #[allow(dead_code)]
    pub is_error: bool,
}
62
/// Top-level state of the chat TUI: configuration, session transcript,
/// input editing, streaming plumbing, render caches and modal sub-states.
pub struct ChatApp {
    /// Persisted agent configuration (providers, prompts, feature flags).
    pub agent_config: AgentConfig,
    /// Current conversation, persisted via `save_chat_session`.
    pub session: ChatSession,
    /// Text in the input box.
    pub input: String,
    /// Cursor position within `input` (byte vs. char indexing handled by the
    /// input handler — NOTE(review): confirm against that code).
    pub cursor_pos: usize,
    /// Active UI mode (chat, pickers, confirmations, ...).
    pub mode: ChatMode,
    /// Vertical scroll of the transcript; `u16::MAX` is used in this module
    /// as a "jump to bottom" sentinel together with `auto_scroll`.
    pub scroll_offset: u16,
    /// True while a request / agent loop is in flight.
    pub is_loading: bool,
    /// Selection state of the model picker list.
    pub model_list_state: ListState,
    /// Transient notification: (text, is_error, created-at).
    pub toast: Option<(String, bool, std::time::Instant)>,
    /// Receiver for worker-thread messages, present while loading.
    pub stream_rx: Option<mpsc::Receiver<StreamMsg>>,
    /// Assistant text accumulated so far, shared with the worker thread.
    pub streaming_content: Arc<Mutex<String>>,
    /// Cached rendered transcript; set to `None` to force a re-render.
    pub msg_lines_cache: Option<MsgLinesCache>,
    /// Selected message index in Browse mode.
    pub browse_msg_index: usize,
    /// Scroll offset while browsing.
    pub browse_scroll_offset: u16,
    /// Streaming-buffer length at the last render (render throttling).
    pub last_rendered_streaming_len: usize,
    /// Time of the last streaming render (render throttling).
    pub last_stream_render_time: std::time::Instant,
    /// Config editor: index of the provider being edited.
    pub config_provider_idx: usize,
    /// Config editor: index of the field being edited.
    pub config_field_idx: usize,
    /// Config editor: whether a field is currently in edit mode.
    pub config_editing: bool,
    /// Config editor: edit buffer text.
    pub config_edit_buf: String,
    /// Config editor: cursor within the edit buffer.
    pub config_edit_cursor: usize,
    /// When true, keep the view pinned to the newest output.
    pub auto_scroll: bool,
    /// Active color theme.
    pub theme: Theme,
    /// Archives loaded for the archive-list view.
    pub archives: Vec<super::archive::ChatArchive>,
    /// Highlighted row in the archive list.
    pub archive_list_index: usize,
    /// Suggested archive name for the confirm dialog.
    pub archive_default_name: String,
    /// User-typed archive name (used instead of the default when set).
    pub archive_custom_name: String,
    /// Whether the archive name is being edited.
    pub archive_editing_name: bool,
    /// Cursor within the archive name being edited.
    pub archive_edit_cursor: usize,
    /// Whether restoring requires an extra confirmation step.
    pub restore_confirm_needed: bool,
    /// Sender used to hand tool results back to the worker thread.
    pub tool_result_tx: Option<mpsc::SyncSender<ToolResultMsg>>,
    /// Registry of available tools (built from the loaded skills).
    pub tool_registry: ToolRegistry,
    /// Tool calls from the current model turn, with execution status.
    pub active_tool_calls: Vec<ToolCallStatus>,
    /// Index into `active_tool_calls` of the call awaiting confirmation.
    pub pending_tool_idx: usize,
    /// Pending request to edit the system prompt (consumed by the caller —
    /// NOTE(review): handled outside this module, confirm).
    pub pending_system_prompt_edit: bool,
    /// Skills discovered on disk at startup.
    pub loaded_skills: Vec<Skill>,
    /// "@" completion popup: visibility.
    pub at_popup_active: bool,
    /// "@" completion popup: current filter text.
    pub at_popup_filter: String,
    /// "@" completion popup: position in `input` where the trigger starts.
    pub at_popup_start_pos: usize,
    /// "@" completion popup: highlighted entry.
    pub at_popup_selected: usize,
    /// Pending request to edit the style (consumed by the caller —
    /// NOTE(review): handled outside this module, confirm).
    pub pending_style_edit: bool,
}
149
/// Cache of the rendered chat transcript, together with the inputs it was
/// built from so staleness can be detected cheaply.
pub struct MsgLinesCache {
    /// Number of session messages when the cache was built.
    pub msg_count: usize,
    /// Length of the last message's content at build time.
    pub last_msg_len: usize,
    /// Length of the streaming buffer at build time.
    pub streaming_len: usize,
    /// `is_loading` flag at build time.
    pub is_loading: bool,
    /// Bubble width the lines were wrapped to — presumably in terminal
    /// columns; verify against the renderer.
    pub bubble_max_width: usize,
    /// Selected message in Browse mode, if any.
    pub browse_index: Option<usize>,
    /// Tool call awaiting confirmation at build time, if any.
    pub tool_confirm_idx: Option<usize>,
    /// The rendered lines themselves.
    pub lines: Vec<Line<'static>>,
    /// Per-message anchors into `lines` — presumably (message index,
    /// starting line); confirm against the renderer.
    pub msg_start_lines: Vec<(usize, usize)>,
    /// Per-message rendered-line caches.
    pub per_msg_lines: Vec<PerMsgCache>,
    /// Already-stable rendered lines of the in-flight streaming message.
    pub streaming_stable_lines: Vec<Line<'static>>,
    /// Offset into the streaming buffer covered by the stable lines —
    /// NOTE(review): byte vs. line offset not visible here, confirm.
    pub streaming_stable_offset: usize,
}
177
/// Cached rendered lines for a single message.
pub struct PerMsgCache {
    /// Content length when the lines were rendered (staleness check).
    pub content_len: usize,
    /// Rendered lines of this message.
    pub lines: Vec<Line<'static>>,
    /// Index of the message within the session.
    pub msg_index: usize,
    /// Whether the message was rendered in its selected (Browse) state.
    pub is_selected: bool,
}
189
/// Top-level UI mode; determines key handling and what is drawn.
#[derive(PartialEq)]
pub enum ChatMode {
    /// Normal chat / input mode.
    Chat,
    /// Model / provider picker.
    SelectModel,
    /// Message-browsing mode.
    Browse,
    /// Help screen.
    Help,
    /// Configuration editor.
    Config,
    /// Confirming (and naming) a new archive.
    ArchiveConfirm,
    /// Listing existing archives.
    ArchiveList,
    /// Waiting for the user to confirm a tool call.
    ToolConfirm,
}
209
210pub fn config_total_fields() -> usize {
212 CONFIG_FIELDS.len() + CONFIG_GLOBAL_FIELDS.len()
213}
214
/// Built-in system prompt template, embedded at compile time; used to seed
/// the prompt file when neither the file nor the config provides one.
const DEFAULT_SYSTEM_PROMPT: &str = include_str!("../../../assets/system_prompt_default.md");
217
218impl ChatApp {
    /// Build the initial application state: load the config, make sure a
    /// system prompt file exists (seeding it from the config or the embedded
    /// default), load style/session/skills and wire up the tool registry.
    pub fn new() -> Self {
        let mut agent_config = load_agent_config();
        // The prompt file on disk wins over the value stored in the config.
        if let Some(file_prompt) = load_system_prompt() {
            agent_config.system_prompt = Some(file_prompt);
        } else if !system_prompt_path().exists() {
            // No prompt file yet: seed it from the config, or fall back to
            // the embedded default template.
            if let Some(config_prompt) = agent_config.system_prompt.clone() {
                let _ = save_system_prompt(&config_prompt);
            } else {
                let _ = save_system_prompt(DEFAULT_SYSTEM_PROMPT);
                agent_config.system_prompt = Some(DEFAULT_SYSTEM_PROMPT.to_string());
            }
        }
        // A style file on disk likewise overrides the configured style.
        if let Some(s) = load_style() {
            agent_config.style = Some(s);
        }
        let session = load_chat_session();
        let mut model_list_state = ListState::default();
        if !agent_config.providers.is_empty() {
            model_list_state.select(Some(agent_config.active_index));
        }
        let theme = Theme::from_name(&agent_config.theme);
        let loaded_skills = skill::load_all_skills();
        let tool_registry = ToolRegistry::new(loaded_skills.clone());
        Self {
            agent_config,
            session,
            input: String::new(),
            cursor_pos: 0,
            mode: ChatMode::Chat,
            // u16::MAX is the "jump to bottom" sentinel used with auto_scroll.
            scroll_offset: u16::MAX,
            is_loading: false,
            model_list_state,
            toast: None,
            stream_rx: None,
            streaming_content: Arc::new(Mutex::new(String::new())),
            msg_lines_cache: None,
            browse_msg_index: 0,
            browse_scroll_offset: 0,
            last_rendered_streaming_len: 0,
            last_stream_render_time: std::time::Instant::now(),
            config_provider_idx: 0,
            config_field_idx: 0,
            config_editing: false,
            config_edit_buf: String::new(),
            config_edit_cursor: 0,
            auto_scroll: true,
            theme,
            archives: Vec::new(),
            archive_list_index: 0,
            archive_default_name: String::new(),
            archive_custom_name: String::new(),
            archive_editing_name: false,
            archive_edit_cursor: 0,
            restore_confirm_needed: false,
            tool_result_tx: None,
            tool_registry,
            active_tool_calls: Vec::new(),
            pending_tool_idx: 0,
            pending_system_prompt_edit: false,
            loaded_skills,
            at_popup_active: false,
            at_popup_filter: String::new(),
            at_popup_start_pos: 0,
            at_popup_selected: 0,
            pending_style_edit: false,
        }
    }
289
290 pub fn resolve_system_prompt(&self) -> Option<String> {
292 let template = self.agent_config.system_prompt.as_ref()?;
293 let skills_summary = skill::build_skills_summary(&self.loaded_skills);
294 let tools_summary = self.tool_registry.build_tools_summary();
295 let style_text = self.agent_config.style.as_deref().unwrap_or("(未设置)");
296
297 let resolved = template
298 .replace("{{.skills}}", &skills_summary)
299 .replace("{{.tools}}", &tools_summary)
300 .replace("{{.style}}", style_text);
301 Some(resolved)
302 }
303
304 pub fn switch_theme(&mut self) {
306 self.agent_config.theme = self.agent_config.theme.next();
307 self.theme = Theme::from_name(&self.agent_config.theme);
308 self.msg_lines_cache = None; }
310
311 pub fn show_toast(&mut self, msg: impl Into<String>, is_error: bool) {
313 self.toast = Some((msg.into(), is_error, std::time::Instant::now()));
314 }
315
316 pub fn tick_toast(&mut self) {
318 if let Some((_, _, created)) = &self.toast {
319 if created.elapsed().as_secs() >= TOAST_DURATION_SECS {
320 self.toast = None;
321 }
322 }
323 }
324
325 pub fn active_provider(&self) -> Option<&ModelProvider> {
327 if self.agent_config.providers.is_empty() {
328 return None;
329 }
330 let idx = self
331 .agent_config
332 .active_index
333 .min(self.agent_config.providers.len() - 1);
334 Some(&self.agent_config.providers[idx])
335 }
336
337 pub fn active_model_name(&self) -> String {
339 self.active_provider()
340 .map(|p| p.name.clone())
341 .unwrap_or_else(|| "未配置".to_string())
342 }
343
344 pub fn build_api_messages(&self) -> Vec<ChatMessage> {
346 let max_history = self.agent_config.max_history_messages;
348 if self.session.messages.len() > max_history {
349 self.session.messages[self.session.messages.len() - max_history..].to_vec()
350 } else {
351 self.session.messages.clone()
352 }
353 }
354
    /// Push the current input as a user message and spawn a background
    /// worker thread that drives the model request / tool-call loop.
    pub fn send_message(&mut self) {
        let text = self.input.trim().to_string();
        if text.is_empty() {
            return;
        }

        // Any open "@" completion popup is stale once the message is sent.
        self.at_popup_active = false;

        self.session.messages.push(ChatMessage::text("user", text));
        self.input.clear();
        self.cursor_pos = 0;
        self.auto_scroll = true;
        // u16::MAX is the "jump to bottom" sentinel used with auto_scroll.
        self.scroll_offset = u16::MAX;

        let provider = match self.active_provider() {
            Some(p) => p.clone(),
            None => {
                self.show_toast("未配置模型提供方,请先编辑配置文件", true);
                return;
            }
        };

        self.is_loading = true;
        self.last_rendered_streaming_len = 0;
        self.last_stream_render_time = std::time::Instant::now();
        self.msg_lines_cache = None;
        self.active_tool_calls.clear();
        self.pending_tool_idx = 0;

        let api_messages = self.build_api_messages();

        // Clear the shared streaming buffer left over from a previous request.
        {
            let mut sc = self.streaming_content.lock().unwrap();
            sc.clear();
        }

        // stream_rx receives worker events; tool_result_tx feeds tool outputs
        // back to the worker (bounded so the worker blocks on slow consumers).
        let (stream_tx, stream_rx) = mpsc::channel::<StreamMsg>();
        let (tool_result_tx, tool_result_rx) = mpsc::sync_channel::<ToolResultMsg>(16);
        self.stream_rx = Some(stream_rx);
        self.tool_result_tx = Some(tool_result_tx);

        let streaming_content = Arc::clone(&self.streaming_content);
        let use_stream = self.agent_config.stream_mode;
        let system_prompt = self.resolve_system_prompt();
        let tools_enabled = self.agent_config.tools_enabled;
        let max_tool_rounds = self.agent_config.max_tool_rounds;
        let tools = if tools_enabled {
            self.tool_registry.to_openai_tools()
        } else {
            vec![]
        };

        // Run the agent loop on its own thread with a dedicated Tokio
        // runtime so the TUI event loop stays responsive.
        std::thread::spawn(move || {
            let rt = match tokio::runtime::Runtime::new() {
                Ok(rt) => rt,
                Err(e) => {
                    let _ = stream_tx.send(StreamMsg::Error(format!("创建异步运行时失败: {}", e)));
                    return;
                }
            };

            rt.block_on(run_agent_loop(
                provider,
                api_messages,
                tools,
                system_prompt,
                use_stream,
                streaming_content,
                stream_tx,
                tool_result_rx,
                max_tool_rounds,
            ));
        });
    }
438
    /// Drain pending worker messages without blocking (called every UI tick).
    ///
    /// `Chunk` keeps the view pinned to the bottom when auto-scrolling; a
    /// `ToolCallRequest` populates `active_tool_calls` and either enters
    /// ToolConfirm mode or executes auto-approved tools immediately;
    /// `Done`/`Error` (or a disconnected channel) end the loading state.
    pub fn poll_stream(&mut self) {
        if self.stream_rx.is_none() {
            return;
        }

        // While waiting for tool confirmation, leave messages queued.
        if self.mode == ChatMode::ToolConfirm {
            return;
        }

        let mut finished = false;
        let mut had_error = false;

        if let Some(ref rx) = self.stream_rx {
            loop {
                match rx.try_recv() {
                    Ok(StreamMsg::Chunk) => {
                        if self.auto_scroll {
                            self.scroll_offset = u16::MAX;
                        }
                    }
                    Ok(StreamMsg::ToolCallRequest(tool_calls)) => {
                        self.active_tool_calls.clear();
                        self.pending_tool_idx = 0;

                        for tc in tool_calls {
                            // Unknown tools still get a generic confirmation line.
                            let confirm_msg = if let Some(tool) = self.tool_registry.get(&tc.name) {
                                tool.confirmation_message(&tc.arguments)
                            } else {
                                format!("调用工具 {} 参数: {}", tc.name, tc.arguments)
                            };
                            let needs_confirm = self
                                .tool_registry
                                .get(&tc.name)
                                .map(|t| t.requires_confirmation())
                                .unwrap_or(false);
                            self.active_tool_calls.push(ToolCallStatus {
                                tool_call_id: tc.id.clone(),
                                tool_name: tc.name.clone(),
                                arguments: tc.arguments.clone(),
                                confirm_message: confirm_msg,
                                status: if needs_confirm {
                                    ToolExecStatus::PendingConfirm
                                } else {
                                    ToolExecStatus::Executing
                                },
                            });
                        }

                        let first_confirm_idx = self
                            .active_tool_calls
                            .iter()
                            .position(|tc| matches!(tc.status, ToolExecStatus::PendingConfirm));

                        if let Some(idx) = first_confirm_idx {
                            self.pending_tool_idx = idx;
                            self.mode = ChatMode::ToolConfirm;
                        } else {
                            // Nothing needs confirmation: run everything now.
                            self.execute_all_tools_no_confirm();
                        }
                        break;
                    }
                    Ok(StreamMsg::Done) => {
                        finished = true;
                        break;
                    }
                    Ok(StreamMsg::Error(e)) => {
                        self.show_toast(format!("请求失败: {}", e), true);
                        had_error = true;
                        finished = true;
                        break;
                    }
                    Err(mpsc::TryRecvError::Empty) => break,
                    Err(mpsc::TryRecvError::Disconnected) => {
                        // Worker thread is gone; treat as end of stream.
                        finished = true;
                        break;
                    }
                }
            }
        }

        if finished {
            self.finish_loading(had_error);
        }
    }
531
532 fn execute_all_tools_no_confirm(&mut self) {
534 for tc_status in self.active_tool_calls.iter_mut() {
535 if matches!(tc_status.status, ToolExecStatus::Executing) {
536 let result = if let Some(tool) = self.tool_registry.get(&tc_status.tool_name) {
537 tool.execute(&tc_status.arguments)
538 } else {
539 super::tools::ToolResult {
540 output: format!("未知工具: {}", tc_status.tool_name),
541 is_error: true,
542 }
543 };
544 let summary = if result.output.len() > 60 {
545 let mut end = 60;
546 while !result.output.is_char_boundary(end) {
547 end -= 1;
548 }
549 format!("{}...", &result.output[..end])
550 } else {
551 result.output.clone()
552 };
553 let is_error = result.is_error;
554 if let Some(ref tx) = self.tool_result_tx {
555 let _ = tx.send(ToolResultMsg {
556 tool_call_id: tc_status.tool_call_id.clone(),
557 result: result.output,
558 is_error,
559 });
560 }
561 tc_status.status = if is_error {
562 ToolExecStatus::Failed(summary)
563 } else {
564 ToolExecStatus::Done(summary)
565 };
566 }
567 }
568 }
569
570 pub fn execute_pending_tool(&mut self) {
572 let idx = self.pending_tool_idx;
573 if idx >= self.active_tool_calls.len() {
574 self.mode = ChatMode::Chat;
575 return;
576 }
577
578 {
579 let tc_status = &mut self.active_tool_calls[idx];
580 tc_status.status = ToolExecStatus::Executing;
581 }
582
583 let (tool_name, arguments, tool_call_id) = {
584 let tc = &self.active_tool_calls[idx];
585 (
586 tc.tool_name.clone(),
587 tc.arguments.clone(),
588 tc.tool_call_id.clone(),
589 )
590 };
591
592 let result = if let Some(tool) = self.tool_registry.get(&tool_name) {
593 tool.execute(&arguments)
594 } else {
595 super::tools::ToolResult {
596 output: format!("未知工具: {}", tool_name),
597 is_error: true,
598 }
599 };
600
601 let summary = if result.output.len() > 60 {
602 let mut end = 60;
603 while !result.output.is_char_boundary(end) {
604 end -= 1;
605 }
606 format!("{}...", &result.output[..end])
607 } else {
608 result.output.clone()
609 };
610 let is_error = result.is_error;
611
612 if let Some(ref tx) = self.tool_result_tx {
613 let _ = tx.send(ToolResultMsg {
614 tool_call_id,
615 result: result.output,
616 is_error,
617 });
618 }
619
620 {
621 let tc_status = &mut self.active_tool_calls[idx];
622 tc_status.status = if is_error {
623 ToolExecStatus::Failed(summary)
624 } else {
625 ToolExecStatus::Done(summary)
626 };
627 }
628
629 self.advance_tool_confirm();
631 }
632
633 pub fn reject_pending_tool(&mut self) {
635 let idx = self.pending_tool_idx;
636 if idx >= self.active_tool_calls.len() {
637 self.mode = ChatMode::Chat;
638 return;
639 }
640
641 let tool_call_id = self.active_tool_calls[idx].tool_call_id.clone();
642 self.active_tool_calls[idx].status = ToolExecStatus::Rejected;
643
644 if let Some(ref tx) = self.tool_result_tx {
645 let _ = tx.send(ToolResultMsg {
646 tool_call_id,
647 result: "用户拒绝执行该工具".to_string(),
648 is_error: true,
649 });
650 }
651
652 self.advance_tool_confirm();
653 }
654
655 fn advance_tool_confirm(&mut self) {
657 let next = self
659 .active_tool_calls
660 .iter()
661 .enumerate()
662 .find(|(_, tc)| matches!(tc.status, ToolExecStatus::PendingConfirm))
663 .map(|(i, _)| i);
664
665 if let Some(next_idx) = next {
666 self.pending_tool_idx = next_idx;
667 } else {
669 self.execute_all_tools_no_confirm();
671 self.mode = ChatMode::Chat;
673 }
674 }
675
676 fn finish_loading(&mut self, had_error: bool) {
678 self.stream_rx = None;
679 self.tool_result_tx = None;
680 self.is_loading = false;
681 self.last_rendered_streaming_len = 0;
682 self.msg_lines_cache = None;
683 self.active_tool_calls.clear();
684
685 if !had_error {
686 let content = {
687 let sc = self.streaming_content.lock().unwrap();
688 sc.clone()
689 };
690 if !content.is_empty() {
691 self.session
692 .messages
693 .push(ChatMessage::text("assistant", content));
694 self.streaming_content.lock().unwrap().clear();
695 self.show_toast("回复完成 ✓", false);
696 }
697 if self.auto_scroll {
698 self.scroll_offset = u16::MAX;
699 }
700 } else {
701 self.streaming_content.lock().unwrap().clear();
702 }
703
704 let _ = save_chat_session(&self.session);
705 }
706
707 pub fn clear_session(&mut self) {
709 self.session.messages.clear();
710 self.scroll_offset = 0;
711 self.msg_lines_cache = None; let _ = save_chat_session(&self.session);
713 self.show_toast("对话已清空", false);
714 }
715
716 pub fn switch_model(&mut self) {
718 if let Some(sel) = self.model_list_state.selected() {
719 self.agent_config.active_index = sel;
720 let _ = save_agent_config(&self.agent_config);
721 let name = self.active_model_name();
722 self.show_toast(format!("已切换到: {}", name), false);
723 }
724 self.mode = ChatMode::Chat;
725 }
726
727 pub fn scroll_up(&mut self) {
729 self.scroll_offset = self.scroll_offset.saturating_sub(3);
730 self.auto_scroll = false;
732 }
733
734 pub fn scroll_down(&mut self) {
736 self.scroll_offset = self.scroll_offset.saturating_add(3);
737 }
740
741 pub fn start_archive_confirm(&mut self) {
745 use super::archive::generate_default_archive_name;
746 self.archive_default_name = generate_default_archive_name();
747 self.archive_custom_name = String::new();
748 self.archive_editing_name = false;
749 self.archive_edit_cursor = 0;
750 self.mode = ChatMode::ArchiveConfirm;
751 }
752
753 pub fn start_archive_list(&mut self) {
755 use super::archive::list_archives;
756 self.archives = list_archives();
757 self.archive_list_index = 0;
758 self.restore_confirm_needed = false;
759 self.mode = ChatMode::ArchiveList;
760 }
761
762 pub fn do_archive(&mut self, name: &str) {
764 use super::archive::create_archive;
765
766 match create_archive(name, self.session.messages.clone()) {
767 Ok(_) => {
768 self.clear_session();
770 self.show_toast(format!("对话已归档: {}", name), false);
771 }
772 Err(e) => {
773 self.show_toast(e, true);
774 }
775 }
776 self.mode = ChatMode::Chat;
777 }
778
779 pub fn do_restore(&mut self) {
781 use super::archive::restore_archive;
782
783 if let Some(archive) = self.archives.get(self.archive_list_index) {
784 match restore_archive(&archive.name) {
785 Ok(messages) => {
786 self.session.messages = messages;
788 self.scroll_offset = u16::MAX;
789 self.msg_lines_cache = None;
790 self.input.clear();
791 self.cursor_pos = 0;
792 let _ = save_chat_session(&self.session);
793 self.show_toast(format!("已还原归档: {}", archive.name), false);
794 }
795 Err(e) => {
796 self.show_toast(e, true);
797 }
798 }
799 }
800 self.mode = ChatMode::Chat;
801 }
802
803 pub fn do_delete_archive(&mut self) {
805 use super::archive::delete_archive;
806
807 if let Some(archive) = self.archives.get(self.archive_list_index) {
808 match delete_archive(&archive.name) {
809 Ok(_) => {
810 self.show_toast(format!("归档已删除: {}", archive.name), false);
811 self.archives = super::archive::list_archives();
813 if self.archive_list_index >= self.archives.len() && self.archive_list_index > 0
814 {
815 self.archive_list_index -= 1;
816 }
817 }
818 Err(e) => {
819 self.show_toast(e, true);
820 }
821 }
822 }
823 }
824}
825
/// Agent loop executed on the worker thread's Tokio runtime.
///
/// Each round sends `messages` to the model (streaming when `use_stream`).
/// When the model answers with tool calls, they are forwarded to the UI via
/// `tx`, the results are awaited on `tool_result_rx` (60s timeout per tool),
/// appended as "tool" messages, and another round starts — bounded by
/// `max_tool_rounds`. Plain answers are pushed into `streaming_content`
/// (signalled by `StreamMsg::Chunk`) and end the loop. On normal completion
/// a `StreamMsg::Done` is sent; error paths send `StreamMsg::Error` and
/// return early.
async fn run_agent_loop(
    provider: ModelProvider,
    mut messages: Vec<ChatMessage>,
    tools: Vec<ChatCompletionTools>,
    system_prompt: Option<String>,
    use_stream: bool,
    streaming_content: Arc<Mutex<String>>,
    tx: mpsc::Sender<StreamMsg>,
    tool_result_rx: mpsc::Receiver<ToolResultMsg>,
    max_tool_rounds: usize,
) {
    let client = create_openai_client(&provider);

    for _round in 0..max_tool_rounds {
        // Start every round with an empty streaming buffer.
        {
            let mut sc = streaming_content.lock().unwrap();
            sc.clear();
        }

        // Log the full outgoing conversation for debugging.
        {
            let mut log_content = String::new();
            if let Some(ref sp) = system_prompt {
                log_content.push_str(&format!("[System] {}\n", sp));
            }
            for msg in &messages {
                match msg.role.as_str() {
                    "assistant" => {
                        if !msg.content.is_empty() {
                            log_content.push_str(&format!("[Assistant] {}\n", msg.content));
                        }
                        if let Some(ref tcs) = msg.tool_calls {
                            for tc in tcs {
                                log_content.push_str(&format!(
                                    "[Assistant/ToolCall] {}: {}\n",
                                    tc.name, tc.arguments
                                ));
                            }
                        }
                    }
                    "tool" => {
                        let id = msg.tool_call_id.as_deref().unwrap_or("?");
                        log_content.push_str(&format!("[Tool/Result({})] {}\n", id, msg.content));
                    }
                    "user" => {
                        log_content.push_str(&format!("[User] {}\n", msg.content));
                    }
                    other => {
                        log_content.push_str(&format!("[{}] {}\n", other, msg.content));
                    }
                }
            }
            write_info_log("Chat 请求", &log_content);
        }

        let request = match build_request_with_tools(
            &provider,
            &messages,
            tools.clone(),
            system_prompt.as_deref(),
        ) {
            Ok(req) => req,
            Err(e) => {
                let _ = tx.send(StreamMsg::Error(format!("构建请求失败: {}", e)));
                return;
            }
        };

        if use_stream {
            // --- Streaming path ---
            let mut stream = match client.chat().create_stream(request.clone()).await {
                Ok(s) => s,
                Err(e) => {
                    let error_msg = format!("API 请求失败: {}", e);
                    write_error_log("Chat API 流式请求创建", &error_msg);
                    let _ = tx.send(StreamMsg::Error(error_msg));
                    return;
                }
            };

            let mut finish_reason: Option<async_openai::types::chat::FinishReason> = None;
            let mut assistant_text = String::new();
            // Streamed tool-call fragments keyed by chunk index:
            // (id, accumulated name, accumulated arguments).
            let mut raw_tool_calls: std::collections::BTreeMap<u32, (String, String, String)> =
                std::collections::BTreeMap::new();
            let mut stream_had_deserialize_error = false;

            'stream: while let Some(result) = stream.next().await {
                match result {
                    Ok(response) => {
                        for choice in &response.choices {
                            if let Some(ref content) = choice.delta.content {
                                // Mirror the delta into the shared buffer and
                                // ping the UI to redraw.
                                assistant_text.push_str(content);
                                let mut sc = streaming_content.lock().unwrap();
                                sc.push_str(content);
                                drop(sc);
                                let _ = tx.send(StreamMsg::Chunk);
                            }
                            if let Some(ref tc_chunks) = choice.delta.tool_calls {
                                for chunk in tc_chunks {
                                    let entry =
                                        raw_tool_calls.entry(chunk.index).or_insert_with(|| {
                                            (
                                                chunk.id.clone().unwrap_or_default(),
                                                String::new(),
                                                String::new(),
                                            )
                                        });
                                    // The id may arrive in a later fragment.
                                    if entry.0.is_empty() {
                                        if let Some(ref id) = chunk.id {
                                            entry.0 = id.clone();
                                        }
                                    }
                                    if let Some(ref func) = chunk.function {
                                        if let Some(ref name) = func.name {
                                            entry.1.push_str(name);
                                        }
                                        if let Some(ref args) = func.arguments {
                                            entry.2.push_str(args);
                                        }
                                    }
                                }
                            }
                            if let Some(ref fr) = choice.finish_reason {
                                finish_reason = Some(fr.clone());
                            }
                        }
                    }
                    Err(e) => {
                        let error_str = format!("{}", e);
                        // Heuristic: some providers emit tool-call deltas the
                        // stream deserializer rejects; retry non-streaming.
                        if error_str.contains("missing field `index`")
                            || error_str.contains("tool_calls")
                        {
                            stream_had_deserialize_error = true;
                            break 'stream;
                        }
                        write_error_log("Chat API 流式响应", &error_str);
                        let _ = tx.send(StreamMsg::Error(error_str));
                        return;
                    }
                }
            }

            if !assistant_text.is_empty() {
                write_info_log("Chat 回复", &assistant_text);
            }

            if stream_had_deserialize_error {
                // --- Non-streaming fallback after a deserialize failure ---
                // Drop whatever partial text was shown; the full response is
                // re-fetched below.
                {
                    let mut sc = streaming_content.lock().unwrap();
                    sc.clear();
                }
                match client.chat().create(request).await {
                    Ok(response) => {
                        if let Some(choice) = response.choices.first() {
                            let is_tool_calls = matches!(
                                choice.finish_reason,
                                Some(async_openai::types::chat::FinishReason::ToolCalls)
                            );
                            if is_tool_calls {
                                if let Some(ref tc_list) = choice.message.tool_calls {
                                    let tool_items: Vec<ToolCallItem> = tc_list
                                        .iter()
                                        .filter_map(|tc| {
                                            if let async_openai::types::chat::ChatCompletionMessageToolCalls::Function(f) = tc {
                                                Some(ToolCallItem {
                                                    id: f.id.clone(),
                                                    name: f.function.name.clone(),
                                                    arguments: f.function.arguments.clone(),
                                                })
                                            } else {
                                                None
                                            }
                                        })
                                        .collect();

                                    if tool_items.is_empty() {
                                        break;
                                    }

                                    {
                                        let mut log_content = String::new();
                                        for item in &tool_items {
                                            log_content.push_str(&format!(
                                                "- {}: {}\n",
                                                item.name, item.arguments
                                            ));
                                        }
                                        write_info_log("工具调用请求", &log_content);
                                    }

                                    let assistant_text =
                                        choice.message.content.clone().unwrap_or_default();
                                    if !assistant_text.is_empty() {
                                        write_info_log("Chat 回复", &assistant_text);
                                    }

                                    messages.push(ChatMessage {
                                        role: "assistant".to_string(),
                                        content: assistant_text,
                                        tool_calls: Some(tool_items.clone()),
                                        tool_call_id: None,
                                    });

                                    // Hand the calls to the UI; a closed
                                    // channel means the UI is gone.
                                    if tx
                                        .send(StreamMsg::ToolCallRequest(tool_items.clone()))
                                        .is_err()
                                    {
                                        return;
                                    }

                                    // Wait for one result per requested tool.
                                    let mut tool_results: Vec<ToolResultMsg> = Vec::new();
                                    for _ in &tool_items {
                                        match tool_result_rx
                                            .recv_timeout(std::time::Duration::from_secs(60))
                                        {
                                            Ok(result) => tool_results.push(result),
                                            Err(_) => {
                                                let _ = tx.send(StreamMsg::Error(
                                                    "等待工具执行超时".to_string(),
                                                ));
                                                return;
                                            }
                                        }
                                    }

                                    {
                                        let mut log_content = String::new();
                                        for (i, result) in tool_results.iter().enumerate() {
                                            let tool_name = tool_items
                                                .get(i)
                                                .map(|t| t.name.as_str())
                                                .unwrap_or("unknown");
                                            log_content.push_str(&format!(
                                                "- [{}] {}: {}\n",
                                                result.tool_call_id, tool_name, result.result
                                            ));
                                        }
                                        write_info_log("工具调用结果", &log_content);
                                    }

                                    for result in tool_results {
                                        messages.push(ChatMessage {
                                            role: "tool".to_string(),
                                            content: result.result,
                                            tool_calls: None,
                                            tool_call_id: Some(result.tool_call_id),
                                        });
                                    }
                                    // Next round: send tool results back to the model.
                                    continue;
                                }
                            }
                            // Plain text answer: surface it as one big chunk.
                            if let Some(ref content) = choice.message.content {
                                write_info_log("Chat 回复", content);
                                let mut sc = streaming_content.lock().unwrap();
                                sc.push_str(content);
                                drop(sc);
                                let _ = tx.send(StreamMsg::Chunk);
                            }
                        }
                    }
                    Err(e) => {
                        let error_msg = format!("API 请求失败(fallback): {}", e);
                        write_error_log("Chat API fallback 非流式", &error_msg);
                        let _ = tx.send(StreamMsg::Error(error_msg));
                        return;
                    }
                }
                break;
            }

            let is_tool_calls = matches!(
                finish_reason,
                Some(async_openai::types::chat::FinishReason::ToolCalls)
            );

            if is_tool_calls && !raw_tool_calls.is_empty() {
                // Assemble the accumulated fragments into complete calls.
                let tool_items: Vec<ToolCallItem> = raw_tool_calls
                    .into_values()
                    .map(|(id, name, arguments)| ToolCallItem {
                        id,
                        name,
                        arguments,
                    })
                    .collect();

                if tool_items.is_empty() {
                    break;
                }

                {
                    let mut log_content = String::new();
                    for item in &tool_items {
                        log_content.push_str(&format!("- {}: {}\n", item.name, item.arguments));
                    }
                    write_info_log("工具调用请求", &log_content);
                }

                messages.push(ChatMessage {
                    role: "assistant".to_string(),
                    content: assistant_text,
                    tool_calls: Some(tool_items.clone()),
                    tool_call_id: None,
                });

                // Hand the calls to the UI; a closed channel means it's gone.
                if tx
                    .send(StreamMsg::ToolCallRequest(tool_items.clone()))
                    .is_err()
                {
                    return;
                }

                // Wait for one result per requested tool (60s each).
                let mut tool_results: Vec<ToolResultMsg> = Vec::new();
                for _ in &tool_items {
                    match tool_result_rx.recv_timeout(std::time::Duration::from_secs(60)) {
                        Ok(result) => tool_results.push(result),
                        Err(_) => {
                            let _ = tx.send(StreamMsg::Error("等待工具执行超时".to_string()));
                            return;
                        }
                    }
                }

                {
                    let mut log_content = String::new();
                    for (i, result) in tool_results.iter().enumerate() {
                        let (tool_name, tool_args) = tool_items
                            .get(i)
                            .map(|t| (t.name.as_str(), t.arguments.as_str()))
                            .unwrap_or(("unknown", ""));
                        log_content.push_str(&format!(
                            "- [{}] {}({})\n结果: {}\n",
                            result.tool_call_id, tool_name, tool_args, result.result
                        ));
                    }
                    write_info_log("工具调用结果", &log_content);
                }

                for result in tool_results {
                    messages.push(ChatMessage {
                        role: "tool".to_string(),
                        content: result.result,
                        tool_calls: None,
                        tool_call_id: Some(result.tool_call_id),
                    });
                }

                // Next round: send tool results back to the model.
                continue;
            } else {
                // Plain streamed answer: done.
                break;
            }
        } else {
            // --- Non-streaming path ---
            match client.chat().create(request).await {
                Ok(response) => {
                    if let Some(choice) = response.choices.first() {
                        let is_tool_calls = matches!(
                            choice.finish_reason,
                            Some(async_openai::types::chat::FinishReason::ToolCalls)
                        );

                        if is_tool_calls {
                            if let Some(ref tc_list) = choice.message.tool_calls {
                                let tool_items: Vec<ToolCallItem> = tc_list
                                    .iter()
                                    .filter_map(|tc| {
                                        if let async_openai::types::chat::ChatCompletionMessageToolCalls::Function(f) = tc {
                                            Some(ToolCallItem {
                                                id: f.id.clone(),
                                                name: f.function.name.clone(),
                                                arguments: f.function.arguments.clone(),
                                            })
                                        } else {
                                            None
                                        }
                                    })
                                    .collect();

                                if tool_items.is_empty() {
                                    break;
                                }

                                {
                                    let mut log_content = String::new();
                                    for item in &tool_items {
                                        log_content.push_str(&format!(
                                            "- {}: {}\n",
                                            item.name, item.arguments
                                        ));
                                    }
                                    write_info_log("工具调用请求", &log_content);
                                }

                                let assistant_text =
                                    choice.message.content.clone().unwrap_or_default();
                                if !assistant_text.is_empty() {
                                    write_info_log("Chat 回复", &assistant_text);
                                }

                                messages.push(ChatMessage {
                                    role: "assistant".to_string(),
                                    content: assistant_text,
                                    tool_calls: Some(tool_items.clone()),
                                    tool_call_id: None,
                                });

                                // Hand the calls to the UI; a closed channel
                                // means the UI is gone.
                                if tx
                                    .send(StreamMsg::ToolCallRequest(tool_items.clone()))
                                    .is_err()
                                {
                                    return;
                                }

                                // Wait for one result per requested tool.
                                let mut tool_results: Vec<ToolResultMsg> = Vec::new();
                                for _ in &tool_items {
                                    match tool_result_rx
                                        .recv_timeout(std::time::Duration::from_secs(60))
                                    {
                                        Ok(result) => tool_results.push(result),
                                        Err(_) => {
                                            let _ = tx.send(StreamMsg::Error(
                                                "等待工具执行超时".to_string(),
                                            ));
                                            return;
                                        }
                                    }
                                }

                                {
                                    let mut log_content = String::new();
                                    for (i, result) in tool_results.iter().enumerate() {
                                        let tool_name = tool_items
                                            .get(i)
                                            .map(|t| t.name.as_str())
                                            .unwrap_or("unknown");
                                        log_content.push_str(&format!(
                                            "- [{}] {}: {}\n",
                                            result.tool_call_id, tool_name, result.result
                                        ));
                                    }
                                    write_info_log("工具调用结果", &log_content);
                                }

                                for result in tool_results {
                                    messages.push(ChatMessage {
                                        role: "tool".to_string(),
                                        content: result.result,
                                        tool_calls: None,
                                        tool_call_id: Some(result.tool_call_id),
                                    });
                                }

                                // Next round: send tool results to the model.
                                continue;
                            }
                        }

                        // Plain text answer: surface it as one big chunk.
                        if let Some(ref content) = choice.message.content {
                            write_info_log("Chat 回复", content);
                            let mut sc = streaming_content.lock().unwrap();
                            sc.push_str(content);
                            drop(sc);
                            let _ = tx.send(StreamMsg::Chunk);
                        }
                    }
                }
                Err(e) => {
                    let error_msg = format!("API 请求失败: {}", e);
                    write_error_log("Chat API 非流式请求", &error_msg);
                    let _ = tx.send(StreamMsg::Error(error_msg));
                    return;
                }
            }
            break;
        }
    }

    let _ = tx.send(StreamMsg::Done);
}