1use std::collections::VecDeque;
7use std::sync::Arc;
8use tracing::warn;
9
10use super::state::{
11 AppState, AttachmentState, ConversationState, ErrorEntry, ErrorSeverity, GenerationStatus,
12 InputBuffer, ModelState, OperationState, StatusState, UIState,
13};
14use super::theme::Theme;
15use super::widgets::{ChatState, InputState};
16use crate::constants::UI_ERROR_LOG_MAX_SIZE;
17use crate::models::{ChatMessage, MessageRole, Model, StreamCallback};
18use crate::session::{ConversationHistory, ConversationManager};
19
/// Top-level TUI application state: owns the input buffer, the chat
/// transcript/session, and the model handle, and coordinates message
/// flow between them.
pub struct App {
    /// Line-editing buffer for the user's prompt.
    pub input: InputBuffer,
    /// Main-loop flag; `quit()` sets this to `false` to exit the UI.
    pub running: bool,
    /// Streaming assistant response accumulated chunk by chunk
    /// (pre-sized in `new()` to limit reallocation during streaming).
    pub current_response: String,
    /// Working directory captured at startup; also passed to the
    /// conversation manager for persistence.
    pub working_dir: String,
    /// Rolling log of recent errors, capped at `UI_ERROR_LOG_MAX_SIZE`
    /// by `log_error`.
    pub error_log: VecDeque<ErrorEntry>,
    /// Coarse lifecycle state (e.g. `Idle` vs. `Generating`).
    pub app_state: AppState,

    /// Active model handle plus its identifier/configuration.
    pub model_state: ModelState,
    /// Widget and theme state used for rendering.
    pub ui_state: UIState,
    /// Conversation transcript, persistence manager, and token totals.
    pub session_state: ConversationState,
    /// In-flight operation bookkeeping (accumulated tool calls, etc.).
    pub operation_state: OperationState,
    /// Status-bar message state.
    pub status_state: StatusState,
    /// Pending attachments for the next message.
    pub attachment_state: AttachmentState,
}
48
49impl App {
50 pub fn new(model: Box<dyn Model>, model_id: String) -> Self {
52 let working_dir = std::env::current_dir()
53 .map(|p| p.to_string_lossy().to_string())
54 .unwrap_or_else(|_| ".".to_string());
55
56 let model_state = ModelState::new(model, model_id);
58
59 let conversation_manager = ConversationManager::new(&working_dir).ok();
61 let current_conversation = conversation_manager
62 .as_ref()
63 .map(|_| ConversationHistory::new(working_dir.clone(), model_state.model_name.clone()));
64
65 let input_history: std::collections::VecDeque<String> = current_conversation
67 .as_ref()
68 .map(|conv| conv.input_history.clone())
69 .unwrap_or_default();
70
71 let ui_state = UIState {
73 chat_state: ChatState::new(),
74 input_state: InputState::new(),
75 theme: Theme::dark(),
76 selected_message: None,
77 attachment_focused: false,
78 selected_attachment: 0,
79 attachment_area_y: None,
80 };
81
82 let session_state = ConversationState::with_conversation(
84 conversation_manager,
85 current_conversation,
86 input_history,
87 );
88
89 Self {
90 input: InputBuffer::new(),
91 running: true,
92 current_response: String::with_capacity(8192),
93 working_dir,
94 error_log: VecDeque::new(),
95 app_state: AppState::Idle,
96 model_state,
97 ui_state,
98 session_state,
99 operation_state: OperationState::new(),
100 status_state: StatusState::new(),
101 attachment_state: AttachmentState::new(),
102 }
103 }
104
    /// Current cursor offset within the input buffer.
    pub fn cursor_position(&self) -> usize {
        self.input.cursor_position
    }
112
    /// Moves the input cursor to `pos`.
    ///
    /// NOTE(review): no bounds check against the buffer length here —
    /// presumably callers clamp `pos`; confirm before passing arbitrary values.
    pub fn set_cursor_position(&mut self, pos: usize) {
        self.input.cursor_position = pos;
    }
117
118 pub fn add_message(&mut self, role: MessageRole, content: String) {
122 let mut message = match role {
123 MessageRole::User => ChatMessage::user(content),
124 MessageRole::Assistant => ChatMessage::assistant(content),
125 MessageRole::System => ChatMessage::system(content),
126 MessageRole::Tool => ChatMessage::tool("", "", content),
127 };
128 let (thinking, answer) = ChatMessage::extract_thinking(&message.content);
130 message.content = answer;
131 message.thinking = thinking;
132 self.commit_message(message);
133 }
134
135 pub fn add_message_with_images(&mut self, role: MessageRole, content: String, images: Option<Vec<String>>) {
137 let mut message = match role {
138 MessageRole::User => ChatMessage::user(content),
139 MessageRole::Assistant => ChatMessage::assistant(content),
140 MessageRole::System => ChatMessage::system(content),
141 MessageRole::Tool => ChatMessage::tool("", "", content),
142 };
143 let (thinking, answer) = ChatMessage::extract_thinking(&message.content);
144 message.content = answer;
145 message.thinking = thinking;
146 if let Some(imgs) = images {
147 message = message.with_images(imgs);
148 }
149 self.commit_message(message);
150 }
151
152 pub fn add_assistant_message_with_tool_calls(
154 &mut self,
155 content: String,
156 tool_calls: Vec<crate::models::ToolCall>,
157 ) {
158 let mut message = ChatMessage::assistant(content).with_tool_calls(tool_calls);
159 let (thinking, answer) = ChatMessage::extract_thinking(&message.content);
160 message.content = answer;
161 message.thinking = thinking;
162 self.commit_message(message);
163 }
164
    /// Records a tool's output as a `Tool` message tied back to the
    /// originating call via `tool_call_id`.
    ///
    /// Unlike the other `add_*` helpers, no thinking-extraction is
    /// applied: tool output is committed verbatim.
    pub fn add_tool_result(
        &mut self,
        tool_call_id: String,
        tool_name: String,
        content: String,
    ) {
        let message = ChatMessage::tool(tool_call_id, tool_name, content);
        self.commit_message(message);
    }
175
176 fn commit_message(&mut self, message: ChatMessage) {
178 self.session_state.messages.push(message.clone());
179 if let Some(ref mut conv) = self.session_state.current_conversation {
180 conv.add_messages(&[message]);
181 }
182 }
183
    /// Empties the input buffer.
    pub fn clear_input(&mut self) {
        self.input.clear();
    }
188
    /// Shows `message` in the status bar (delegates to `StatusState`).
    pub fn set_status(&mut self, message: impl Into<String>) {
        self.status_state.set(message);
    }
195
    /// Clears the status bar.
    pub fn clear_status(&mut self) {
        self.status_state.clear();
    }
200
201 pub fn display_error(&mut self, summary: impl Into<String>, detail: impl Into<String>) {
205 let summary = summary.into();
206 let detail = detail.into();
207
208 self.set_status(format!("[Error] {}", summary));
209
210 if detail.is_empty() {
211 self.add_message(MessageRole::System, format!("Error: {}", summary));
212 } else {
213 self.add_message(MessageRole::System, detail);
214 }
215 }
216
    /// Convenience wrapper around `display_error` that uses the same text
    /// for both summary and detail.
    pub fn display_error_simple(&mut self, message: impl Into<String>) {
        let message = message.into();
        self.display_error(message.clone(), message);
    }
222
223 pub fn log_error(&mut self, entry: ErrorEntry) {
225 self.status_state.set(entry.display());
226 self.error_log.push_back(entry);
227 if self.error_log.len() > UI_ERROR_LOG_MAX_SIZE {
228 self.error_log.pop_front(); }
230 }
231
    /// Convenience: builds an `ErrorEntry` from `severity` and `msg`,
    /// then logs it via `log_error`.
    pub fn log_error_msg(&mut self, severity: ErrorSeverity, msg: impl Into<String>) {
        self.log_error(ErrorEntry::new(severity, msg.into()));
    }
236
    /// Convenience: logs an error entry that carries additional context
    /// (e.g. the operation that failed) alongside the message.
    pub fn log_error_with_context(
        &mut self,
        severity: ErrorSeverity,
        msg: impl Into<String>,
        context: impl Into<String>,
    ) {
        self.log_error(ErrorEntry::with_context(severity, msg.into(), context.into()));
    }
246
    /// Returns up to `count` of the most recent error entries, newest first.
    pub fn recent_errors(&self, count: usize) -> Vec<&ErrorEntry> {
        self.error_log.iter().rev().take(count).collect()
    }
251
    /// Sets the terminal window title via crossterm.
    ///
    /// Best-effort: the result is deliberately discarded, since a failed
    /// title update should never disrupt the UI.
    pub fn set_terminal_title(&self, title: &str) {
        use crossterm::{execute, terminal::SetTitle};
        use std::io::stdout;
        let _ = execute!(stdout(), SetTitle(title));
    }
260
    /// Asks the model to produce a short title for the current conversation.
    ///
    /// No-op when a title already exists or fewer than two messages have
    /// been exchanged. Builds a prompt from up to the first four messages
    /// (user/assistant only, truncated to 200 chars each), streams the
    /// model's reply into a shared buffer, then stores the cleaned first
    /// line (surrounding quotes/punctuation trimmed, capped at 50 chars).
    pub async fn generate_conversation_title(&mut self) {
        if self.session_state.conversation_title.is_some() || self.session_state.messages.len() < 2 {
            return;
        }

        let mut conversation_summary = String::new();
        for (i, msg) in self.session_state.messages.iter().take(4).enumerate() {
            let role = match msg.role {
                MessageRole::User => "User",
                MessageRole::Assistant => "Assistant",
                // System/tool messages are local noise, not conversation.
                MessageRole::System | MessageRole::Tool => continue,
            };
            conversation_summary.push_str(&format!(
                "{}: {}\n\n",
                role,
                // char-based truncation keeps multi-byte text valid.
                msg.content.chars().take(200).collect::<String>()
            ));
            // Redundant with `take(4)` above (i only reaches 3); kept as a
            // belt-and-braces guard.
            if i >= 3 { break; }
        }

        let title_prompt = format!(
            "Based on this conversation, generate a short, descriptive title (2-4 words maximum, no quotes):\n\n{}\n\nTitle:",
            conversation_summary
        );

        let messages = vec![ChatMessage::user(title_prompt)];

        // Shared accumulator the streaming callback appends into.
        let title_string = Arc::new(tokio::sync::Mutex::new(String::new()));
        let title_clone = Arc::clone(&title_string);

        let callback: StreamCallback = Arc::new(move |chunk: &str| {
            // NOTE(review): try_lock silently drops a chunk if the lock is
            // contended. Harmless for a title as long as chunks never
            // arrive concurrently — confirm the stream delivery model.
            if let Ok(mut title) = title_clone.try_lock() {
                title.push_str(chunk);
            }
        });

        // NOTE(review): a write lock is taken for the whole request —
        // presumably `chat` needs exclusive access; confirm, since this
        // blocks any concurrent use of the model.
        let model = self.model_state.model.write().await;
        let config = self.model_state.build_config();

        if model.chat(&messages, &config, Some(callback)).await.is_ok() {
            let final_title = title_string.lock().await;
            let title = final_title.lines().next().unwrap_or(&final_title)
                .trim()
                .trim_matches(|c| c == '"' || c == '\'' || c == '.' || c == ',')
                .chars()
                .take(50)
                .collect::<String>();

            if !title.is_empty() {
                self.session_state.conversation_title = Some(title);
            }
        }
    }
317
    /// Scrolls the chat view up by `amount` rows.
    pub fn scroll_up(&mut self, amount: u16) {
        self.ui_state.chat_state.scroll_up(amount);
    }
323
    /// Scrolls the chat view down by `amount` rows.
    pub fn scroll_down(&mut self, amount: u16) {
        self.ui_state.chat_state.scroll_down(amount);
    }
327
    /// Requests a clean shutdown; the main loop observes `running`.
    pub fn quit(&mut self) {
        self.running = false;
    }
333
334 pub fn build_message_history(&self) -> Vec<ChatMessage> {
339 self.session_state.messages
340 .iter()
341 .filter(|msg| {
342 msg.role == MessageRole::User
343 || msg.role == MessageRole::Assistant
344 || msg.role == MessageRole::Tool
345 })
346 .cloned()
347 .collect()
348 }
349
350 pub fn build_managed_message_history(
351 &self,
352 max_context_tokens: usize,
353 reserve_tokens: usize,
354 ) -> Vec<ChatMessage> {
355 use crate::utils::Tokenizer;
356
357 let tokenizer = Tokenizer::new(&self.model_state.model_name);
358 let available_tokens = max_context_tokens.saturating_sub(reserve_tokens);
359
360 let all_messages: Vec<ChatMessage> = self
362 .session_state
363 .messages
364 .iter()
365 .filter(|msg| {
366 msg.role == MessageRole::User
367 || msg.role == MessageRole::Assistant
368 || msg.role == MessageRole::Tool
369 })
370 .cloned()
371 .collect();
372
373 if all_messages.is_empty() {
374 return Vec::new();
375 }
376
377 let messages_for_counting: Vec<(String, String)> = all_messages
378 .iter()
379 .map(|msg| {
380 let role = match msg.role {
381 MessageRole::User => "user",
382 MessageRole::Assistant => "assistant",
383 MessageRole::System => "system",
384 MessageRole::Tool => "tool",
385 };
386 (role.to_string(), msg.content.clone())
387 })
388 .collect();
389
390 let total_tokens = tokenizer
391 .count_chat_tokens(&messages_for_counting)
392 .unwrap_or_else(|_| all_messages.iter().map(|m| m.content.len() / 4).sum());
393
394 if total_tokens <= available_tokens {
395 return all_messages;
396 }
397
398 let mut kept_messages = Vec::new();
399 let mut current_tokens = 0;
400
401 for msg in all_messages.iter().rev() {
402 let msg_text = vec![(
403 match msg.role {
404 MessageRole::User => "user",
405 MessageRole::Assistant => "assistant",
406 MessageRole::System => "system",
407 MessageRole::Tool => "tool",
408 }
409 .to_string(),
410 msg.content.clone(),
411 )];
412
413 let msg_tokens = tokenizer
414 .count_chat_tokens(&msg_text)
415 .unwrap_or(msg.content.len() / 4);
416
417 if current_tokens + msg_tokens <= available_tokens {
418 kept_messages.push(msg.clone());
419 current_tokens += msg_tokens;
420 } else if kept_messages.len() < 2 {
421 kept_messages.push(msg.clone());
422 break;
423 } else {
424 break;
425 }
426 }
427
428 kept_messages.reverse();
429 kept_messages
430 }
431
432 pub fn load_conversation(&mut self, conversation: ConversationHistory) {
435 self.session_state.messages = conversation.messages.clone();
436 self.session_state.current_conversation = Some(conversation);
437 self.set_status("Conversation loaded");
438 }
439
440 pub fn save_conversation(&mut self) -> anyhow::Result<()> {
441 if let Some(ref manager) = self.session_state.conversation_manager
442 && let Some(ref mut conv) = self.session_state.current_conversation {
443 conv.messages = self.session_state.messages.clone();
444 manager.save_conversation(conv)?;
445 self.set_status("Conversation saved");
446 }
447 Ok(())
448 }
449
450 pub fn auto_save_conversation(&mut self) {
451 if self.session_state.messages.is_empty() {
452 return;
453 }
454 if let Err(e) = self.save_conversation() {
455 warn!("Failed to auto-save conversation: {}", e);
456 }
457 }
458
459 pub fn start_generation(&mut self, abort_handle: tokio::task::AbortHandle) {
462 self.operation_state.accumulated_tool_calls.clear();
464
465 self.app_state = AppState::Generating {
466 status: GenerationStatus::Sending,
467 start_time: std::time::Instant::now(),
468 tokens_received: 0,
469 abort_handle: Some(abort_handle),
470 };
471 }
472
473 pub fn transition_to_thinking(&mut self) {
474 if let AppState::Generating { start_time, tokens_received, ref abort_handle, .. } = self.app_state {
475 self.app_state = AppState::Generating {
476 status: GenerationStatus::Thinking,
477 start_time,
478 tokens_received,
479 abort_handle: abort_handle.clone(),
480 };
481 }
482 }
483
484 pub fn transition_to_streaming(&mut self) {
485 if let AppState::Generating { start_time, tokens_received, ref abort_handle, .. } = self.app_state {
486 self.app_state = AppState::Generating {
487 status: GenerationStatus::Streaming,
488 start_time,
489 tokens_received,
490 abort_handle: abort_handle.clone(),
491 };
492 }
493 }
494
495 pub fn set_final_tokens(&mut self, count: usize) {
497 if let AppState::Generating { status, start_time, ref abort_handle, .. } = self.app_state {
498 self.app_state = AppState::Generating {
499 status,
500 start_time,
501 tokens_received: count,
502 abort_handle: abort_handle.clone(),
503 };
504 self.session_state.add_tokens(count);
505 }
506 }
507
    /// Returns the app to `Idle`. The previous state (including any stored
    /// abort handle) is dropped by the assignment; dropping an
    /// `AbortHandle` does not abort the task — use `abort_generation` to
    /// retrieve the handle for cancellation.
    pub fn stop_generation(&mut self) {
        self.app_state = AppState::Idle;
    }
511
512 pub fn abort_generation(&mut self) -> Option<tokio::task::AbortHandle> {
513 if let AppState::Generating { abort_handle, .. } = &mut self.app_state {
514 let handle = abort_handle.take();
515 self.app_state = AppState::Idle;
516 handle
517 } else {
518 None
519 }
520 }
521
522}