// src/tui/app.rs

//! Application coordinator
//!
//! Thin coordinator that composes state modules. All state is delegated to
//! focused modules in src/tui/state/.

6use std::collections::VecDeque;
7use std::sync::Arc;
8use tracing::warn;
9
10use super::state::{
11    AppState, ConversationState, ErrorEntry, ErrorSeverity, GenerationStatus,
12    InputBuffer, ModelState, OperationState, StatusState, UIState,
13};
14use super::theme::Theme;
15use super::widgets::{ChatState, InputState};
16use crate::constants::UI_ERROR_LOG_MAX_SIZE;
17use crate::models::{ChatMessage, MessageRole, Model, ModelConfig, StreamCallback};
18use crate::session::{ConversationHistory, ConversationManager};
19
/// Application state coordinator
///
/// Thin, mostly-passive container: a few cross-cutting fields live here
/// directly, while domain-specific state is delegated to focused sub-state
/// structs (model, UI, session, operations, status).
pub struct App {
    /// User input buffer
    pub input: InputBuffer,
    /// Is the app running? Cleared by `quit()`.
    pub running: bool,
    /// Current model response (accumulated while streaming)
    pub current_response: String,
    /// Current working directory
    pub working_dir: String,
    /// Error log - keeps last N errors for visibility
    /// (bounded by UI_ERROR_LOG_MAX_SIZE; see `log_error`)
    pub error_log: VecDeque<ErrorEntry>,
    /// State machine for application lifecycle
    pub app_state: AppState,

    /// Model state - LLM configuration
    pub model_state: ModelState,
    /// UI state - visual presentation and widget states
    pub ui_state: UIState,
    /// Session state - conversation history and persistence
    pub session_state: ConversationState,
    /// Operation state - file reading and tool calls
    pub operation_state: OperationState,
    /// Status state - UI status messages
    pub status_state: StatusState,
}
46
47impl App {
48    /// Create a new app instance
49    pub fn new(model: Box<dyn Model>, model_id: String) -> Self {
50        let working_dir = std::env::current_dir()
51            .map(|p| p.to_string_lossy().to_string())
52            .unwrap_or_else(|_| ".".to_string());
53
54        // Initialize model state
55        let model_state = ModelState::new(model, model_id);
56
57        // Initialize conversation manager for the current directory
58        let conversation_manager = ConversationManager::new(&working_dir).ok();
59        let current_conversation = conversation_manager
60            .as_ref()
61            .map(|_| ConversationHistory::new(working_dir.clone(), model_state.model_name.clone()));
62
63        // Load input history from conversation if available
64        let input_history: std::collections::VecDeque<String> = conversation_manager
65            .as_ref()
66            .and_then(|_| current_conversation.as_ref())
67            .map(|conv| conv.input_history.clone())
68            .unwrap_or_default();
69
70        // Initialize UIState
71        let ui_state = UIState {
72            chat_state: ChatState::new(),
73            input_state: InputState::new(),
74            theme: Theme::dark(),
75            selected_message: None,
76        };
77
78        // Initialize ConversationState with conversation management
79        let session_state = ConversationState::with_conversation(
80            conversation_manager,
81            current_conversation,
82            input_history,
83        );
84
85        Self {
86            input: InputBuffer::new(),
87            running: true,
88            current_response: String::with_capacity(8192),
89            working_dir,
90            error_log: VecDeque::new(),
91            app_state: AppState::Idle,
92            model_state,
93            ui_state,
94            session_state,
95            operation_state: OperationState::new(),
96            status_state: StatusState::new(),
97        }
98    }
99
100    // ===== Compatibility shims for old field access =====
101    // These will be removed as callers are updated
102
    /// Get cursor position (compatibility shim)
    ///
    /// Delegates to the input buffer; these shims exist only until callers
    /// are migrated to access `self.input` directly.
    pub fn cursor_position(&self) -> usize {
        self.input.cursor_position
    }
107
    /// Set cursor position (compatibility shim)
    ///
    /// Writes straight through to the input buffer; no bounds checking is
    /// performed here.
    pub fn set_cursor_position(&mut self, pos: usize) {
        self.input.cursor_position = pos;
    }
112
113    // ===== Message Management =====
114
115    /// Add a message to the chat
116    pub fn add_message(&mut self, role: MessageRole, content: String) {
117        let (thinking, answer_content) = ChatMessage::extract_thinking(&content);
118
119        let message = ChatMessage {
120            role,
121            content: answer_content,
122            timestamp: chrono::Local::now(),
123            actions: Vec::new(),
124            thinking,
125            images: None,
126            tool_calls: None,
127            tool_call_id: None,
128            tool_name: None,
129        };
130        self.session_state.messages.push(message.clone());
131
132        if let Some(ref mut conv) = self.session_state.current_conversation {
133            conv.add_messages(&[message]);
134        }
135    }
136
137    /// Add an assistant message with tool_calls attached
138    /// This is used when the model returns tool_calls that need to be recorded
139    /// for proper agent loop conversation history
140    pub fn add_assistant_message_with_tool_calls(
141        &mut self,
142        content: String,
143        tool_calls: Vec<crate::models::ToolCall>,
144    ) {
145        let (thinking, answer_content) = ChatMessage::extract_thinking(&content);
146
147        let message = ChatMessage {
148            role: MessageRole::Assistant,
149            content: answer_content,
150            timestamp: chrono::Local::now(),
151            actions: Vec::new(),
152            thinking,
153            images: None,
154            tool_calls: if tool_calls.is_empty() { None } else { Some(tool_calls) },
155            tool_call_id: None,
156            tool_name: None,
157        };
158        self.session_state.messages.push(message.clone());
159
160        if let Some(ref mut conv) = self.session_state.current_conversation {
161            conv.add_messages(&[message]);
162        }
163    }
164
165    /// Add a tool result message
166    /// This follows the Ollama/OpenAI API format for tool results:
167    /// - role: "tool"
168    /// - content: the result of executing the tool
169    /// - tool_call_id: links back to the original tool_call
170    /// - tool_name: the function name that was called (required by Ollama)
171    pub fn add_tool_result(
172        &mut self,
173        tool_call_id: String,
174        tool_name: String,
175        content: String,
176    ) {
177        let message = ChatMessage {
178            role: MessageRole::Tool,
179            content,
180            timestamp: chrono::Local::now(),
181            actions: Vec::new(),
182            thinking: None,
183            images: None,
184            tool_calls: None,
185            tool_call_id: Some(tool_call_id),
186            tool_name: Some(tool_name),
187        };
188        self.session_state.messages.push(message.clone());
189
190        if let Some(ref mut conv) = self.session_state.current_conversation {
191            conv.add_messages(&[message]);
192        }
193    }
194
    /// Clear the input buffer
    ///
    /// Delegates to `InputBuffer::clear`.
    pub fn clear_input(&mut self) {
        self.input.clear();
    }
199
200    // ===== Status Management =====
201
    /// Set status message
    ///
    /// Delegates to `StatusState::set`; replaces any previous status text.
    pub fn set_status(&mut self, message: impl Into<String>) {
        self.status_state.set(message);
    }
206
    /// Clear status message
    ///
    /// Delegates to `StatusState::clear`.
    pub fn clear_status(&mut self) {
        self.status_state.clear();
    }
211
212    // ===== Error Management =====
213
214    /// Display an error consistently across the UI
215    pub fn display_error(&mut self, summary: impl Into<String>, detail: impl Into<String>) {
216        let summary = summary.into();
217        let detail = detail.into();
218
219        self.set_status(format!("[Error] {}", summary));
220
221        if detail.is_empty() {
222            self.add_message(MessageRole::System, format!("Error: {}", summary));
223        } else {
224            self.add_message(MessageRole::System, detail);
225        }
226    }
227
    /// Display an error with just a message
    ///
    /// Uses the same text for both the status-bar summary and the chat
    /// detail; see `display_error`.
    pub fn display_error_simple(&mut self, message: impl Into<String>) {
        let message = message.into();
        // Clone: the same string serves as both summary and detail.
        self.display_error(message.clone(), message);
    }
233
234    /// Log an error to the error log
235    pub fn log_error(&mut self, entry: ErrorEntry) {
236        self.status_state.set(entry.display());
237        self.error_log.push_back(entry);
238        if self.error_log.len() > UI_ERROR_LOG_MAX_SIZE {
239            self.error_log.pop_front(); // O(1) instead of O(n)
240        }
241    }
242
    /// Log a simple error message
    ///
    /// Convenience wrapper that builds an `ErrorEntry` without context.
    pub fn log_error_msg(&mut self, severity: ErrorSeverity, msg: impl Into<String>) {
        self.log_error(ErrorEntry::new(severity, msg.into()));
    }
247
    /// Log error with context
    ///
    /// Convenience wrapper that builds an `ErrorEntry` carrying an extra
    /// context string alongside the message.
    pub fn log_error_with_context(
        &mut self,
        severity: ErrorSeverity,
        msg: impl Into<String>,
        context: impl Into<String>,
    ) {
        self.log_error(ErrorEntry::with_context(severity, msg.into(), context.into()));
    }
257
    /// Get recent errors
    ///
    /// Returns up to `count` entries, newest first.
    pub fn recent_errors(&self, count: usize) -> Vec<&ErrorEntry> {
        self.error_log.iter().rev().take(count).collect()
    }
262
263    // ===== Terminal =====
264
    /// Set terminal window title
    ///
    /// Best-effort: any error writing the escape sequence to stdout is
    /// deliberately ignored.
    pub fn set_terminal_title(&self, title: &str) {
        use crossterm::{execute, terminal::SetTitle};
        use std::io::stdout;
        let _ = execute!(stdout(), SetTitle(title));
    }
271
272    // ===== Title Generation =====
273
274    /// Generate conversation title from current messages
275    pub async fn generate_conversation_title(&mut self) {
276        if self.session_state.conversation_title.is_some() || self.session_state.messages.len() < 2 {
277            return;
278        }
279
280        let mut conversation_summary = String::new();
281        for (i, msg) in self.session_state.messages.iter().take(4).enumerate() {
282            let role = match msg.role {
283                MessageRole::User => "User",
284                MessageRole::Assistant => "Assistant",
285                MessageRole::System | MessageRole::Tool => continue,
286            };
287            conversation_summary.push_str(&format!(
288                "{}: {}\n\n",
289                role,
290                msg.content.chars().take(200).collect::<String>()
291            ));
292            if i >= 3 { break; }
293        }
294
295        let title_prompt = format!(
296            "Based on this conversation, generate a short, descriptive title (2-4 words maximum, no quotes):\n\n{}\n\nTitle:",
297            conversation_summary
298        );
299
300        let messages = vec![ChatMessage {
301            role: MessageRole::User,
302            content: title_prompt,
303            timestamp: chrono::Local::now(),
304            actions: Vec::new(),
305            thinking: None,
306            images: None,
307            tool_calls: None,
308            tool_call_id: None,
309            tool_name: None,
310        }];
311
312        let title_string = Arc::new(tokio::sync::Mutex::new(String::new()));
313        let title_clone = Arc::clone(&title_string);
314
315        let callback: StreamCallback = Arc::new(move |chunk: &str| {
316            if let Ok(mut title) = title_clone.try_lock() {
317                title.push_str(chunk);
318            }
319        });
320
321        let model = self.model_state.model.write().await;
322        let mut config = ModelConfig::default();
323        config.model = self.model_state.model_id.clone();
324
325        if model.chat(&messages, &config, Some(callback)).await.is_ok() {
326            let final_title = title_string.lock().await;
327            let title = final_title.lines().next().unwrap_or(&final_title)
328                .trim()
329                .trim_matches(|c| c == '"' || c == '\'' || c == '.' || c == ',')
330                .chars()
331                .take(50)
332                .collect::<String>();
333
334            if !title.is_empty() {
335                self.session_state.conversation_title = Some(title);
336            }
337        }
338    }
339
340    // ===== Scrolling =====
341
    /// Scroll the chat view up by `amount`; delegates to `ChatState`.
    pub fn scroll_up(&mut self, amount: u16) {
        self.ui_state.chat_state.scroll_up(amount);
    }
345
    /// Scroll the chat view down by `amount`; delegates to `ChatState`.
    pub fn scroll_down(&mut self, amount: u16) {
        self.ui_state.chat_state.scroll_down(amount);
    }
349
350    // ===== Lifecycle =====
351
    /// Stop the application by clearing the `running` flag.
    pub fn quit(&mut self) {
        self.running = false;
    }
355
356    // ===== Message History =====
357
358    /// Build message history for model API calls
359    /// Includes User, Assistant, and Tool messages (for proper agent loop)
360    pub fn build_message_history(&self) -> Vec<ChatMessage> {
361        self.session_state.messages
362            .iter()
363            .filter(|msg| {
364                msg.role == MessageRole::User
365                    || msg.role == MessageRole::Assistant
366                    || msg.role == MessageRole::Tool
367            })
368            .cloned()
369            .collect()
370    }
371
372    pub fn build_managed_message_history(
373        &self,
374        max_context_tokens: usize,
375        reserve_tokens: usize,
376    ) -> Vec<ChatMessage> {
377        use crate::utils::Tokenizer;
378
379        let tokenizer = Tokenizer::new(&self.model_state.model_name);
380        let available_tokens = max_context_tokens.saturating_sub(reserve_tokens);
381
382        // Include User, Assistant, and Tool messages for proper agent loop
383        let all_messages: Vec<ChatMessage> = self
384            .session_state
385            .messages
386            .iter()
387            .filter(|msg| {
388                msg.role == MessageRole::User
389                    || msg.role == MessageRole::Assistant
390                    || msg.role == MessageRole::Tool
391            })
392            .cloned()
393            .collect();
394
395        if all_messages.is_empty() {
396            return Vec::new();
397        }
398
399        let messages_for_counting: Vec<(String, String)> = all_messages
400            .iter()
401            .map(|msg| {
402                let role = match msg.role {
403                    MessageRole::User => "user",
404                    MessageRole::Assistant => "assistant",
405                    MessageRole::System => "system",
406                    MessageRole::Tool => "tool",
407                };
408                (role.to_string(), msg.content.clone())
409            })
410            .collect();
411
412        let total_tokens = tokenizer
413            .count_chat_tokens(&messages_for_counting)
414            .unwrap_or_else(|_| all_messages.iter().map(|m| m.content.len() / 4).sum());
415
416        if total_tokens <= available_tokens {
417            return all_messages;
418        }
419
420        let mut kept_messages = Vec::new();
421        let mut current_tokens = 0;
422
423        for msg in all_messages.iter().rev() {
424            let msg_text = vec![(
425                match msg.role {
426                    MessageRole::User => "user",
427                    MessageRole::Assistant => "assistant",
428                    MessageRole::System => "system",
429                    MessageRole::Tool => "tool",
430                }
431                .to_string(),
432                msg.content.clone(),
433            )];
434
435            let msg_tokens = tokenizer
436                .count_chat_tokens(&msg_text)
437                .unwrap_or(msg.content.len() / 4);
438
439            if current_tokens + msg_tokens <= available_tokens {
440                kept_messages.push(msg.clone());
441                current_tokens += msg_tokens;
442            } else if kept_messages.len() < 2 {
443                kept_messages.push(msg.clone());
444                break;
445            } else {
446                break;
447            }
448        }
449
450        kept_messages.reverse();
451        kept_messages
452    }
453
454    // ===== Conversation Persistence =====
455
    /// Load a saved conversation, replacing the current message list.
    ///
    /// The messages are copied into session state for display while the
    /// conversation itself is retained for later persistence.
    pub fn load_conversation(&mut self, conversation: ConversationHistory) {
        self.session_state.messages = conversation.messages.clone();
        self.session_state.current_conversation = Some(conversation);
        self.set_status("Conversation loaded");
    }
461
    /// Persist the current conversation to disk.
    ///
    /// Does nothing (returning `Ok`) when persistence is disabled (no
    /// manager) or no conversation is active.
    ///
    /// # Errors
    /// Propagates any failure from `ConversationManager::save_conversation`.
    pub fn save_conversation(&mut self) -> anyhow::Result<()> {
        if let Some(ref manager) = self.session_state.conversation_manager {
            if let Some(ref mut conv) = self.session_state.current_conversation {
                // Sync the live message list into the conversation before saving.
                conv.messages = self.session_state.messages.clone();
                manager.save_conversation(conv)?;
                self.set_status("Conversation saved");
            }
        }
        Ok(())
    }
472
    /// Save the conversation, logging (not surfacing) any failure.
    ///
    /// Skipped when there are no messages to save.
    pub fn auto_save_conversation(&mut self) {
        if self.session_state.messages.is_empty() {
            return;
        }
        if let Err(e) = self.save_conversation() {
            warn!("Failed to auto-save conversation: {}", e);
        }
    }
481
482    // ===== Generation State Transitions =====
483
    /// Enter the `Generating` state for a new model request.
    ///
    /// `abort_handle` is stored so the request's task can be cancelled later
    /// via `abort_generation`.
    pub fn start_generation(&mut self, abort_handle: tokio::task::AbortHandle) {
        // Clear accumulated tool calls from any previous generation
        self.operation_state.accumulated_tool_calls.clear();

        self.app_state = AppState::Generating {
            status: GenerationStatus::Sending,
            start_time: std::time::Instant::now(),
            tokens_received: 0,
            abort_handle: Some(abort_handle),
        };
    }
495
496    pub fn transition_to_thinking(&mut self) {
497        if let AppState::Generating { start_time, tokens_received, ref abort_handle, .. } = self.app_state {
498            self.app_state = AppState::Generating {
499                status: GenerationStatus::Thinking,
500                start_time,
501                tokens_received,
502                abort_handle: abort_handle.clone(),
503            };
504        }
505    }
506
507    pub fn transition_to_streaming(&mut self) {
508        if let AppState::Generating { start_time, tokens_received, ref abort_handle, .. } = self.app_state {
509            self.app_state = AppState::Generating {
510                status: GenerationStatus::Streaming,
511                start_time,
512                tokens_received,
513                abort_handle: abort_handle.clone(),
514            };
515        }
516    }
517
518    /// Set the final token count from Ollama's actual response
519    pub fn set_final_tokens(&mut self, count: usize) {
520        if let AppState::Generating { status, start_time, ref abort_handle, .. } = self.app_state {
521            self.app_state = AppState::Generating {
522                status,
523                start_time,
524                tokens_received: count,
525                abort_handle: abort_handle.clone(),
526            };
527            self.session_state.add_tokens(count);
528        }
529    }
530
    /// Return to `Idle` without cancelling the underlying task.
    ///
    /// Any abort handle held in the previous state is dropped here (dropping
    /// a tokio `AbortHandle` does not abort the task); use
    /// `abort_generation` to actually cancel.
    pub fn stop_generation(&mut self) {
        self.app_state = AppState::Idle;
    }
534
535    pub fn abort_generation(&mut self) -> Option<tokio::task::AbortHandle> {
536        if let AppState::Generating { abort_handle, .. } = &mut self.app_state {
537            let handle = abort_handle.take();
538            self.app_state = AppState::Idle;
539            handle
540        } else {
541            None
542        }
543    }
544
545}