Skip to main content

mermaid_cli/tui/
app.rs

1//! Application coordinator
2//!
3//! Thin coordinator that composes state modules. All state is delegated to
4//! focused modules in src/tui/state/.
5
6use std::collections::VecDeque;
7use std::sync::Arc;
8
9use super::state::{
10    AppState, AttachmentState, ConversationState, ErrorEntry, ErrorSeverity, GenerationStatus,
11    InputBuffer, ModelState, OperationState, StatusState, UIState,
12};
13use crate::constants::UI_ERROR_LOG_MAX_SIZE;
14use crate::models::{ChatMessage, MessageRole, Model, StreamCallback};
15use crate::session::{ConversationHistory, ConversationManager};
16
/// Application state coordinator
///
/// Thin composition root: owns no domain logic itself; all state is delegated
/// to the focused modules in `src/tui/state/`.
pub struct App {
    /// User input buffer (line editing, with persisted input history)
    pub input: InputBuffer,
    /// Main-loop flag; cleared by `quit()` to exit the app
    pub running: bool,
    /// Current working directory (also where conversations are managed)
    pub working_dir: String,
    /// Error log - keeps the last UI_ERROR_LOG_MAX_SIZE errors for visibility
    pub error_log: VecDeque<ErrorEntry>,
    /// State machine for application lifecycle (e.g. Idle, Generating)
    pub app_state: AppState,

    /// Model state - LLM configuration
    pub model_state: ModelState,
    /// UI state - visual presentation and widget states
    pub ui_state: UIState,
    /// Session state - conversation history and persistence
    pub session_state: ConversationState,
    /// Operation state - file reading and tool calls
    pub operation_state: OperationState,
    /// Status state - UI status messages
    pub status_state: StatusState,
    /// Attachment state - pending image attachments
    pub attachment_state: AttachmentState,
}
43
44impl App {
45    /// Create a new app instance
46    pub fn new(model: Box<dyn Model>, model_id: String) -> Self {
47        let working_dir = std::env::current_dir()
48            .map(|p| p.to_string_lossy().to_string())
49            .unwrap_or_else(|_| ".".to_string());
50
51        // Initialize model state
52        let model_state = ModelState::new(model, model_id);
53
54        // Initialize conversation manager for the current directory
55        let conversation_manager = ConversationManager::new(&working_dir).ok();
56        let current_conversation = conversation_manager
57            .as_ref()
58            .map(|_| ConversationHistory::new(working_dir.clone(), model_state.model_name.clone()));
59
60        // Load input history from conversation if available
61        let input_history: std::collections::VecDeque<String> = current_conversation
62            .as_ref()
63            .map(|conv| conv.input_history.clone())
64            .unwrap_or_default();
65
66        // Initialize input buffer with persisted history
67        let mut input = InputBuffer::new();
68        input.load_history(input_history);
69
70        // Initialize UIState
71        let ui_state = UIState::new();
72
73        // Initialize ConversationState with conversation management
74        let session_state = ConversationState::with_conversation(
75            conversation_manager,
76            current_conversation,
77        );
78
79        Self {
80            input,
81            running: true,
82            working_dir,
83            error_log: VecDeque::new(),
84            app_state: AppState::Idle,
85            model_state,
86            ui_state,
87            session_state,
88            operation_state: OperationState::new(),
89            status_state: StatusState::new(),
90            attachment_state: AttachmentState::new(),
91        }
92    }
93
94    // ===== Message Management =====
95
96    /// Add a message to the chat (extracts thinking blocks automatically)
97    pub fn add_message(&mut self, role: MessageRole, content: String) {
98        self.add_message_with_images(role, content, None);
99    }
100
101    /// Add a message with optional image attachments
102    pub fn add_message_with_images(
103        &mut self,
104        role: MessageRole,
105        content: String,
106        images: Option<Vec<String>>,
107    ) {
108        let mut message = match role {
109            MessageRole::User => ChatMessage::user(content),
110            MessageRole::Assistant => ChatMessage::assistant(content),
111            MessageRole::System => ChatMessage::system(content),
112            MessageRole::Tool => ChatMessage::tool("", "", content),
113        };
114        let (thinking, answer) = ChatMessage::extract_thinking(&message.content);
115        message.content = answer;
116        message.thinking = thinking;
117        if let Some(imgs) = images {
118            message = message.with_images(imgs);
119        }
120        self.commit_message(message);
121    }
122
123    /// Add an assistant message with tool_calls attached
124    pub fn add_assistant_message_with_tool_calls(
125        &mut self,
126        content: String,
127        tool_calls: Vec<crate::models::ToolCall>,
128    ) {
129        let mut message = ChatMessage::assistant(content).with_tool_calls(tool_calls);
130        let (thinking, answer) = ChatMessage::extract_thinking(&message.content);
131        message.content = answer;
132        message.thinking = thinking;
133        self.commit_message(message);
134    }
135
136    /// Add a tool result message
137    pub fn add_tool_result(&mut self, tool_call_id: String, tool_name: String, content: String) {
138        let message = ChatMessage::tool(tool_call_id, tool_name, content);
139        self.commit_message(message);
140    }
141
142    /// Commit a message to session state and conversation history
143    fn commit_message(&mut self, message: ChatMessage) {
144        self.session_state.messages.push(message.clone());
145        if let Some(ref mut conv) = self.session_state.current_conversation {
146            conv.add_messages(&[message]);
147        }
148    }
149
150    /// Clear the input buffer
151    pub fn clear_input(&mut self) {
152        self.input.clear();
153    }
154
155    // ===== Status Management =====
156
157    /// Set status message
158    pub fn set_status(&mut self, message: impl Into<String>) {
159        self.status_state.set(message);
160    }
161
162    /// Clear status message
163    pub fn clear_status(&mut self) {
164        self.status_state.clear();
165    }
166
167    // ===== Error Management =====
168
169    /// Display an error consistently across the UI
170    pub fn display_error(&mut self, summary: impl Into<String>, detail: impl Into<String>) {
171        let summary = summary.into();
172        let detail = detail.into();
173
174        self.set_status(format!("[Error] {}", summary));
175
176        if detail.is_empty() {
177            self.add_message(MessageRole::System, format!("Error: {}", summary));
178        } else {
179            self.add_message(MessageRole::System, detail);
180        }
181    }
182
183    /// Display an error with just a message
184    pub fn display_error_simple(&mut self, message: impl Into<String>) {
185        let message = message.into();
186        self.display_error(message.clone(), message);
187    }
188
189    /// Log an error to the error log
190    pub fn log_error(&mut self, entry: ErrorEntry) {
191        self.status_state.set(entry.display());
192        self.error_log.push_back(entry);
193        if self.error_log.len() > UI_ERROR_LOG_MAX_SIZE {
194            self.error_log.pop_front(); // O(1) instead of O(n)
195        }
196    }
197
198    /// Log a simple error message
199    pub fn log_error_msg(&mut self, severity: ErrorSeverity, msg: impl Into<String>) {
200        self.log_error(ErrorEntry::new(severity, msg.into()));
201    }
202
203    /// Log error with context
204    pub fn log_error_with_context(
205        &mut self,
206        severity: ErrorSeverity,
207        msg: impl Into<String>,
208        context: impl Into<String>,
209    ) {
210        self.log_error(ErrorEntry::with_context(
211            severity,
212            msg.into(),
213            context.into(),
214        ));
215    }
216
217    /// Get recent errors
218    pub fn recent_errors(&self, count: usize) -> Vec<&ErrorEntry> {
219        self.error_log.iter().rev().take(count).collect()
220    }
221
222    // ===== Terminal =====
223
224    /// Set terminal window title
225    pub fn set_terminal_title(&self, title: &str) {
226        use crossterm::{execute, terminal::SetTitle};
227        use std::io::stdout;
228        let _ = execute!(stdout(), SetTitle(title));
229    }
230
231    // ===== Title Generation =====
232
    /// Spawn title generation as a background task (non-blocking).
    /// Returns a JoinHandle the caller can poll with `is_finished()`.
    ///
    /// Returns `None` when a title already exists or the conversation has
    /// fewer than 2 messages (not enough to summarize).
    pub fn spawn_title_generation(&self) -> Option<tokio::task::JoinHandle<Option<String>>> {
        if self.session_state.conversation_title.is_some() || self.session_state.messages.len() < 2
        {
            return None;
        }

        // Build a compact transcript from the first 4 user/assistant
        // messages, each clipped to 200 chars, as the summarization input.
        let mut summary = String::new();
        for msg in self
            .session_state
            .messages
            .iter()
            .filter(|m| matches!(m.role, MessageRole::User | MessageRole::Assistant))
            .take(4)
        {
            let role = if msg.role == MessageRole::User {
                "User"
            } else {
                "Assistant"
            };
            summary.push_str(&format!(
                "{}: {}\n\n",
                role,
                msg.content.chars().take(200).collect::<String>()
            ));
        }

        // Clone the shared model handle and disable thinking so the title
        // call returns plain text quickly.
        let model = self.model_state.model.clone();
        let mut config = self.model_state.build_config();
        config.thinking_enabled = Some(false);

        Some(tokio::spawn(async move {
            let prompt = format!(
                "Based on this conversation, generate a short, descriptive title (2-4 words maximum, no quotes):\n\n{}\n\nTitle:",
                summary
            );
            // Streamed chunks are accumulated into a shared buffer by the
            // callback; the task reads it back after the call completes.
            let buf = Arc::new(tokio::sync::Mutex::new(String::new()));
            let buf_clone = Arc::clone(&buf);
            let callback: StreamCallback = Arc::new(move |chunk: &str| {
                // NOTE(review): try_lock silently drops a chunk that arrives
                // while the lock is held — presumably acceptable for a short
                // title; confirm if exact text matters.
                if let Ok(mut t) = buf_clone.try_lock() {
                    t.push_str(chunk);
                }
            });

            let model = model.read().await;
            if model
                .chat(&[ChatMessage::user(prompt)], &config, Some(callback))
                .await
                .is_ok()
            {
                let raw = buf.lock().await;
                // Keep the first line only; strip surrounding quotes and
                // trailing punctuation, cap at 50 chars.
                let title: String = raw
                    .lines()
                    .next()
                    .unwrap_or(&raw)
                    .trim()
                    .trim_matches(|c| c == '"' || c == '\'' || c == '.' || c == ',')
                    .chars()
                    .take(50)
                    .collect();
                if !title.is_empty() {
                    return Some(title);
                }
            }
            None
        }))
    }
301
302    // ===== Scrolling =====
303
304    pub fn scroll_up(&mut self, amount: u16) {
305        self.ui_state.chat_state.scroll_up(amount);
306    }
307
308    pub fn scroll_down(&mut self, amount: u16) {
309        self.ui_state.chat_state.scroll_down(amount);
310    }
311
312    // ===== Lifecycle =====
313
314    pub fn quit(&mut self) {
315        self.running = false;
316    }
317
318    // ===== Message History =====
319
320    /// Filter and prepare messages for model API calls.
321    /// Includes User, Assistant, and Tool messages for proper agent loop.
322    /// Injects timestamp context into User messages for the model's temporal awareness.
323    fn prepare_api_messages(&self) -> Vec<ChatMessage> {
324        self.session_state
325            .messages
326            .iter()
327            .filter(|msg| {
328                msg.role == MessageRole::User
329                    || msg.role == MessageRole::Assistant
330                    || msg.role == MessageRole::Tool
331            })
332            .map(|msg| {
333                if msg.role == MessageRole::User {
334                    let ts = msg.timestamp.format("%Y-%m-%d %H:%M:%S %Z").to_string();
335                    let mut m = msg.clone();
336                    m.content = format!("[Sent at: {}]\n{}", ts, m.content);
337                    m
338                } else {
339                    msg.clone()
340                }
341            })
342            .collect()
343    }
344
345    /// Build message history for model API calls (all messages, no truncation)
346    pub fn build_message_history(&self) -> Vec<ChatMessage> {
347        self.prepare_api_messages()
348    }
349
350    pub fn build_managed_message_history(
351        &self,
352        max_context_tokens: usize,
353        reserve_tokens: usize,
354    ) -> Vec<ChatMessage> {
355        use crate::utils::Tokenizer;
356
357        let tokenizer = Tokenizer::new(&self.model_state.model_name);
358        let available_tokens = max_context_tokens.saturating_sub(reserve_tokens);
359
360        let all_messages = self.prepare_api_messages();
361
362        if all_messages.is_empty() {
363            return Vec::new();
364        }
365
366        let messages_for_counting: Vec<(String, String)> = all_messages
367            .iter()
368            .map(|msg| {
369                let role = match msg.role {
370                    MessageRole::User => "user",
371                    MessageRole::Assistant => "assistant",
372                    MessageRole::System => "system",
373                    MessageRole::Tool => "tool",
374                };
375                (role.to_string(), msg.content.clone())
376            })
377            .collect();
378
379        let total_tokens = tokenizer
380            .count_chat_tokens(&messages_for_counting)
381            .unwrap_or_else(|_| all_messages.iter().map(|m| m.content.len() / 4).sum());
382
383        if total_tokens <= available_tokens {
384            return all_messages;
385        }
386
387        let mut kept_messages = Vec::new();
388        let mut current_tokens = 0;
389
390        for msg in all_messages.iter().rev() {
391            let msg_text = vec![(
392                match msg.role {
393                    MessageRole::User => "user",
394                    MessageRole::Assistant => "assistant",
395                    MessageRole::System => "system",
396                    MessageRole::Tool => "tool",
397                }
398                .to_string(),
399                msg.content.clone(),
400            )];
401
402            let msg_tokens = tokenizer
403                .count_chat_tokens(&msg_text)
404                .unwrap_or(msg.content.len() / 4);
405
406            if current_tokens + msg_tokens <= available_tokens {
407                kept_messages.push(msg.clone());
408                current_tokens += msg_tokens;
409            } else if kept_messages.len() < 2 {
410                kept_messages.push(msg.clone());
411                break;
412            } else {
413                break;
414            }
415        }
416
417        kept_messages.reverse();
418        kept_messages
419    }
420
421    // ===== Conversation Persistence =====
422
423    pub fn load_conversation(&mut self, conversation: ConversationHistory) {
424        self.session_state.messages = conversation.messages.clone();
425        self.session_state.current_conversation = Some(conversation);
426        self.set_status("Conversation loaded");
427    }
428
429    pub fn save_conversation(&mut self) -> anyhow::Result<()> {
430        if let Some(ref manager) = self.session_state.conversation_manager
431            && let Some(ref mut conv) = self.session_state.current_conversation
432        {
433            conv.messages = self.session_state.messages.clone();
434            manager.save_conversation(conv)?;
435            self.set_status("Conversation saved");
436        }
437        Ok(())
438    }
439
440    pub fn auto_save_conversation(&mut self) {
441        if self.session_state.messages.is_empty() {
442            return;
443        }
444        if let Some(ref manager) = self.session_state.conversation_manager
445            && let Some(ref mut conv) = self.session_state.current_conversation
446        {
447            conv.messages = self.session_state.messages.clone();
448            let conv_clone = conv.clone();
449            let manager_clone = manager.clone();
450            tokio::task::spawn_blocking(move || {
451                if let Err(e) = manager_clone.save_conversation(&conv_clone) {
452                    tracing::warn!("Failed to auto-save conversation: {}", e);
453                }
454            });
455        }
456    }
457
458    // ===== Generation State Transitions =====
459
460    pub fn start_generation(&mut self, abort_handle: tokio::task::AbortHandle) {
461        self.app_state = AppState::Generating {
462            status: GenerationStatus::Sending,
463            start_time: std::time::Instant::now(),
464            tokens_received: 0,
465            abort_handle: Some(abort_handle),
466            response_buffer: String::with_capacity(8192),
467        };
468    }
469
470    /// Update the abort handle for a new model call within the same turn.
471    /// Keeps the existing start_time and token count (cumulative for the turn).
472    pub fn update_abort_handle(&mut self, abort_handle: tokio::task::AbortHandle) {
473        if let AppState::Generating {
474            abort_handle: ref mut existing,
475            ..
476        } = self.app_state
477        {
478            *existing = Some(abort_handle);
479        }
480    }
481
482    /// Reset status to Sending for a new model call within the same turn.
483    pub fn transition_to_sending(&mut self) {
484        if let AppState::Generating { status, .. } = &mut self.app_state {
485            *status = GenerationStatus::Sending;
486        }
487    }
488
489    pub fn transition_to_thinking(&mut self) {
490        if let AppState::Generating { status, .. } = &mut self.app_state {
491            *status = GenerationStatus::Thinking;
492        }
493    }
494
495    pub fn transition_to_streaming(&mut self) {
496        if let AppState::Generating { status, .. } = &mut self.app_state {
497            *status = GenerationStatus::Streaming;
498        }
499    }
500
501    /// Add tokens from a completed model call (accumulates across the turn)
502    pub fn set_final_tokens(&mut self, count: usize) {
503        if let AppState::Generating {
504            tokens_received, ..
505        } = &mut self.app_state
506        {
507            *tokens_received += count;
508        }
509        self.session_state.add_tokens(count);
510    }
511
512    pub fn stop_generation(&mut self) {
513        self.app_state = AppState::Idle;
514    }
515
516    pub fn abort_generation(&mut self) -> (Option<tokio::task::AbortHandle>, String) {
517        if let AppState::Generating {
518            abort_handle,
519            response_buffer,
520            ..
521        } = &mut self.app_state
522        {
523            let handle = abort_handle.take();
524            let buffer = std::mem::take(response_buffer);
525            self.app_state = AppState::Idle;
526            (handle, buffer)
527        } else {
528            (None, String::new())
529        }
530    }
531
532    // ===== Response Buffer Accessors =====
533
534    /// Append text to the response buffer. No-op if not generating.
535    /// Enforces MAX_RESPONSE_CHARS size limit.
536    pub fn push_response(&mut self, text: &str) {
537        if let AppState::Generating {
538            response_buffer, ..
539        } = &mut self.app_state
540        {
541            response_buffer.push_str(text);
542            if response_buffer.len() > crate::constants::WEB_CONTENT_MAX_CHARS * 80 {
543                // 400k chars limit (MAX_RESPONSE_CHARS from stream_handler)
544                let end = response_buffer.floor_char_boundary(400_000);
545                response_buffer.truncate(end);
546                response_buffer
547                    .push_str("\n\n[TRUNCATED: Response exceeded size limit]\n");
548                self.set_status("[WARNING] Response truncated (size limit reached)");
549            }
550        }
551    }
552
553    /// Get response buffer length (0 if not generating)
554    pub fn response_len(&self) -> usize {
555        if let AppState::Generating {
556            response_buffer, ..
557        } = &self.app_state
558        {
559            response_buffer.len()
560        } else {
561            0
562        }
563    }
564
565    /// Take the response buffer, leaving it empty. Returns empty string if not generating.
566    pub fn take_response(&mut self) -> String {
567        if let AppState::Generating {
568            response_buffer, ..
569        } = &mut self.app_state
570        {
571            std::mem::take(response_buffer)
572        } else {
573            String::new()
574        }
575    }
576
577    /// Clear the response buffer (for per-model-call reset within a turn)
578    pub fn clear_response(&mut self) {
579        if let AppState::Generating {
580            response_buffer, ..
581        } = &mut self.app_state
582        {
583            response_buffer.clear();
584        }
585    }
586}