1use super::commands::model_entry_matches;
2use super::*;
3
4impl PiApp {
5 pub(super) fn handle_custom_extension_key(&mut self, key: &KeyMsg) -> bool {
6 if !self.custom_overlay_input_is_available() {
7 return false;
8 }
9 if key.key_type == KeyType::CtrlC {
10 return false;
11 }
12
13 if let Some(encoded) = encode_custom_ui_key(key) {
14 const MAX_CUSTOM_KEY_QUEUE: usize = 256;
15 if self.extension_custom_key_queue.len() >= MAX_CUSTOM_KEY_QUEUE {
16 let _ = self.extension_custom_key_queue.pop_front();
17 }
18 self.extension_custom_key_queue.push_back(encoded);
19 }
20
21 true
22 }
23
    /// Renders a plain-text cheat sheet of all configured keyboard shortcuts,
    /// grouped by action category.
    ///
    /// Categories whose actions are all unbound are skipped, as are unbound
    /// actions inside a category. The header includes the path of the user's
    /// keybinding config file.
    pub(super) fn format_hotkeys(&self) -> String {
        use crate::keybindings::ActionCategory;
        use std::fmt::Write;

        let mut output = String::new();
        // writeln! into a String cannot fail; results are deliberately ignored.
        let _ = writeln!(output, "Keyboard Shortcuts");
        let _ = writeln!(output, "==================");
        let _ = writeln!(output);
        let _ = writeln!(
            output,
            "Config: {}",
            KeyBindings::user_config_path().display()
        );
        let _ = writeln!(output);

        for category in ActionCategory::all() {
            let actions: Vec<_> = self.keybindings.iter_category(*category).collect();

            // Hide categories where nothing is bound at all.
            if actions.iter().all(|(_, bindings)| bindings.is_empty()) {
                continue;
            }

            let _ = writeln!(output, "## {}", category.display_name());
            let _ = writeln!(output);

            for (action, bindings) in actions {
                if bindings.is_empty() {
                    continue;
                }

                // All bindings for one action are shown comma-separated in a
                // fixed-width (20 char) key column.
                let keys: Vec<_> = bindings
                    .iter()
                    .map(std::string::ToString::to_string)
                    .collect();
                let keys_str = keys.join(", ");

                let _ = writeln!(output, " {:20} {}", keys_str, action.display_name());
            }
            let _ = writeln!(output);
        }

        output
    }
72
73 pub(super) fn resolve_action(&self, candidates: &[AppAction]) -> Option<AppAction> {
74 let &first = candidates.first()?;
75
76 if candidates.contains(&AppAction::Exit)
80 && self.agent_state == AgentState::Idle
81 && self.input.value().is_empty()
82 {
83 return Some(AppAction::Exit);
84 }
85
86 Some(first)
87 }
88
    /// Drives the extension capability-permission prompt.
    ///
    /// Arrow keys / Tab and vim-style `h`/`l` move focus between choices;
    /// Enter submits the highlighted choice (persisting it when the choice is
    /// an "always" variant); Esc cancels and denies. All other keys are
    /// swallowed while the prompt is open. Always returns `None`; the
    /// `Option<Cmd>` shape matches the sibling key handlers.
    pub(super) fn handle_capability_prompt_key(&mut self, key: &KeyMsg) -> Option<Cmd> {
        // No active prompt: nothing to handle.
        let prompt = self.capability_prompt.as_mut()?;

        match key.key_type {
            KeyType::Right | KeyType::Tab => prompt.focus_next(),
            KeyType::Left => prompt.focus_prev(),
            // vim-style focus movement.
            KeyType::Runes if key.runes == ['l'] => prompt.focus_next(),
            KeyType::Runes if key.runes == ['h'] => prompt.focus_prev(),

            KeyType::Enter => {
                let action = prompt.selected_action();
                let response = ExtensionUiResponse {
                    id: prompt.request.id.clone(),
                    value: Some(Value::Bool(action.is_allow())),
                    cancelled: false,
                };
                // Persist "always allow/deny" decisions; this is best-effort,
                // a failed store open or write is silently ignored.
                if action.is_persistent() {
                    if let Ok(mut store) = crate::permissions::PermissionStore::open_default() {
                        let _ = store.record(
                            &prompt.extension_id,
                            &prompt.capability,
                            action.is_allow(),
                        );
                    }
                }
                // Close the prompt before replying so UI state is consistent
                // when the extension reacts to the response.
                self.capability_prompt = None;
                self.send_extension_ui_response(response);
            }

            KeyType::Esc => {
                // Cancellation denies the capability and flags `cancelled`.
                let response = ExtensionUiResponse {
                    id: prompt.request.id.clone(),
                    value: Some(Value::Bool(false)),
                    cancelled: true,
                };
                self.capability_prompt = None;
                self.send_extension_ui_response(response);
            }

            _ => {}
        }

        None
    }
137
138 pub(super) fn handle_paste_event(&mut self, key: &KeyMsg) -> bool {
139 if key.key_type != KeyType::Runes || key.runes.is_empty() {
140 return false;
141 }
142
143 let pasted: String = key.runes.iter().collect();
144 let Some((insert, count)) = self.normalize_pasted_paths(&pasted) else {
145 return false;
146 };
147
148 self.input.insert_string(&insert);
149 if count > 0 {
150 self.status_message = Some(format!(
151 "Attached {} file{}",
152 count,
153 if count == 1 { "" } else { "s" }
154 ));
155 }
156 true
157 }
158
159 fn normalize_pasted_paths(&self, pasted: &str) -> Option<(String, usize)> {
160 let mut refs = Vec::new();
161 for line in pasted.lines() {
162 let trimmed = line.trim();
163 if trimmed.is_empty() {
164 continue;
165 }
166 let path = self.normalize_pasted_path(trimmed)?;
167 refs.push(path);
168 }
169
170 if refs.is_empty() {
171 return None;
172 }
173
174 let mut insert = refs
175 .iter()
176 .map(|path| format_file_ref(path))
177 .collect::<Vec<_>>()
178 .join(" ");
179 if !insert.ends_with(' ') {
180 insert.push(' ');
181 }
182
183 Some((insert, refs.len()))
184 }
185
    /// Normalizes one pasted/dragged line into a display path, or `None`
    /// when it does not name an existing file.
    ///
    /// Lines already starting with `@` (existing file references) are
    /// rejected. Handles drag-and-drop quirks: wrapping quotes, backslash
    /// escapes, and `file://` URLs.
    fn normalize_pasted_path(&self, raw: &str) -> Option<String> {
        let trimmed = raw.trim();
        if trimmed.is_empty() || trimmed.starts_with('@') {
            return None;
        }

        // Strip shell-style quoting/escaping produced by drag-and-drop.
        let unquoted = strip_wrapping_quotes(trimmed);
        let unescaped = unescape_dragged_path(unquoted);
        // Not a file:// URL: treat the text as a literal path.
        let path = file_url_to_path(&unescaped).unwrap_or_else(|| PathBuf::from(&unescaped));
        let resolved = resolve_read_path(path.to_string_lossy().as_ref(), &self.cwd);
        if !resolved.exists() {
            return None;
        }

        Some(path_for_display(&resolved, &self.cwd))
    }
202
203 pub(super) fn insert_file_ref_path(&mut self, path: &Path) {
204 let display = path_for_display(path, &self.cwd);
205 let mut insert_text = format_file_ref(&display);
206 if !insert_text.ends_with(' ') {
207 insert_text.push(' ');
208 }
209 self.input.insert_string(&insert_text);
210 }
211
    /// Grabs an RGBA image from the system clipboard and writes it to a
    /// persistent temporary PNG file, returning the file's path.
    ///
    /// Returns `None` on any failure: clipboard support not compiled in, no
    /// image on the clipboard, a buffer-size mismatch, or encode/temp-file
    /// errors. The kept temp file is the caller's responsibility to clean up.
    #[allow(clippy::missing_const_for_fn)]
    pub(super) fn paste_image_from_clipboard() -> Option<PathBuf> {
        #[cfg(all(feature = "clipboard", feature = "image-resize"))]
        {
            use image::ImageEncoder;

            let mut clipboard = ArboardClipboard::new().ok()?;
            let image = clipboard.get_image().ok()?;

            let width = u32::try_from(image.width).ok()?;
            let height = u32::try_from(image.height).ok()?;
            let bytes = image.bytes.into_owned();
            // Sanity-check the buffer: exactly width * height * 4 bytes
            // (RGBA8), with overflow-checked arithmetic.
            let width_usize = usize::try_from(width).ok()?;
            let height_usize = usize::try_from(height).ok()?;
            let expected = width_usize.checked_mul(height_usize)?.checked_mul(4)?;
            if bytes.len() != expected {
                return None;
            }

            let mut temp_file = tempfile::Builder::new()
                .prefix("pi-paste-")
                .suffix(".png")
                .tempfile()
                .ok()?;
            let encoder = image::codecs::png::PngEncoder::new(&mut temp_file);
            if encoder
                .write_image(&bytes, width, height, image::ExtendedColorType::Rgba8)
                .is_err()
            {
                return None;
            }
            // keep() detaches the file from drop-time deletion so the path
            // stays valid after this function returns.
            let (_file, path) = temp_file.keep().ok()?;
            Some(path)
        }

        #[cfg(not(all(feature = "clipboard", feature = "image-resize")))]
        {
            None
        }
    }
252
    /// Opens the user's editor (`$VISUAL`, then `$EDITOR`, then `vi`) on a
    /// temp file seeded with the current input text and returns the edited
    /// contents.
    ///
    /// Raw mode and the alternate screen are torn down before launching the
    /// editor and restored afterwards, before the editor's exit status is
    /// even inspected, so the terminal is recovered on failure too.
    ///
    /// # Errors
    /// Returns any temp-file / spawn / read I/O error, or an error when the
    /// editor exits with a non-success status.
    pub(super) fn open_external_editor(&self) -> std::io::Result<String> {
        use std::io::Write;

        let editor = std::env::var("VISUAL")
            .or_else(|_| std::env::var("EDITOR"))
            .unwrap_or_else(|_| "vi".to_string());

        // Seed the temp file with the current editor contents.
        let mut temp_file = tempfile::NamedTempFile::new()?;
        let current_text = self.input.value();
        temp_file.write_all(current_text.as_bytes())?;
        temp_file.flush()?;

        let temp_path = temp_file.path().to_path_buf();

        // Hand the terminal over to the external editor.
        let _ = crossterm::terminal::disable_raw_mode();
        let _ = crossterm::execute!(std::io::stdout(), crossterm::terminal::LeaveAlternateScreen);

        // On unix, run via `sh -c` so $EDITOR values containing flags still
        // work; the path is passed as "$1" so spaces survive. The "--"
        // argument becomes $0 for the shell snippet.
        #[cfg(unix)]
        let status = std::process::Command::new("sh")
            .args(["-c", &format!("{editor} \"$1\"")])
            .arg("--") .arg(&temp_path)
            .status();

        #[cfg(not(unix))]
        let status = std::process::Command::new("cmd")
            .args(["/c", &format!("{} \"{}\"", editor, temp_path.display())])
            .status();

        // Restore the TUI unconditionally before checking the result.
        let _ = crossterm::execute!(std::io::stdout(), crossterm::terminal::EnterAlternateScreen);
        let _ = crossterm::terminal::enable_raw_mode();
        let _ = crossterm::execute!(
            std::io::stdout(),
            crossterm::terminal::Clear(crossterm::terminal::ClearType::All)
        );

        let status = status?;

        if !status.success() {
            return Err(std::io::Error::other(format!(
                "Editor exited with status: {status}"
            )));
        }

        let new_text = std::fs::read_to_string(&temp_path)?;
        Ok(new_text)
    }
311
312 fn navigate_history_back(&mut self) {
314 if !self.history.has_entries() {
315 return;
316 }
317
318 self.history.cursor_up();
319 self.apply_history_selection();
320 }
321
322 fn navigate_history_forward(&mut self) {
324 if self.history.cursor_is_empty() {
326 return;
327 }
328
329 self.history.cursor_down();
330 self.apply_history_selection();
331 }
332
333 fn apply_history_selection(&mut self) {
334 let selected = self.history.selected_value();
335 if selected.is_empty() {
336 self.input.reset();
337 } else {
338 self.input.set_value(selected);
339 }
340 }
341
342 fn handle_double_escape_action(&mut self) -> (bool, Option<Cmd>) {
343 let action = self
344 .config
345 .double_escape_action
346 .as_deref()
347 .unwrap_or("tree")
348 .trim();
349 if action.eq_ignore_ascii_case("none") {
350 self.last_escape_time = None;
351 return (false, None);
352 }
353 let now = std::time::Instant::now();
354 if let Some(last_time) = self.last_escape_time {
355 if now.duration_since(last_time) < std::time::Duration::from_millis(500) {
356 self.last_escape_time = None;
357 return (true, self.trigger_double_escape_action());
358 }
359 }
360 self.last_escape_time = Some(now);
361 (false, None)
362 }
363
364 fn trigger_double_escape_action(&mut self) -> Option<Cmd> {
365 let raw_action = self
366 .config
367 .double_escape_action
368 .as_deref()
369 .unwrap_or("tree")
370 .trim();
371 let action = raw_action.to_ascii_lowercase();
372 match action.as_str() {
373 "none" => None,
374 "tree" => self.handle_slash_command(SlashCommand::Tree, ""),
375 "fork" => self.handle_slash_command(SlashCommand::Fork, ""),
376 _ => {
377 self.status_message = Some(format!(
378 "Unknown doubleEscapeAction: {raw_action} (expected tree, fork, or none)"
379 ));
380 self.handle_slash_command(SlashCommand::Tree, "")
381 }
382 }
383 }
384
    /// Switches to the next (`delta >= 0`) or previous model in the active
    /// cycle set.
    ///
    /// The cycle set is the scoped model list when a scope is configured
    /// (falling back to all available models when the scope matches nothing),
    /// otherwise all available models. Candidates are sorted by
    /// "provider/id" and de-duplicated before cycling so the order is
    /// deterministic. Switching is refused while a request is running, when
    /// provider construction fails, or when the target provider requires
    /// credentials that cannot be resolved; every outcome is reported via
    /// the status line.
    #[allow(clippy::too_many_lines)]
    pub fn cycle_model(&mut self, delta: i32) {
        if self.agent_state != AgentState::Idle {
            self.status_message = Some("Cannot switch models while processing".to_string());
            return;
        }

        // A scope applies when the config lists enabled models or a runtime
        // scope has been set.
        let scope_configured = self
            .config
            .enabled_models
            .as_ref()
            .is_some_and(|patterns| !patterns.is_empty());
        let use_scope = scope_configured || !self.model_scope.is_empty();
        let mut fell_back_to_available = false;
        let mut candidates = if use_scope {
            self.model_scope.clone()
        } else {
            self.available_models.clone()
        };
        // Empty scope: fall back to everything rather than cycling nothing.
        if use_scope && candidates.is_empty() {
            candidates.clone_from(&self.available_models);
            fell_back_to_available = true;
        }

        // Deterministic order across calls, then collapse duplicates.
        candidates.sort_by(|a, b| {
            let left = format!("{}/{}", a.model.provider, a.model.id);
            let right = format!("{}/{}", b.model.provider, b.model.id);
            left.cmp(&right)
        });
        candidates.dedup_by(|left, right| model_entry_matches(left, right));

        if candidates.is_empty() {
            self.status_message = Some("No models available".to_string());
            return;
        }

        let current_index = candidates
            .iter()
            .position(|entry| model_entry_matches(entry, &self.model_entry));

        // Wrap around in both directions; when the current model is not in
        // the list, start from the first (forward) or last (backward) entry.
        let next_index = current_index.map_or_else(
            || {
                if delta >= 0 { 0 } else { candidates.len() - 1 }
            },
            |idx| {
                if delta >= 0 {
                    (idx + 1) % candidates.len()
                } else {
                    idx.checked_sub(1).unwrap_or(candidates.len() - 1)
                }
            },
        );

        let next = candidates[next_index].clone();

        // Cycling landed on the same model: nothing else to switch to.
        if model_entry_matches(&next, &self.model_entry) {
            self.status_message = Some(if use_scope && !fell_back_to_available {
                "Only one model in scope".to_string()
            } else {
                "Only one model available".to_string()
            });
            return;
        }

        let provider_impl = match providers::create_provider(&next, self.extensions.as_ref()) {
            Ok(provider_impl) => provider_impl,
            Err(err) => {
                self.status_message = Some(err.to_string());
                return;
            }
        };
        // Refuse to switch to a provider whose required credentials cannot
        // be resolved; hint at /login instead.
        let resolved_key_opt = super::commands::resolve_model_key_from_default_auth(&next);
        if crate::models::model_requires_configured_credential(&next) && resolved_key_opt.is_none()
        {
            self.status_message = Some(format!(
                "Missing credentials for provider {}. Run /login {}.",
                next.model.provider, next.model.provider
            ));
            return;
        }

        if let Err(message) =
            self.switch_active_model(&next, provider_impl, resolved_key_opt.as_deref(), "cycle")
        {
            self.status_message = Some(message);
            return;
        }
        self.status_message = Some(if fell_back_to_available {
            format!(
                "No scoped models matched; cycling all available models. Switched model: {}",
                self.model
            )
        } else {
            format!("Switched model: {}", self.model)
        });
    }
481
    /// Advances the thinking level to the next value supported by the
    /// current model, wrapping around.
    ///
    /// Uses `try_lock` on the agent and session so a busy state produces a
    /// status message instead of blocking the UI thread. The session header
    /// is updated, a change record appended, and a background save spawned
    /// only when the effective level actually changed; both locks are
    /// dropped before the save is spawned.
    pub(super) fn cycle_thinking_level(&mut self) {
        let levels = self.model_entry.available_thinking_levels();
        if levels.len() <= 1 {
            self.status_message = Some("Current model does not support thinking".to_string());
            return;
        }

        // Non-blocking locks: bail with a message rather than stalling.
        let Ok(mut agent_guard) = self.agent.try_lock() else {
            self.status_message = Some("Agent busy; try again".to_string());
            return;
        };
        let Ok(mut session_guard) = self.session.try_lock() else {
            self.status_message = Some("Session busy; try again".to_string());
            return;
        };

        // Effective level: session override first, then the agent's stream
        // options, then the type default.
        let current = session_guard
            .effective_thinking_level_for_current_path()
            .as_deref()
            .and_then(|value| value.parse::<crate::model::ThinkingLevel>().ok())
            .or_else(|| agent_guard.stream_options().thinking_level)
            .unwrap_or_default();

        let current_index = levels
            .iter()
            .position(|level| *level == current)
            .unwrap_or(0);
        let next = levels[(current_index + 1) % levels.len()];

        // Compare against the session's own level (not `current`, which may
        // have come from the agent) to decide whether this is a real change.
        let previous_level = session_guard
            .effective_thinking_level_for_current_path()
            .as_deref()
            .and_then(|value| value.parse::<crate::model::ThinkingLevel>().ok());
        session_guard.header.thinking_level = Some(next.to_string());
        let changed = previous_level != Some(next);
        if changed {
            session_guard.append_thinking_level_change(next.to_string());
        }

        agent_guard.stream_options_mut().thinking_level = Some(next);
        // Release both locks before spawning the save task.
        drop(session_guard);
        drop(agent_guard);

        if changed {
            self.spawn_save_session();
        }

        self.status_message = Some(format!("Thinking level: {next}"));
    }
531
    /// Builds the quit command: detaches extension UI plumbing, enqueues an
    /// async shutdown event, and swaps the event sender for a throwaway so
    /// no further UI messages are produced.
    pub(super) fn quit_cmd(&mut self) -> Cmd {
        if let Some(manager) = &self.extensions {
            manager.clear_ui_sender();
        }

        // Fire-and-forget shutdown notification on the runtime; it uses a
        // clone of the sender taken before the swap below.
        let shutdown_tx = self.event_tx.clone();
        self.runtime_handle.spawn(async move {
            let shutdown_cx = Cx::for_request();
            super::enqueue_ui_shutdown(&shutdown_tx, &shutdown_cx).await;
        });

        // Replace the live sender with a dummy channel; dropping the old
        // sender lets receivers observe closure.
        let (tx, _rx) = mpsc::channel::<PiMsg>(1);
        drop(std::mem::replace(&mut self.event_tx, tx));
        quit()
    }
551
    /// Central dispatcher for resolved key-bound actions.
    ///
    /// Returns the command to run, or `None` when the action is fully
    /// handled by mutating app state. `key` carries the raw key event so a
    /// few actions (e.g. `Interrupt`) can behave differently depending on
    /// the physical key that triggered them.
    #[allow(clippy::too_many_lines)]
    pub(super) fn handle_action(&mut self, action: AppAction, key: &KeyMsg) -> Option<Cmd> {
        match action {
            AppAction::Interrupt => {
                // While a request runs: restore any queued messages to the
                // editor, otherwise announce the abort.
                if self.agent_state != AgentState::Idle {
                    self.last_escape_time = None;
                    let restored = self.restore_queued_messages_to_editor(true);
                    if restored > 0 {
                        self.status_message = Some(format!(
                            "Restored {restored} queued message{}",
                            if restored == 1 { "" } else { "s" }
                        ));
                    } else {
                        self.status_message = Some("Aborting request...".to_string());
                    }
                    return None;
                }
                // Idle + Esc: check for the double-escape gesture first.
                if key.key_type == KeyType::Esc {
                    let (triggered, cmd) = self.handle_double_escape_action();
                    if triggered {
                        return cmd;
                    }
                }
                // Single Esc also collapses multi-line input mode.
                if key.key_type == KeyType::Esc && self.input_mode == InputMode::MultiLine {
                    self.input_mode = InputMode::SingleLine;
                    self.set_input_height(3);
                    self.status_message = Some("Single-line mode".to_string());
                }
                None
            }
            AppAction::Clear | AppAction::Copy => {
                // Ctrl+C semantics: abort an in-flight request, else clear
                // non-empty input, else quit on a double press within 500ms.
                if self.agent_state != AgentState::Idle {
                    if let Some(handle) = &self.abort_handle {
                        handle.abort();
                    }
                    self.status_message = Some("Aborting request...".to_string());
                    return None;
                }

                let editor_text = self.input.value();
                if !editor_text.is_empty() {
                    self.input.reset();
                    self.last_ctrlc_time = Some(std::time::Instant::now());
                    self.status_message = Some("Input cleared".to_string());
                    return None;
                }

                let now = std::time::Instant::now();
                if let Some(last_time) = self.last_ctrlc_time {
                    if now.duration_since(last_time) < std::time::Duration::from_millis(500) {
                        return Some(self.quit_cmd());
                    }
                }
                self.last_ctrlc_time = Some(now);
                self.status_message = Some("Press Ctrl+C again to quit".to_string());
                None
            }
            AppAction::PasteImage => {
                // Clipboard image -> temp PNG -> @file reference in input.
                if let Some(path) = Self::paste_image_from_clipboard() {
                    self.insert_file_ref_path(&path);
                    self.status_message = Some("Image attached".to_string());
                }
                None
            }
            AppAction::Exit => {
                // Only quit when idle with an empty input box.
                if self.agent_state == AgentState::Idle && self.input.value().is_empty() {
                    return Some(self.quit_cmd());
                }
                None
            }
            AppAction::Suspend => {
                #[cfg(unix)]
                {
                    use std::process::Command;
                    // Stop our own process via SIGTSTP; the status message is
                    // what the user sees once the process is resumed.
                    let pid = std::process::id().to_string();
                    let _ = Command::new("kill").args(["-TSTP", &pid]).status();
                    self.status_message = Some("Resumed from background".to_string());
                }
                #[cfg(not(unix))]
                {
                    self.status_message =
                        Some("Suspend not supported on this platform".to_string());
                }
                None
            }
            AppAction::ExternalEditor => {
                if self.agent_state != AgentState::Idle {
                    self.status_message = Some("Cannot open editor while processing".to_string());
                    return None;
                }
                match self.open_external_editor() {
                    Ok(new_text) => {
                        self.input.set_value(&new_text);
                        self.status_message = Some("Editor content loaded".to_string());
                    }
                    Err(e) => {
                        self.status_message = Some(format!("Editor error: {e}"));
                    }
                }
                None
            }
            AppAction::Help => self.handle_slash_command(SlashCommand::Help, ""),
            AppAction::OpenSettings => self.handle_slash_command(SlashCommand::Settings, ""),

            AppAction::CycleModelForward => {
                self.cycle_model(1);
                None
            }
            AppAction::CycleModelBackward => {
                self.cycle_model(-1);
                None
            }
            AppAction::CycleThinkingLevel => {
                self.cycle_thinking_level();
                None
            }
            AppAction::SelectModel => {
                self.open_model_selector_configured_only();
                None
            }

            AppAction::Submit => {
                // Busy: queue the text as a steering message instead of
                // submitting.
                if self.agent_state != AgentState::Idle {
                    self.queue_input(QueuedMessageKind::Steering);
                    return None;
                }
                // Multi-line mode: Enter inserts a newline.
                if self.input_mode == InputMode::MultiLine {
                    self.input.insert_rune('\n');
                    return None;
                }
                let value = self.input.value();
                if !value.trim().is_empty() {
                    return self.submit_message(value.trim());
                }
                None
            }
            AppAction::FollowUp => {
                // Busy: queue as a follow-up for after the current turn.
                if self.agent_state != AgentState::Idle {
                    self.queue_input(QueuedMessageKind::FollowUp);
                    return None;
                }
                // Empty single-line input: toggle into multi-line mode.
                let value = self.input.value();
                if self.input_mode == InputMode::SingleLine && value.trim().is_empty() {
                    self.input_mode = InputMode::MultiLine;
                    self.set_input_height(6);
                    self.status_message = Some("Multi-line mode".to_string());
                    return None;
                }
                if !value.trim().is_empty() {
                    return self.submit_message(value.trim());
                }
                None
            }
            AppAction::NewLine => {
                self.input.insert_rune('\n');
                self.input_mode = InputMode::MultiLine;
                self.set_input_height(6);
                None
            }

            AppAction::CursorUp => {
                // History navigation only in an idle, single-line editor.
                if self.agent_state == AgentState::Idle && self.input_mode == InputMode::SingleLine
                {
                    self.navigate_history_back();
                }
                None
            }
            AppAction::CursorDown => {
                if self.agent_state == AgentState::Idle && self.input_mode == InputMode::SingleLine
                {
                    self.navigate_history_forward();
                }
                None
            }

            AppAction::PageUp => {
                // Rebuild the viewport content at the current height, keep
                // the scroll offset, then page; scrolling up detaches the
                // view from the streaming tail.
                let saved_offset = self.conversation_viewport.y_offset();
                let content = self.build_conversation_content();
                let effective = self.view_effective_conversation_height().max(1);
                self.conversation_viewport.height = effective;
                self.conversation_viewport.set_content(content.trim_end());
                self.conversation_viewport.set_y_offset(saved_offset);
                self.conversation_viewport.page_up();
                self.follow_stream_tail = false;
                None
            }
            AppAction::PageDown => {
                let saved_offset = self.conversation_viewport.y_offset();
                let content = self.build_conversation_content();
                let effective = self.view_effective_conversation_height().max(1);
                self.conversation_viewport.height = effective;
                self.conversation_viewport.set_content(content.trim_end());
                self.conversation_viewport.set_y_offset(saved_offset);
                self.conversation_viewport.page_down();
                // Re-attach to the streaming tail when the bottom is reached.
                if self.is_at_bottom() {
                    self.follow_stream_tail = true;
                }
                None
            }

            AppAction::Tab => {
                // Autocomplete trigger; disabled while busy or when the
                // session picker is open.
                if self.agent_state != AgentState::Idle || self.session_picker.is_some() {
                    return None;
                }

                let text = self.input.value();
                if text.trim().is_empty() {
                    self.autocomplete.close();
                    return None;
                }

                let cursor = self.input.cursor_byte_offset();
                let response = self.autocomplete.provider.suggest(&text, cursor);

                if response.items.is_empty() {
                    self.autocomplete.close();
                    return None;
                }

                // Exactly one path suggestion: accept it immediately rather
                // than opening the popup.
                if response.items.len() == 1
                    && response
                        .items
                        .first()
                        .is_some_and(|item| item.kind == AutocompleteItemKind::Path)
                {
                    let item = response.items[0].clone();
                    self.autocomplete.replace_range = response.replace;
                    self.accept_autocomplete(&item);
                    self.autocomplete.close();
                    return None;
                }

                self.autocomplete.open_with(response);
                None
            }

            AppAction::Dequeue => {
                let restored = self.restore_queued_messages_to_editor(false);
                if restored == 0 {
                    self.status_message = Some("No queued messages to restore".to_string());
                } else {
                    self.status_message = Some(format!(
                        "Restored {restored} queued message{}",
                        if restored == 1 { "" } else { "s" }
                    ));
                }
                None
            }

            AppAction::ToggleThinking => {
                // Visibility change invalidates cached rendering, so rebuild
                // the conversation view.
                self.thinking_visible = !self.thinking_visible;
                self.message_render_cache.invalidate_all();
                let content = self.build_conversation_content();
                let effective = self.view_effective_conversation_height().max(1);
                self.conversation_viewport.height = effective;
                self.conversation_viewport.set_content(content.trim_end());
                self.status_message = Some(if self.thinking_visible {
                    "Thinking shown".to_string()
                } else {
                    "Thinking hidden".to_string()
                });
                None
            }
            AppAction::ExpandTools => {
                self.tools_expanded = !self.tools_expanded;
                // Expanding also un-collapses every tool message that was
                // individually collapsed.
                if self.tools_expanded {
                    for msg in &mut self.messages {
                        if msg.role == MessageRole::Tool {
                            msg.collapsed = false;
                        }
                    }
                }
                self.message_render_cache.invalidate_all();
                let content = self.build_conversation_content();
                let effective = self.view_effective_conversation_height().max(1);
                self.conversation_viewport.height = effective;
                self.conversation_viewport.set_content(content.trim_end());
                self.status_message = Some(if self.tools_expanded {
                    "Tool output expanded".to_string()
                } else {
                    "Tool output collapsed".to_string()
                });
                None
            }

            AppAction::BranchPicker => {
                self.open_branch_picker();
                None
            }
            AppAction::BranchNextSibling => {
                self.cycle_sibling_branch(true);
                None
            }
            AppAction::BranchPrevSibling => {
                self.cycle_sibling_branch(false);
                None
            }

            _ => None,
        }
    }
918
919 pub(super) fn should_consume_action(&self, action: AppAction) -> bool {
924 match action {
925 AppAction::CursorUp | AppAction::CursorDown => {
928 self.agent_state == AgentState::Idle && self.input_mode == InputMode::SingleLine
929 }
930
931 AppAction::Exit => {
933 self.agent_state == AgentState::Idle && self.input.value().is_empty()
934 }
935
936 AppAction::PageUp
943 | AppAction::PageDown
944 | AppAction::CycleModelForward
945 | AppAction::CycleModelBackward
946 | AppAction::CycleThinkingLevel
947 | AppAction::ToggleThinking
948 | AppAction::ExpandTools
949 | AppAction::FollowUp
950 | AppAction::NewLine
951 | AppAction::Submit
952 | AppAction::Dequeue
953 | AppAction::Interrupt
954 | AppAction::Clear
955 | AppAction::Copy
956 | AppAction::PasteImage
957 | AppAction::Suspend
958 | AppAction::ExternalEditor
959 | AppAction::Help
960 | AppAction::OpenSettings
961 | AppAction::Tab
962 | AppAction::BranchPicker
963 | AppAction::BranchNextSibling
964 | AppAction::BranchPrevSibling
965 | AppAction::SelectModel => true,
966
967 _ => false,
969 }
970 }
971}
972
973fn encode_custom_ui_key(key: &KeyMsg) -> Option<String> {
974 let control = |byte: u8| Some(char::from(byte).to_string());
975 match key.key_type {
976 KeyType::Runes => {
977 if key.runes.is_empty() {
978 None
979 } else {
980 let text: String = key.runes.iter().collect();
981 if key.alt {
982 Some(format!("\u{1b}{text}"))
983 } else {
984 Some(text)
985 }
986 }
987 }
988 KeyType::Space => Some(" ".to_string()),
989 KeyType::Enter | KeyType::ShiftEnter | KeyType::CtrlEnter | KeyType::CtrlShiftEnter => {
990 Some("\r".to_string())
991 }
992 KeyType::Tab => Some("\t".to_string()),
993 KeyType::ShiftTab => Some("\u{1b}[Z".to_string()),
994 KeyType::Esc => Some("\u{1b}".to_string()),
995 KeyType::Backspace | KeyType::CtrlH => Some("\u{7f}".to_string()),
996 KeyType::Up => Some("\u{1b}[A".to_string()),
997 KeyType::Down => Some("\u{1b}[B".to_string()),
998 KeyType::Right => Some("\u{1b}[C".to_string()),
999 KeyType::Left => Some("\u{1b}[D".to_string()),
1000 KeyType::Home => Some("\u{1b}[H".to_string()),
1001 KeyType::End => Some("\u{1b}[F".to_string()),
1002 KeyType::PgUp => Some("\u{1b}[5~".to_string()),
1003 KeyType::PgDown => Some("\u{1b}[6~".to_string()),
1004 KeyType::Delete => Some("\u{1b}[3~".to_string()),
1005 KeyType::Insert => Some("\u{1b}[2~".to_string()),
1006 KeyType::CtrlA => control(0x01),
1007 KeyType::CtrlB => control(0x02),
1008 KeyType::CtrlD => control(0x04),
1009 KeyType::CtrlE => control(0x05),
1010 KeyType::CtrlF => control(0x06),
1011 KeyType::CtrlG => control(0x07),
1012 KeyType::CtrlJ => control(0x0a),
1013 KeyType::CtrlK => control(0x0b),
1014 KeyType::CtrlL => control(0x0c),
1015 KeyType::CtrlN => control(0x0e),
1016 KeyType::CtrlO => control(0x0f),
1017 KeyType::CtrlP => control(0x10),
1018 KeyType::CtrlQ => control(0x11),
1019 KeyType::CtrlR => control(0x12),
1020 KeyType::CtrlS => control(0x13),
1021 KeyType::CtrlT => control(0x14),
1022 KeyType::CtrlU => control(0x15),
1023 KeyType::CtrlV => control(0x16),
1024 KeyType::CtrlW => control(0x17),
1025 KeyType::CtrlX => control(0x18),
1026 KeyType::CtrlY => control(0x19),
1027 KeyType::CtrlZ => control(0x1a),
1028 KeyType::Null => control(0x00),
1029 _ => None,
1030 }
1031}
1032
1033#[cfg(test)]
1034mod tests {
1035 use super::*;
1036 use crate::agent::{Agent, AgentConfig};
1037 use crate::config::Config;
1038 use crate::model::{StreamEvent, Usage};
1039 use crate::models::ModelEntry;
1040 use crate::provider::{Context, InputType, Model, ModelCost, Provider, StreamOptions};
1041 use crate::resources::{ResourceCliOptions, ResourceLoader};
1042 use crate::session::Session;
1043 use crate::tools::ToolRegistry;
1044 use asupersync::channel::mpsc;
1045 use asupersync::runtime::RuntimeBuilder;
1046 use futures::stream;
1047 use std::collections::HashMap;
1048 use std::path::Path;
1049 use std::pin::Pin;
1050 use std::sync::Arc;
1051 use std::sync::OnceLock;
1052
1053 struct DummyProvider;
1054
1055 #[async_trait::async_trait]
1056 impl Provider for DummyProvider {
1057 fn name(&self) -> &'static str {
1058 "dummy"
1059 }
1060
1061 fn api(&self) -> &'static str {
1062 "dummy"
1063 }
1064
1065 fn model_id(&self) -> &'static str {
1066 "dummy-model"
1067 }
1068
1069 async fn stream(
1070 &self,
1071 _context: &Context<'_>,
1072 _options: &StreamOptions,
1073 ) -> crate::error::Result<
1074 Pin<Box<dyn futures::Stream<Item = crate::error::Result<StreamEvent>> + Send>>,
1075 > {
1076 Ok(Box::pin(stream::empty()))
1077 }
1078 }
1079
1080 fn runtime() -> &'static asupersync::runtime::Runtime {
1081 static RT: OnceLock<asupersync::runtime::Runtime> = OnceLock::new();
1082 RT.get_or_init(|| {
1083 RuntimeBuilder::multi_thread()
1084 .blocking_threads(1, 8)
1085 .build()
1086 .expect("build runtime")
1087 })
1088 }
1089
1090 fn runtime_handle() -> asupersync::runtime::RuntimeHandle {
1091 runtime().handle()
1092 }
1093
1094 fn model_entry(
1095 provider: &str,
1096 id: &str,
1097 api_key: Option<&str>,
1098 headers: HashMap<String, String>,
1099 ) -> ModelEntry {
1100 ModelEntry {
1101 model: Model {
1102 id: id.to_string(),
1103 name: id.to_string(),
1104 api: "openai-completions".to_string(),
1105 provider: provider.to_string(),
1106 base_url: "https://example.invalid".to_string(),
1107 reasoning: true,
1108 input: vec![InputType::Text],
1109 cost: ModelCost {
1110 input: 0.0,
1111 output: 0.0,
1112 cache_read: 0.0,
1113 cache_write: 0.0,
1114 },
1115 context_window: 128_000,
1116 max_tokens: 8_192,
1117 headers: HashMap::new(),
1118 },
1119 api_key: api_key.map(str::to_string),
1120 headers,
1121 auth_header: true,
1122 compat: None,
1123 oauth_config: None,
1124 }
1125 }
1126
1127 fn build_test_app_with_event_rx(
1128 current: ModelEntry,
1129 available: Vec<ModelEntry>,
1130 ) -> (PiApp, mpsc::Receiver<PiMsg>) {
1131 let provider: Arc<dyn Provider> = Arc::new(DummyProvider);
1132 let agent = Agent::new(
1133 provider,
1134 ToolRegistry::new(&[], Path::new("."), None),
1135 AgentConfig::default(),
1136 );
1137 let session = Arc::new(asupersync::sync::Mutex::new(Session::in_memory()));
1138 let resources = ResourceLoader::empty(false);
1139 let resource_cli = ResourceCliOptions {
1140 no_skills: false,
1141 no_prompt_templates: false,
1142 no_extensions: false,
1143 no_themes: false,
1144 skill_paths: Vec::new(),
1145 prompt_paths: Vec::new(),
1146 extension_paths: Vec::new(),
1147 theme_paths: Vec::new(),
1148 };
1149 let (event_tx, event_rx) = mpsc::channel(64);
1150 let config = Config {
1151 last_changelog_version: Some(crate::platform::VERSION.to_string()),
1152 ..Config::default()
1153 };
1154 (
1155 PiApp::new(
1156 agent,
1157 session,
1158 config,
1159 resources,
1160 resource_cli,
1161 Path::new(".").to_path_buf(),
1162 current,
1163 Vec::new(),
1164 available,
1165 Vec::new(),
1166 event_tx,
1167 runtime_handle(),
1168 false,
1169 false,
1170 None,
1171 Some(KeyBindings::new()),
1172 Vec::new(),
1173 Usage::default(),
1174 ),
1175 event_rx,
1176 )
1177 }
1178
1179 fn build_test_app(current: ModelEntry, available: Vec<ModelEntry>) -> PiApp {
1180 let (app, _event_rx) = build_test_app_with_event_rx(current, available);
1181 app
1182 }
1183
    #[test]
    fn cycle_model_replaces_stream_options_api_key_and_headers() {
        // Current model carries a provider header that must not survive the switch.
        let mut current_headers = HashMap::new();
        current_headers.insert("x-stale".to_string(), "old".to_string());
        let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), current_headers);

        // Target model brings its own API key and provider header.
        let mut next_headers = HashMap::new();
        next_headers.insert("x-provider-header".to_string(), "next".to_string());
        let next = model_entry(
            "openrouter",
            "openai/gpt-4o-mini",
            Some("next-key"),
            next_headers,
        );

        let mut app = build_test_app(current.clone(), vec![current, next]);
        // Pollute the runtime stream options so the test can distinguish
        // "replaced" from "merged". Scoped block: the agent lock must be
        // released before cycle_model, which takes it again internally.
        {
            let mut guard = app.agent.try_lock().expect("agent lock");
            guard.stream_options_mut().api_key = Some("stale-key".to_string());
            guard
                .stream_options_mut()
                .headers
                .insert("x-stale".to_string(), "stale".to_string());
        }

        app.cycle_model(1);

        // Re-acquire the lock and verify the next model's credentials fully
        // replaced the stale ones.
        let mut guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(
            guard.stream_options_mut().api_key.as_deref(),
            Some("next-key")
        );
        assert_eq!(
            guard
                .stream_options_mut()
                .headers
                .get("x-provider-header")
                .map(String::as_str),
            Some("next")
        );
        assert!(
            !guard.stream_options_mut().headers.contains_key("x-stale"),
            "cycling models must replace stale provider headers"
        );
    }
1229
    #[test]
    fn cycle_model_clears_stale_api_key_when_next_model_has_no_key() {
        let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
        // Next model is keyless and does not send an auth header.
        let mut next = model_entry("ollama", "llama3.2", None, HashMap::new());
        next.auth_header = false;
        let mut app = build_test_app(current.clone(), vec![current, next]);
        // Seed stale credentials; scoped so the agent lock is released before
        // cycle_model takes it again.
        {
            let mut guard = app.agent.try_lock().expect("agent lock");
            guard.stream_options_mut().api_key = Some("stale-key".to_string());
            guard
                .stream_options_mut()
                .headers
                .insert("x-stale".to_string(), "stale".to_string());
        }

        app.cycle_model(1);

        // Keyless target: both the key and the header set must end up empty.
        let mut guard = app.agent.try_lock().expect("agent lock");
        assert!(
            guard.stream_options_mut().api_key.is_none(),
            "cycling to a keyless model must clear stale API key"
        );
        assert!(
            guard.stream_options_mut().headers.is_empty(),
            "cycling to keyless model with no headers must clear stale headers"
        );
    }
1257
    #[test]
    fn cycle_model_clamps_thinking_level_for_non_reasoning_targets() {
        let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
        // Target model advertises no reasoning support.
        let mut next = model_entry("ollama", "llama3.2", None, HashMap::new());
        next.auth_header = false;
        next.model.reasoning = false;
        let mut app = build_test_app(current.clone(), vec![current, next]);

        // Set a high thinking level on both runtime and session state so the
        // clamp has something to act on. Each lock is scoped and released
        // before cycle_model runs.
        {
            let mut guard = app.agent.try_lock().expect("agent lock");
            guard.stream_options_mut().thinking_level = Some(crate::model::ThinkingLevel::High);
        }
        {
            let mut guard = app.session.try_lock().expect("session lock");
            guard.header.thinking_level = Some(crate::model::ThinkingLevel::High.to_string());
        }

        app.cycle_model(1);

        // Runtime thinking level must be clamped to Off for the new model.
        let mut agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(
            agent_guard.stream_options_mut().thinking_level,
            Some(crate::model::ThinkingLevel::Off)
        );
        // Drop before taking the session lock to avoid holding both at once.
        drop(agent_guard);

        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(
            session_guard.header.thinking_level.as_deref(),
            Some("off"),
            "session thinking level should clamp alongside the active model"
        );
    }
1291
1292 #[test]
1293 fn slash_model_allows_switch_to_keyless_provider_without_api_key() {
1294 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1295 let mut keyless = model_entry("ollama", "llama3.2", None, HashMap::new());
1296 keyless.auth_header = false;
1297 let mut app = build_test_app(current.clone(), vec![current, keyless]);
1298
1299 let _ = app.handle_slash_command(SlashCommand::Model, "ollama/llama3.2");
1300
1301 assert_eq!(app.model, "ollama/llama3.2");
1302 let mut guard = app.agent.try_lock().expect("agent lock");
1303 assert!(
1304 guard.stream_options_mut().api_key.is_none(),
1305 "keyless model switch must not keep stale API key"
1306 );
1307 }
1308
1309 #[test]
1310 fn slash_model_rejects_missing_credentials_for_required_provider() {
1311 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1312 let mut requires_creds = model_entry("acme-remote", "cloud-model", None, HashMap::new());
1313 requires_creds.auth_header = true;
1314 let mut app = build_test_app(current.clone(), vec![current, requires_creds]);
1315
1316 let _ = app.handle_slash_command(SlashCommand::Model, "acme-remote/cloud-model");
1317
1318 assert_eq!(app.model, "openai/gpt-4o-mini");
1319 assert!(
1320 app.status_message
1321 .as_deref()
1322 .is_some_and(|msg| msg.contains("Missing credentials for provider acme-remote")),
1323 "switch should fail fast when selected provider still lacks credentials"
1324 );
1325 }
1326
1327 #[test]
1328 fn slash_model_treats_blank_inline_key_as_missing_credentials() {
1329 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1330 let mut blank_key = model_entry("acme-remote", "cloud-model", Some(" "), HashMap::new());
1331 blank_key.auth_header = true;
1332 let mut app = build_test_app(current.clone(), vec![current, blank_key]);
1333
1334 let _ = app.handle_slash_command(SlashCommand::Model, "acme-remote/cloud-model");
1335
1336 assert_eq!(app.model, "openai/gpt-4o-mini");
1337 assert!(
1338 app.status_message
1339 .as_deref()
1340 .is_some_and(|msg| msg.contains("Missing credentials for provider acme-remote")),
1341 "blank inline keys must not bypass credential checks"
1342 );
1343 }
1344
    #[test]
    fn slash_thinking_clamps_and_avoids_duplicate_history_for_non_reasoning_models() {
        // Only model: keyless, non-reasoning, so /thinking must clamp to off.
        let mut current = model_entry("ollama", "llama3.2", None, HashMap::new());
        current.auth_header = false;
        current.model.reasoning = false;
        let mut app = build_test_app(current.clone(), vec![current]);

        // Issue the same command twice: the second one is a no-op for history.
        let _ = app.handle_slash_command(SlashCommand::Thinking, "high");
        let _ = app.handle_slash_command(SlashCommand::Thinking, "high");

        let agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(
            agent_guard.stream_options().thinking_level,
            Some(crate::model::ThinkingLevel::Off)
        );
        // Release the agent lock before taking the session lock.
        drop(agent_guard);

        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(session_guard.header.thinking_level.as_deref(), Some("off"));
        // Exactly one ThinkingLevelChange entry despite two identical commands.
        let thinking_changes = session_guard
            .entries_for_current_path()
            .iter()
            .filter(|entry| matches!(entry, crate::session::SessionEntry::ThinkingLevelChange(_)))
            .count();
        assert_eq!(
            thinking_changes, 1,
            "reapplying the same effective thinking level should not add duplicate history"
        );
    }
1374
    #[test]
    fn cycle_thinking_level_action_updates_runtime_and_session_state() {
        let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
        let mut app = build_test_app(current.clone(), vec![current]);

        app.handle_action(
            AppAction::CycleThinkingLevel,
            &KeyMsg::from_type(KeyType::ShiftTab),
        );

        // First cycle step lands on Minimal in the runtime stream options.
        let agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(
            agent_guard.stream_options().thinking_level,
            Some(crate::model::ThinkingLevel::Minimal)
        );
        // Release before touching the session lock.
        drop(agent_guard);

        // Session header mirrors the runtime level and records one change.
        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(
            session_guard.header.thinking_level.as_deref(),
            Some("minimal")
        );
        let thinking_changes = session_guard
            .entries_for_current_path()
            .iter()
            .filter(|entry| matches!(entry, crate::session::SessionEntry::ThinkingLevelChange(_)))
            .count();
        assert_eq!(thinking_changes, 1);
        drop(session_guard);

        // User-visible confirmation in the status line.
        assert_eq!(
            app.status_message.as_deref(),
            Some("Thinking level: minimal")
        );
    }
1410
    #[test]
    fn cycle_thinking_level_action_reports_unsupported_models() {
        // Non-reasoning model: the cycle action must be a no-op with feedback.
        let mut current = model_entry("ollama", "llama3.2", None, HashMap::new());
        current.auth_header = false;
        current.model.reasoning = false;
        let mut app = build_test_app(current.clone(), vec![current]);

        app.handle_action(
            AppAction::CycleThinkingLevel,
            &KeyMsg::from_type(KeyType::ShiftTab),
        );

        // Runtime state untouched.
        let agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(agent_guard.stream_options().thinking_level, None);
        drop(agent_guard);

        // Session state untouched and no history entry appended.
        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(session_guard.header.thinking_level, None);
        let thinking_changes = session_guard
            .entries_for_current_path()
            .iter()
            .filter(|entry| matches!(entry, crate::session::SessionEntry::ThinkingLevelChange(_)))
            .count();
        assert_eq!(thinking_changes, 0);
        drop(session_guard);

        // Only observable effect: an explanatory status message.
        assert_eq!(
            app.status_message.as_deref(),
            Some("Current model does not support thinking")
        );
    }
1442
    #[test]
    fn cycle_thinking_level_action_does_not_persist_without_agent_lock() {
        let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
        let mut app = build_test_app(current.clone(), vec![current]);
        // Hold the agent lock for the whole test to simulate a busy agent;
        // the action's internal try_lock must fail and back off.
        let agent = Arc::clone(&app.agent);
        let _agent_guard = agent.try_lock().expect("agent lock");

        app.handle_action(
            AppAction::CycleThinkingLevel,
            &KeyMsg::from_type(KeyType::ShiftTab),
        );

        // Session state must be untouched: no level, no history entry.
        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(session_guard.header.thinking_level, None);
        let thinking_changes = session_guard
            .entries_for_current_path()
            .iter()
            .filter(|entry| matches!(entry, crate::session::SessionEntry::ThinkingLevelChange(_)))
            .count();
        assert_eq!(thinking_changes, 0);
        drop(session_guard);

        assert_eq!(app.status_message.as_deref(), Some("Agent busy; try again"));
    }
1467
1468 #[test]
1469 fn cycle_thinking_level_action_is_consumed_by_app() {
1470 let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
1471 let app = build_test_app(current.clone(), vec![current]);
1472
1473 assert!(app.should_consume_action(AppAction::CycleThinkingLevel));
1474 }
1475
1476 #[test]
1477 fn double_escape_action_none_does_not_arm_or_trigger() {
1478 let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
1479 let mut app = build_test_app(current.clone(), vec![current]);
1480 app.config.double_escape_action = Some("none".to_string());
1481
1482 let (triggered, cmd) = app.handle_double_escape_action();
1483 assert!(!triggered);
1484 assert!(cmd.is_none());
1485 assert!(app.last_escape_time.is_none());
1486
1487 let (triggered_again, cmd_again) = app.handle_double_escape_action();
1488 assert!(!triggered_again);
1489 assert!(cmd_again.is_none());
1490 assert!(app.last_escape_time.is_none());
1491 }
1492
    #[test]
    fn session_header_sync_updates_runtime_model_and_clamps_thinking() {
        let current = model_entry("openai", "gpt-5.2", Some("old-key"), HashMap::new());
        // Target model: keyless, non-reasoning, with one provider header.
        let mut next_headers = HashMap::new();
        next_headers.insert("x-provider-header".to_string(), "next".to_string());
        let mut next = model_entry("acme-local", "plain-model", None, next_headers.clone());
        next.auth_header = false;
        next.model.reasoning = false;
        let mut app = build_test_app(current.clone(), vec![current, next.clone()]);

        // Seed stale runtime credentials/headers and a high thinking level.
        // Scoped: the agent lock must be free before the sync call.
        {
            let mut guard = app.agent.try_lock().expect("agent lock");
            guard.stream_options_mut().api_key = Some("stale-key".to_string());
            let _ = guard
                .stream_options_mut()
                .headers
                .insert("x-stale".to_string(), "old".to_string());
            guard.stream_options_mut().thinking_level = Some(crate::model::ThinkingLevel::High);
        }
        // Point the session header at the target model.
        {
            let mut guard = app.session.try_lock().expect("session lock");
            guard.header.provider = Some(next.model.provider.clone());
            guard.header.model_id = Some(next.model.id);
            guard.header.thinking_level = Some(crate::model::ThinkingLevel::High.to_string());
        }

        app.sync_runtime_selection_from_session_header()
            .expect("sync runtime selection");

        // Runtime now reflects the target model: provider/id switched, key
        // cleared, headers replaced wholesale, thinking clamped to Off.
        let agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(agent_guard.provider().name(), "acme-local");
        assert_eq!(agent_guard.provider().model_id(), "plain-model");
        assert_eq!(agent_guard.stream_options().api_key, None);
        assert_eq!(agent_guard.stream_options().headers, next_headers);
        assert_eq!(
            agent_guard.stream_options().thinking_level,
            Some(crate::model::ThinkingLevel::Off)
        );
        drop(agent_guard);

        // App-level and shared model entries follow the switch.
        assert_eq!(app.model, "acme-local/plain-model");
        assert_eq!(app.model_entry.model.provider, "acme-local");
        assert_eq!(app.model_entry.model.id, "plain-model");
        let shared_guard = app.model_entry_shared.lock().expect("shared model lock");
        assert_eq!(shared_guard.model.provider, "acme-local");
        assert_eq!(shared_guard.model.id, "plain-model");
        drop(shared_guard);

        // Session header is clamped too and exactly one change is recorded.
        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(session_guard.header.thinking_level.as_deref(), Some("off"));
        let thinking_changes = session_guard
            .entries_for_current_path()
            .iter()
            .filter(|entry| matches!(entry, crate::session::SessionEntry::ThinkingLevelChange(_)))
            .count();
        assert_eq!(thinking_changes, 1);
    }
1550
1551 #[test]
1552 fn session_header_sync_rejects_missing_credentials_without_switching() {
1553 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1554 let mut requires_creds = model_entry("acme-remote", "cloud-model", None, HashMap::new());
1555 requires_creds.auth_header = true;
1556 let mut app = build_test_app(current.clone(), vec![current, requires_creds]);
1557
1558 {
1559 let mut guard = app.session.try_lock().expect("session lock");
1560 guard.header.provider = Some("acme-remote".to_string());
1561 guard.header.model_id = Some("cloud-model".to_string());
1562 }
1563
1564 let err = app
1565 .sync_runtime_selection_from_session_header()
1566 .expect_err("missing credentials should fail closed");
1567 assert_eq!(
1568 err,
1569 "Missing credentials for provider acme-remote. Run /login acme-remote."
1570 );
1571 assert_eq!(app.model, "openai/gpt-4o-mini");
1572 assert_eq!(app.model_entry.model.provider, "openai");
1573 assert_eq!(app.model_entry.model.id, "gpt-4o-mini");
1574 }
1575
    #[test]
    fn session_header_sync_ignores_incomplete_model_header_and_keeps_current_runtime() {
        // Only model: keyless, non-reasoning.
        let mut current = model_entry("acme-local", "plain-model", None, HashMap::new());
        current.auth_header = false;
        current.model.reasoning = false;
        let mut app = build_test_app(current.clone(), vec![current]);

        // Seed a high runtime thinking level; scoped so the lock is free.
        {
            let mut guard = app.agent.try_lock().expect("agent lock");
            guard.stream_options_mut().thinking_level = Some(crate::model::ThinkingLevel::High);
        }
        // Incomplete header: provider set but model_id missing.
        {
            let mut guard = app.session.try_lock().expect("session lock");
            guard.header.provider = Some("partial-provider".to_string());
            guard.header.model_id = None;
            guard.header.thinking_level = Some(crate::model::ThinkingLevel::High.to_string());
        }

        app.sync_runtime_selection_from_session_header()
            .expect("incomplete headers should not block runtime sync");

        // Runtime keeps the current model; thinking still clamps to Off
        // because the current model is non-reasoning.
        let agent_guard = app.agent.try_lock().expect("agent lock");
        assert_eq!(agent_guard.provider().name(), "acme-local");
        assert_eq!(agent_guard.provider().model_id(), "plain-model");
        assert_eq!(
            agent_guard.stream_options().thinking_level,
            Some(crate::model::ThinkingLevel::Off)
        );
        drop(agent_guard);

        assert_eq!(app.model, "acme-local/plain-model");
        assert_eq!(app.model_entry.model.provider, "acme-local");
        assert_eq!(app.model_entry.model.id, "plain-model");

        // Session header keeps its partial provider but clamps the level.
        let session_guard = app.session.try_lock().expect("session lock");
        assert_eq!(
            session_guard.header.provider.as_deref(),
            Some("partial-provider")
        );
        assert_eq!(session_guard.header.model_id, None);
        assert_eq!(session_guard.header.thinking_level.as_deref(), Some("off"));
    }
1618
1619 #[test]
1620 fn custom_extension_key_handler_queues_rune_input_when_active() {
1621 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1622 let mut app = build_test_app(current.clone(), vec![current]);
1623 app.extension_custom_active = true;
1624
1625 let consumed = app.handle_custom_extension_key(&KeyMsg::from_char('w'));
1626 assert!(consumed, "custom overlay should consume key input");
1627 assert_eq!(
1628 app.extension_custom_key_queue.pop_front().as_deref(),
1629 Some("w")
1630 );
1631 }
1632
1633 #[test]
1634 fn custom_extension_key_handler_preserves_ctrl_c_for_global_exit() {
1635 let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
1636 let mut app = build_test_app(current.clone(), vec![current]);
1637 app.extension_custom_active = true;
1638
1639 let consumed = app.handle_custom_extension_key(&KeyMsg::from_type(KeyType::CtrlC));
1640 assert!(
1641 !consumed,
1642 "Ctrl+C should remain available for normal global handling"
1643 );
1644 assert!(app.extension_custom_key_queue.is_empty());
1645 }
1646
    #[test]
    fn quit_cmd_schedules_shutdown_when_event_queue_is_full() {
        let current = model_entry("openai", "gpt-4o-mini", Some("old-key"), HashMap::new());
        let (mut app, mut event_rx) = build_test_app_with_event_rx(current.clone(), vec![current]);
        // Fill the bounded event channel so quit_cmd's send cannot succeed
        // immediately. NOTE(review): assumes the test channel has capacity 1 —
        // confirm against build_test_app_with_event_rx.
        app.event_tx
            .try_send(PiMsg::System("busy".to_string()))
            .expect("fill bounded event channel");

        let _ = app.quit_cmd();

        // Drain the channel: the pre-existing message must come out first,
        // followed by the shutdown scheduled despite the full queue.
        let (first, second) = runtime().block_on(async {
            let cx = asupersync::Cx::for_request();
            let first = event_rx.recv(&cx).await.expect("first queued message");
            let second = event_rx.recv(&cx).await.expect("shutdown message");
            (first, second)
        });

        assert!(matches!(first, PiMsg::System(text) if text == "busy"));
        assert!(matches!(second, PiMsg::UiShutdown));
    }
1667}