1use std::collections::HashSet;
7use std::path::{Path, PathBuf};
8
9use anyhow::{Result, bail};
10use chrono::{Datelike, Local};
11use glob::Pattern;
12use thiserror::Error;
13
14use crate::auth::AuthStorage;
15use crate::cli;
16use crate::config::Config;
17use crate::model::{self, AssistantMessage, ContentBlock, ImageContent, TextContent};
18use crate::models::{
19 ModelEntry, ModelRegistry, default_models_path, model_entry_is_ready,
20 model_requires_configured_credential, normalize_api_key_opt,
21};
22use crate::provider::{StreamOptions, ThinkingBudgets};
23use crate::provider_metadata::{
24 canonical_provider_id, provider_ids_match, split_provider_model_spec,
25};
26use crate::session::Session;
27use crate::tools::process_file_arguments;
28
/// The first user message, assembled from CLI `@file` arguments and/or a
/// positional message (see `prepare_initial_message`).
#[derive(Debug, Clone)]
pub struct InitialMessage {
    /// Combined text: processed file contents plus any positional message.
    pub text: String,
    /// Images extracted from `@file` arguments.
    pub images: Vec<ImageContent>,
}
34
/// A model admitted by `--models` scoping, with an optional thinking level
/// taken from a `pattern:level` suffix.
#[derive(Debug, Clone)]
pub struct ScopedModel {
    pub model: ModelEntry,
    /// Thinking level parsed from the pattern suffix, if one was given.
    pub thinking_level: Option<model::ThinkingLevel>,
}
40
/// Outcome of matching a single non-glob `--models` pattern.
#[derive(Debug, Clone)]
struct ParsedModelResult {
    /// The matched model, if any.
    model: Option<ModelEntry>,
    /// Thinking level from a `:level` suffix; dropped when a warning is set.
    thinking_level: Option<model::ThinkingLevel>,
    /// Human-readable problem with the pattern; callers print it to stderr.
    warning: Option<String>,
}
47
/// The fully-resolved startup model choice.
#[derive(Debug, Clone)]
pub struct ModelSelection {
    pub model_entry: ModelEntry,
    /// Effective thinking level, clamped to what the model supports.
    pub thinking_level: model::ThinkingLevel,
    /// Models admitted by `--models` scoping, carried through for later use.
    pub scoped_models: Vec<ScopedModel>,
    /// Optional user-facing notice explaining why a fallback model was chosen.
    pub fallback_message: Option<String>,
}
55
/// Fatal startup conditions surfaced to the user before a session begins.
#[derive(Debug, Error)]
pub enum StartupError {
    /// No model could be selected at all (empty catalog, no credentials).
    #[error("No models available. Set API keys in environment variables or create {models_path}")]
    NoModelsAvailable { models_path: PathBuf },
    /// The chosen provider requires a credential that was not found.
    #[error("No API key found for provider {provider}. Set env var or use --api-key.")]
    MissingApiKey { provider: String },
}
63
/// A project context file (AGENTS.md / CLAUDE.md) injected into the system prompt.
#[derive(Debug, Clone)]
struct ContextFile {
    /// Display path, used both as section heading and as the dedup key.
    path: String,
    /// Raw file contents.
    content: String,
}
69
/// Result of trying to restore a session's previously-saved model.
struct RestoreResult {
    /// The restored (or substituted fallback) model, if one could be chosen.
    model: Option<ModelEntry>,
    /// User-facing notice when a fallback was substituted immediately.
    fallback_message: Option<String>,
    /// Warning held back because no fallback existed yet; the caller appends
    /// the eventually-chosen model to it.
    deferred_warning: Option<String>,
}
75
76pub fn apply_piped_stdin(cli: &mut cli::Cli, stdin_content: Option<String>) {
77 if let Some(stdin_content) = stdin_content {
78 let stdin_content = stdin_content.trim();
80 if stdin_content.is_empty() {
81 return;
82 }
83 cli.print = true;
84 cli.args.insert(0, stdin_content.to_string());
85 }
86}
87
88#[allow(clippy::missing_const_for_fn)]
89pub fn normalize_cli(cli: &mut cli::Cli) {
90 if cli.print {
91 cli.no_session = true;
92 }
93
94 if let Some(provider) = &mut cli.provider {
95 *provider = provider.to_ascii_lowercase();
96 }
97}
98
99pub fn validate_rpc_args(cli: &cli::Cli) -> Result<()> {
100 if cli.mode.as_deref() == Some("rpc") && !cli.file_args().is_empty() {
101 bail!("Error: @file arguments are not supported in RPC mode");
102 }
103 Ok(())
104}
105
106pub fn prepare_initial_message(
107 cwd: &Path,
108 file_args: &[String],
109 messages: &mut Vec<String>,
110 auto_resize_images: bool,
111) -> Result<Option<InitialMessage>> {
112 if file_args.is_empty() {
113 return Ok(None);
114 }
115
116 let processed = process_file_arguments(file_args, cwd, auto_resize_images)?;
117 let mut initial_message = processed.text;
118 let has_message = !messages.is_empty();
119 if has_message {
120 initial_message.push_str(&messages.remove(0));
121 }
122
123 if initial_message.is_empty() && processed.images.is_empty() && !has_message {
124 return Ok(None);
125 }
126
127 Ok(Some(InitialMessage {
128 text: initial_message,
129 images: processed.images,
130 }))
131}
132
133pub fn build_initial_content(initial: &InitialMessage) -> Vec<ContentBlock> {
134 let mut content = Vec::new();
135 content.push(ContentBlock::Text(TextContent::new(initial.text.clone())));
136 for image in &initial.images {
137 content.push(ContentBlock::Image(image.clone()));
138 }
139 content
140}
141
/// Assembles the system prompt sent to the model.
///
/// Layering order: custom or default base prompt, optional appended prompt,
/// project context files, optional skills prompt, then the current date/time
/// and (optionally) the working directory. `test_mode` substitutes stable
/// placeholders so output stays deterministic.
///
/// # Errors
/// Fails when a prompt flag names a file that exists but cannot be read.
#[allow(clippy::too_many_arguments)]
pub fn build_system_prompt(
    cli: &cli::Cli,
    cwd: &Path,
    enabled_tools: &[&str],
    skills_prompt: Option<&str>,
    global_dir: &Path,
    package_dir: &Path,
    test_mode: bool,
    include_cwd: bool,
) -> Result<String> {
    use std::fmt::Write as _;

    // Either flag may be inline text or a path to a file containing the text.
    let custom_prompt = resolve_prompt_input(cli.system_prompt.as_deref(), "system prompt")?;
    let append_prompt =
        resolve_prompt_input(cli.append_system_prompt.as_deref(), "append system prompt")?;
    let context_files = load_project_context_files(cwd, global_dir);

    // A custom prompt fully replaces the default; --append only appends.
    let mut prompt =
        custom_prompt.unwrap_or_else(|| default_system_prompt(enabled_tools, package_dir));

    if let Some(append_prompt) = append_prompt {
        prompt.push_str("\n\n");
        prompt.push_str(&append_prompt);
    }

    if !context_files.is_empty() {
        prompt.push_str("\n\n# Project Context\n\n");
        prompt.push_str("Project-specific instructions and guidelines:\n\n");
        for file in &context_files {
            let _ = write!(prompt, "## {}\n\n{}\n\n", file.path, file.content);
        }
    }

    if let Some(skills_prompt) = skills_prompt {
        prompt.push_str(skills_prompt);
    }

    // Stable placeholders keep test output deterministic.
    let date_time = if test_mode {
        "<TIMESTAMP>".to_string()
    } else {
        format_current_datetime()
    };
    let _ = write!(prompt, "\nCurrent date and time: {date_time}");
    if include_cwd {
        let cwd_display = if test_mode {
            "<CWD>".to_string()
        } else {
            cwd.display().to_string()
        };
        let _ = write!(prompt, "\nCurrent working directory: {cwd_display}");
    }

    Ok(prompt)
}
197
198fn resolve_prompt_input(input: Option<&str>, description: &str) -> Result<Option<String>> {
199 let Some(value) = input else {
200 return Ok(None);
201 };
202
203 let path = Path::new(value);
204 if path.exists() {
205 let content = std::fs::read_to_string(path)
206 .map_err(|err| anyhow::anyhow!("Could not read {description} file {value}: {err}"))?;
207 Ok(Some(content))
208 } else {
209 Ok(Some(value.to_string()))
210 }
211}
212
/// Builds the default system prompt from the enabled tool set and paths to
/// the bundled pi documentation under `package_dir`.
fn default_system_prompt(enabled_tools: &[&str], package_dir: &Path) -> String {
    // Known tool name -> one-line description. Enabled tools without an
    // entry here are simply omitted from the listing.
    let tool_descriptions = [
        ("read", "Read file contents"),
        ("bash", "Execute bash commands (ls, grep, find, etc.)"),
        (
            "edit",
            "Make surgical edits to files (find exact text and replace)",
        ),
        ("write", "Create or overwrite files"),
        (
            "grep",
            "Search file contents for patterns (respects .gitignore, supports hashline=true for use with hashline_edit)",
        ),
        ("find", "Find files by glob pattern (respects .gitignore)"),
        ("ls", "List directory contents"),
        (
            "hashline_edit",
            "Apply precise file edits using LINE#HASH tags from read or grep with hashline=true",
        ),
    ];

    // Bullet list of enabled tools, in the caller's order.
    let mut tools = Vec::new();
    for tool in enabled_tools {
        if let Some((_, description)) = tool_descriptions.iter().find(|(name, _)| name == tool) {
            tools.push(format!("- {tool}: {description}"));
        }
    }

    let tools_list = if tools.is_empty() {
        "(none)".to_string()
    } else {
        tools.join("\n")
    };

    let has_tool = |name: &str| enabled_tools.contains(&name);
    let has_bash = has_tool("bash");
    let has_edit = has_tool("edit");
    let has_write = has_tool("write");
    let has_grep = has_tool("grep");
    let has_find = has_tool("find");
    let has_ls = has_tool("ls");
    let has_read = has_tool("read");
    let has_hashline_edit = has_tool("hashline_edit");

    // Guidelines are tailored to which tool combinations are enabled.
    let mut guidelines_list = Vec::new();
    if has_bash && !has_grep && !has_find && !has_ls {
        guidelines_list.push("Use bash for file operations like ls, rg, find");
    } else if has_bash && (has_grep || has_find || has_ls) {
        guidelines_list.push(
            "Prefer grep/find/ls tools over bash for file exploration (faster, respects .gitignore)",
        );
    }

    if has_read && has_edit {
        guidelines_list.push(
            "Use read to examine files before editing. You must use this tool instead of cat or sed.",
        );
    }
    if has_edit {
        guidelines_list.push("Use edit for precise changes (old text must match exactly)");
    }
    if has_hashline_edit && has_read {
        guidelines_list.push(
            "For large files or complex multi-site edits, use read or grep with hashline=true to get LINE#HASH tags, then use hashline_edit for precise line-addressed edits",
        );
    }
    if has_write {
        guidelines_list.push("Use write only for new files or complete rewrites");
    }
    if has_edit || has_write {
        guidelines_list.push(
            "When summarizing your actions, output plain text directly - do NOT use cat or bash to display what you did",
        );
    }

    // Always-on guidance, independent of the tool set.
    guidelines_list.push("Be concise in your responses");
    guidelines_list.push("Show file paths clearly when working with files");

    let guidelines = guidelines_list
        .iter()
        .map(|g| format!("- {g}"))
        .collect::<Vec<_>>()
        .join("\n");

    // Absolute paths to the bundled documentation, rendered into the prompt.
    let readme_path = package_dir.join("README.md").display().to_string();
    let docs_path = package_dir.join("docs").display().to_string();
    let examples_path = package_dir.join("examples").display().to_string();

    format!(
        "You are an expert coding assistant operating inside pi, a coding agent harness. You help users by reading files, executing commands, editing code, and writing new files.\n\nAvailable tools:\n{tools_list}\n\nIn addition to the tools above, you may have access to other custom tools depending on the project.\n\nGuidelines:\n{guidelines}\n\nPi documentation (read only when the user asks about pi itself, its SDK, extensions, themes, skills, or TUI):\n- Main documentation: {readme_path}\n- Additional docs: {docs_path}\n- Examples: {examples_path} (extensions, custom tools, SDK)\n- When asked about: extensions (docs/extensions.md, examples/extensions/), themes (docs/themes.md), skills (docs/skills.md), prompt templates (docs/prompt-templates.md), TUI components (docs/tui.md), keybindings (docs/keybindings.md), SDK integrations (docs/sdk.md), custom providers (docs/custom-provider.md), adding models (docs/models.md), pi packages (docs/packages.md)\n- When working on pi topics, read the docs and examples, and follow .md cross-references before implementing\n- Always read pi .md files completely and follow links to related docs (e.g., tui.md for TUI API details)"
    )
}
305
306fn load_project_context_files(cwd: &Path, global_dir: &Path) -> Vec<ContextFile> {
307 let mut context_files = Vec::new();
308 let mut seen = HashSet::new();
309
310 if let Some(global) = load_context_file_from_dir(global_dir) {
311 seen.insert(global.path.clone());
312 context_files.push(global);
313 }
314
315 let mut ancestor_files = Vec::new();
316 let mut current = cwd.to_path_buf();
317
318 loop {
319 if let Some(context) = load_context_file_from_dir(¤t) {
320 if seen.insert(context.path.clone()) {
321 ancestor_files.push(context);
322 }
323 }
324
325 if !current.pop() {
326 break;
327 }
328 }
329
330 ancestor_files.reverse();
331 context_files.extend(ancestor_files);
332 context_files
333}
334
335fn load_context_file_from_dir(dir: &Path) -> Option<ContextFile> {
336 let candidates = ["AGENTS.md", "CLAUDE.md"];
337 for filename in candidates {
338 let path = dir.join(filename);
339 if path.exists() {
340 match std::fs::read_to_string(&path) {
341 Ok(content) => {
342 return Some(ContextFile {
343 path: path.display().to_string(),
344 content,
345 });
346 }
347 Err(err) => {
348 eprintln!("Warning: Could not read {}: {err}", path.display());
349 }
350 }
351 }
352 }
353 None
354}
355
356fn format_current_datetime() -> String {
357 let now = Local::now();
358 let date = format!(
359 "{}, {} {}, {}",
360 now.format("%A"),
361 now.format("%B"),
362 now.day(),
363 now.year()
364 );
365 let time = format!("{} {}", now.format("%I:%M:%S %p"), now.format("%Z"));
366 format!("{date}, {time}")
367}
368
/// Resolves which model and thinking level to start the session with.
///
/// Model precedence: explicit `--provider`/`--model` flags, `--models`
/// scope (new sessions only), the model saved in the session, the
/// configured default, then the best model with detected credentials; as a
/// last resort a catalog model is picked so setup prompts can run.
/// `fallback_message` on the result explains any substitution.
///
/// # Errors
/// Fails when an explicit flag names an unknown model/provider, when the
/// `--thinking` value is invalid, or when no model exists at all
/// ([`StartupError::NoModelsAvailable`]).
#[allow(clippy::too_many_lines)]
pub fn select_model_and_thinking(
    cli: &cli::Cli,
    config: &Config,
    session: &Session,
    registry: &ModelRegistry,
    scoped_models: &[ScopedModel],
    global_dir: &Path,
) -> Result<ModelSelection> {
    let is_continuing = cli.r#continue || cli.resume || cli.session.is_some();
    let mut selected_model: Option<ModelEntry> = None;
    let mut scoped_thinking: Option<model::ThinkingLevel> = None;
    let mut fallback_message = None;
    let mut deferred_restore_warning = None;

    // Case 1: both --provider and --model — exact (or ad-hoc) match only.
    if let (Some(provider), Some(model_id)) = (cli.provider.as_deref(), cli.model.as_deref()) {
        let found = registry
            .find(provider, model_id)
            .or_else(|| crate::models::ad_hoc_model_entry(provider, model_id));
        if found.is_none() {
            bail!("Model {provider}/{model_id} not found");
        }
        selected_model = found;
    } else if let Some(provider) = cli.provider.as_deref() {
        // Case 2: --provider only — pick that provider's default, preferring
        // models whose credentials are ready.
        let candidates: Vec<ModelEntry> = registry
            .models()
            .iter()
            .filter(|m| provider_ids_match(&m.model.provider, provider))
            .cloned()
            .collect();
        if candidates.is_empty() {
            bail!("No models available for provider {provider}");
        }
        let ready_candidates: Vec<ModelEntry> = candidates
            .iter()
            .filter(|entry| model_entry_is_ready(entry))
            .cloned()
            .collect();
        let preferred_pool = if ready_candidates.is_empty() {
            candidates.as_slice()
        } else {
            ready_candidates.as_slice()
        };
        // Honor config.default_model when it belongs to the preferred pool.
        selected_model = config
            .default_model
            .as_deref()
            .and_then(|default_model| registry.find(provider, default_model))
            .filter(|found| {
                preferred_pool.iter().any(|candidate| {
                    candidate.model.id.eq_ignore_ascii_case(&found.model.id)
                        && provider_ids_match(&candidate.model.provider, &found.model.provider)
                })
            })
            .or_else(|| Some(default_model_from_candidates(preferred_pool)));
    } else if let Some(model_id) = cli.model.as_deref() {
        // Case 3: --model only — may be "provider/model" or a bare id.
        if let Some((provider, scoped_model_id)) = split_provider_model_spec(model_id) {
            selected_model = registry
                .find(provider, scoped_model_id)
                .or_else(|| crate::models::ad_hoc_model_entry(provider, scoped_model_id));
        }

        if selected_model.is_none() {
            // Bare id: gather every provider that offers it.
            let matches: Vec<ModelEntry> = registry
                .models()
                .iter()
                .filter(|m| m.model.id.eq_ignore_ascii_case(model_id))
                .cloned()
                .collect();
            if matches.is_empty() {
                bail!("Model {model_id} not found");
            }
            // Prefer the configured default provider when it offers the model.
            if let Some(default_provider) = config.default_provider.as_deref() {
                if let Some(found) = matches
                    .iter()
                    .find(|m| provider_ids_match(&m.model.provider, default_provider))
                {
                    selected_model = Some(found.clone());
                }
            }
            if selected_model.is_none() {
                selected_model = select_preferred_exact_id_match(&matches);
            }
        }
    } else if !scoped_models.is_empty() && !is_continuing {
        // Case 4: fresh session with a --models scope — prefer the configured
        // default inside the scope, else the first scoped entry.
        if let (Some(default_provider), Some(default_model)) = (
            config.default_provider.as_deref(),
            config.default_model.as_deref(),
        ) {
            if let Some(found) = scoped_models.iter().find(|sm| {
                provider_ids_match(&sm.model.model.provider, default_provider)
                    && sm.model.model.id.eq_ignore_ascii_case(default_model)
            }) {
                selected_model = Some(found.model.clone());
                if cli.thinking.is_none() {
                    scoped_thinking = found.thinking_level;
                }
            }
        }
        if selected_model.is_none() {
            let first = &scoped_models[0];
            selected_model = Some(first.model.clone());
            if cli.thinking.is_none() {
                scoped_thinking = first.thinking_level;
            }
        }
    }

    // Fall back to the model recorded in the session, when present.
    if selected_model.is_none() {
        if let Some((provider, model_id)) = model_from_session_state(session) {
            let restore = restore_model_from_session(&provider, &model_id, None, registry);
            selected_model = restore.model;
            fallback_message = restore.fallback_message;
            deferred_restore_warning = restore.deferred_warning;
        }
    }

    // Then the configured default provider/model pair.
    if selected_model.is_none() {
        if let (Some(default_provider), Some(default_model)) = (
            config.default_provider.as_deref(),
            config.default_model.as_deref(),
        ) {
            if let Some(found) = registry.find(default_provider, default_model) {
                selected_model = Some(found);
            }
        }
    }

    // Then the best model among those with detected credentials.
    if selected_model.is_none() {
        let available = registry.get_available();
        if !available.is_empty() {
            let fallback = default_model_from_available(&available);
            if fallback_message.is_none() {
                if let Some(warning) = deferred_restore_warning.take() {
                    fallback_message = Some(format!(
                        "{warning} Using {}/{}.",
                        fallback.model.provider, fallback.model.id
                    ));
                }
            }
            selected_model = Some(fallback);
        }
    }

    // If an implicitly-chosen model lacks credentials, substitute one that
    // has them. Explicit CLI choices are respected even without credentials.
    let explicit_model_selection = cli.provider.is_some() || cli.model.is_some();
    let missing_creds = if explicit_model_selection {
        None
    } else {
        selected_model.as_ref().and_then(|entry| {
            if model_entry_is_ready(entry) {
                None
            } else {
                Some((entry.model.provider.clone(), entry.model.id.clone()))
            }
        })
    };
    if let Some((missing_provider, missing_model_id)) = missing_creds {
        let available = registry.get_available();
        if !available.is_empty() {
            let fallback = default_model_from_available(&available);
            if fallback_message.is_none() {
                fallback_message = Some(format!(
                    "Missing credentials for {missing_provider}/{missing_model_id}. Using {}/{} based on detected keys.",
                    fallback.model.provider, fallback.model.id
                ));
            }
            selected_model = Some(fallback);
        } else if !registry.models().is_empty() {
            // No credentials anywhere: pick a catalog model so setup can run.
            let fallback = default_model_from_catalog(registry.models());
            if fallback_message.is_none() {
                fallback_message = Some(format!(
                    "Missing credentials for {missing_provider}/{missing_model_id}. Defaulting to {}/{} for setup.",
                    fallback.model.provider, fallback.model.id
                ));
            }
            selected_model = Some(fallback);
        }
    }

    // Last resort: any catalog model at all.
    if selected_model.is_none() && !registry.models().is_empty() {
        let fallback = default_model_from_catalog(registry.models());
        if fallback_message.is_none() {
            if let Some(warning) = deferred_restore_warning.take() {
                fallback_message = Some(format!(
                    "{warning} Defaulting to {}/{} for setup.",
                    fallback.model.provider, fallback.model.id
                ));
            }
        }
        selected_model = Some(fallback);
    }

    let Some(model_entry) = selected_model else {
        let models_path = default_models_path(global_dir);
        return Err(StartupError::NoModelsAvailable { models_path }.into());
    };

    // Attach any still-deferred restore warning to the final choice.
    if let Some(warning) = deferred_restore_warning.take() {
        fallback_message = Some(match fallback_message.take() {
            Some(message) => format!("{warning} {message}"),
            None => format!(
                "{warning} Using {}/{}.",
                model_entry.model.provider, model_entry.model.id
            ),
        });
    }

    // Thinking level precedence: --thinking flag, --models ":level" suffix,
    // the level saved in a continued session, then the configured default.
    let mut thinking_level: Option<model::ThinkingLevel> = None;

    if let Some(cli_thinking) = cli.thinking.as_deref() {
        thinking_level = Some(parse_thinking_level(cli_thinking)?);
    } else if scoped_thinking.is_some() {
        thinking_level = scoped_thinking;
    } else if is_continuing {
        if let Some(saved) = thinking_level_from_session_state(session) {
            thinking_level = Some(saved);
        }
    }

    if thinking_level.is_none() {
        thinking_level = config
            .default_thinking_level
            .as_deref()
            .and_then(parse_thinking_level_opt);
    }

    // Clamp to what the model supports; unset defaults to XHigh.
    let thinking_level =
        model_entry.clamp_thinking_level(thinking_level.unwrap_or(model::ThinkingLevel::XHigh));

    Ok(ModelSelection {
        model_entry,
        thinking_level,
        scoped_models: scoped_models.to_vec(),
        fallback_message,
    })
}
612
613fn parse_thinking_level(value: &str) -> Result<model::ThinkingLevel> {
614 value
615 .parse()
616 .map_err(|err| anyhow::anyhow!("Invalid thinking level \"{value}\": {err}"))
617}
618
619fn parse_thinking_level_opt(value: &str) -> Option<model::ThinkingLevel> {
620 value.parse().ok()
621}
622
/// (provider, model id) recorded for the session's current path, if any.
fn last_model_from_session(session: &Session) -> Option<(String, String)> {
    session.effective_model_for_current_path()
}
626
627fn last_thinking_level(session: &Session) -> Option<model::ThinkingLevel> {
628 session
629 .effective_thinking_level_for_current_path()
630 .as_deref()
631 .and_then(parse_thinking_level_opt)
632}
633
/// Saved (provider, model id) from session state, if recorded.
fn model_from_session_state(session: &Session) -> Option<(String, String)> {
    last_model_from_session(session)
}
637
/// Saved thinking level from session state, if recorded and parsable.
fn thinking_level_from_session_state(session: &Session) -> Option<model::ThinkingLevel> {
    last_thinking_level(session)
}
641
/// Records the selected model and thinking level on the session.
///
/// The previously-stored provider/model spelling is kept when it already
/// matches the selection case-insensitively, and change events are appended
/// only when the model or thinking level actually changed.
pub fn update_session_for_selection(session: &mut Session, selection: &ModelSelection) {
    let previous_model = model_from_session_state(session);
    let previous_thinking = thinking_level_from_session_state(session);
    let (stored_provider, stored_model_id, model_changed) = match previous_model {
        // Same model as before (case-insensitive): keep the stored spelling.
        Some((provider, model_id))
            if provider_ids_match(&provider, &selection.model_entry.model.provider)
                && model_id.eq_ignore_ascii_case(&selection.model_entry.model.id) =>
        {
            (provider, model_id, false)
        }
        _ => (
            selection.model_entry.model.provider.clone(),
            selection.model_entry.model.id.clone(),
            true,
        ),
    };

    // The header always reflects the current selection.
    session.set_model_header(
        Some(stored_provider.clone()),
        Some(stored_model_id.clone()),
        Some(selection.thinking_level.to_string()),
    );

    if model_changed {
        session.append_model_change(stored_provider, stored_model_id);
    }

    let thinking_changed = previous_thinking != Some(selection.thinking_level);

    if thinking_changed {
        session.append_thinking_level_change(selection.thinking_level.to_string());
    }
}
675
676fn restore_model_from_session(
677 saved_provider: &str,
678 saved_model_id: &str,
679 current_model: Option<ModelEntry>,
680 registry: &ModelRegistry,
681) -> RestoreResult {
682 let restored = registry
683 .find(saved_provider, saved_model_id)
684 .or_else(|| crate::models::ad_hoc_model_entry(saved_provider, saved_model_id));
685
686 if restored.is_some() {
687 return RestoreResult {
688 model: restored,
689 fallback_message: None,
690 deferred_warning: None,
691 };
692 }
693
694 let reason = "model no longer exists";
695
696 if let Some(current) = current_model {
697 return RestoreResult {
698 model: Some(current.clone()),
699 fallback_message: Some(format!(
700 "Could not restore model {saved_provider}/{saved_model_id} ({reason}). Using {}/{}.",
701 current.model.provider, current.model.id
702 )),
703 deferred_warning: None,
704 };
705 }
706
707 let available = registry.get_available();
708 if !available.is_empty() {
709 let fallback = default_model_from_available(&available);
710 return RestoreResult {
711 model: Some(fallback.clone()),
712 fallback_message: Some(format!(
713 "Could not restore model {saved_provider}/{saved_model_id} ({reason}). Using {}/{}.",
714 fallback.model.provider, fallback.model.id
715 )),
716 deferred_warning: None,
717 };
718 }
719
720 RestoreResult {
721 model: None,
722 fallback_message: None,
723 deferred_warning: Some(format!(
724 "Could not restore model {saved_provider}/{saved_model_id} ({reason})."
725 )),
726 }
727}
728
/// Picks the preferred default among models with detected credentials.
fn default_model_from_available(available: &[ModelEntry]) -> ModelEntry {
    default_model_from_candidates(available)
}
732
/// Picks the preferred default from the full catalog, credentials or not.
fn default_model_from_catalog(models: &[ModelEntry]) -> ModelEntry {
    default_model_from_candidates(models)
}
736
737pub fn bootstrap_model_entry(registry: &ModelRegistry) -> Option<ModelEntry> {
738 let available = registry.get_available();
739 if !available.is_empty() {
740 return Some(default_model_from_available(&available));
741 }
742
743 (!registry.models().is_empty()).then(|| default_model_from_catalog(registry.models()))
744}
745
746fn select_preferred_exact_id_match(candidates: &[ModelEntry]) -> Option<ModelEntry> {
747 if candidates.is_empty() {
748 return None;
749 }
750
751 let ready_candidates: Vec<ModelEntry> = candidates
752 .iter()
753 .filter(|entry| model_entry_is_ready(entry))
754 .cloned()
755 .collect();
756 let preferred_pool = if ready_candidates.is_empty() {
757 candidates
758 } else {
759 ready_candidates.as_slice()
760 };
761
762 Some(default_model_from_candidates(preferred_pool))
763}
764
/// Chooses the best default from `candidates` using a hard-coded preference
/// table of (provider, model id) pairs — earlier entries win. Providers are
/// compared by canonical id, model ids case-insensitively. Falls back to
/// the first candidate when nothing in the table matches.
///
/// # Panics
/// Panics if `candidates` is empty; callers check for emptiness first.
fn default_model_from_candidates(candidates: &[ModelEntry]) -> ModelEntry {
    // Preference order: earlier pairs are tried first.
    let defaults = [
        ("openai-codex", "gpt-5.4"),
        ("openai-codex", "gpt-5.3-codex"),
        ("openai-codex", "gpt-5.2-codex"),
        ("openai-codex", "gpt-5.1-codex-max"),
        ("openai", "gpt-5.4"),
        ("openai", "gpt-5.3-codex"),
        ("openai", "gpt-5.2-codex"),
        ("openai", "gpt-5.1-codex"),
        ("amazon-bedrock", "us.anthropic.claude-opus-4-20250514-v1:0"),
        ("anthropic", "claude-opus-4-5"),
        ("azure-openai-responses", "gpt-5.2"),
        ("google", "gemini-2.5-pro"),
        ("google-gemini-cli", "gemini-2.5-pro"),
        ("google-antigravity", "gemini-3-pro-high"),
        ("google-vertex", "gemini-3-pro-preview"),
        ("github-copilot", "gpt-4o"),
        ("openrouter", "openai/gpt-5.1-codex"),
        ("vercel-ai-gateway", "anthropic/claude-opus-4.5"),
        ("xai", "grok-4-fast-non-reasoning"),
        ("groq", "openai/gpt-oss-120b"),
        ("cerebras", "zai-glm-4.6"),
        ("zai", "glm-4.6"),
        ("mistral", "devstral-medium-latest"),
        ("minimax", "MiniMax-M2.5"),
        ("minimax-cn", "MiniMax-M2.5"),
        ("huggingface", "moonshotai/Kimi-K2.5"),
        ("opencode", "claude-opus-4-6"),
        ("kimi-coding", "kimi-k2-thinking"),
    ];

    // Normalize provider ids before comparing (canonical form, lowercase).
    let canonical = |provider: &str| {
        canonical_provider_id(provider)
            .unwrap_or(provider)
            .to_ascii_lowercase()
    };

    for (provider, model_id) in defaults {
        if let Some(found) = candidates.iter().find(|m| {
            canonical(&m.model.provider) == canonical(provider)
                && m.model.id.eq_ignore_ascii_case(model_id)
        }) {
            return found.clone();
        }
    }

    // No preferred match: fall back to the first candidate (panics if empty).
    candidates[0].clone()
}
816
817pub fn resolve_api_key(
818 auth: &AuthStorage,
819 cli: &cli::Cli,
820 entry: &ModelEntry,
821) -> Result<Option<String>> {
822 let key = normalize_api_key_opt(cli.api_key.clone())
823 .or_else(|| normalize_api_key_opt(auth.resolve_api_key(&entry.model.provider, None)))
824 .or_else(|| normalize_api_key_opt(entry.api_key.clone()));
825
826 if model_requires_configured_credential(entry) && key.is_none() {
827 return Err(StartupError::MissingApiKey {
828 provider: entry.model.provider.clone(),
829 }
830 .into());
831 }
832
833 Ok(key)
834}
835
836pub fn build_stream_options(
837 config: &Config,
838 api_key: Option<String>,
839 selection: &ModelSelection,
840 session: &Session,
841) -> StreamOptions {
842 let mut options = StreamOptions {
843 api_key,
844 headers: selection.model_entry.headers.clone(),
845 session_id: Some(session.header.id.clone()),
846 ..Default::default()
847 };
848
849 options.thinking_level = Some(selection.thinking_level);
850
851 if let Some(budgets) = &config.thinking_budgets {
852 let defaults = ThinkingBudgets::default();
853 options.thinking_budgets = Some(ThinkingBudgets {
854 minimal: budgets.minimal.unwrap_or(defaults.minimal),
855 low: budgets.low.unwrap_or(defaults.low),
856 medium: budgets.medium.unwrap_or(defaults.medium),
857 high: budgets.high.unwrap_or(defaults.high),
858 xhigh: budgets.xhigh.unwrap_or(defaults.xhigh),
859 });
860 }
861
862 options
863}
864
/// Splits a comma-separated `--models` argument into trimmed, non-empty
/// pattern strings.
pub fn parse_models_arg(models: &str) -> Vec<String> {
    let mut patterns = Vec::new();
    for part in models.split(',') {
        let part = part.trim();
        if !part.is_empty() {
            patterns.push(part.to_string());
        }
    }
    patterns
}
875
/// Expands `--models` patterns into concrete [`ScopedModel`]s.
///
/// Glob patterns (containing `*`, `?`, or `[`) are matched case-insensitively
/// against both `provider/id` and the bare model id; other patterns go
/// through [`parse_model_pattern`]. Either form may carry a trailing
/// `:level` thinking-level suffix. Problems are reported on stderr as
/// warnings, never as errors, and duplicates are skipped (first pattern wins).
pub fn resolve_model_scope(
    patterns: &[String],
    registry: &ModelRegistry,
    allow_missing_keys: bool,
) -> Vec<ScopedModel> {
    // With allow_missing_keys the whole catalog is eligible, otherwise only
    // models whose credentials were detected.
    let available_models = if allow_missing_keys {
        registry.models().to_vec()
    } else {
        registry.get_available()
    };

    let mut scoped_models: Vec<ScopedModel> = Vec::new();

    for pattern in patterns {
        if pattern.contains('*') || pattern.contains('?') || pattern.contains('[') {
            // Glob path: strip a valid trailing ":level" suffix first.
            let mut glob_pattern = pattern.as_str();
            let mut thinking_level = None;
            if let Some((prefix, suffix)) = pattern.rsplit_once(':') {
                if let Some(parsed) = parse_thinking_level_opt(suffix) {
                    thinking_level = Some(parsed);
                    glob_pattern = prefix;
                }
            }

            let glob = match Pattern::new(&glob_pattern.to_lowercase()) {
                Ok(glob) => glob,
                Err(err) => {
                    eprintln!("Warning: Invalid model pattern \"{pattern}\": {err}");
                    continue;
                }
            };

            let mut matched_any = false;
            for model in &available_models {
                // Match against "provider/id" as well as the bare id.
                let full_id = format!("{}/{}", model.model.provider, model.model.id);
                let candidate_full = full_id.to_lowercase();
                let candidate_id = model.model.id.to_lowercase();
                if glob.matches(&candidate_full) || glob.matches(&candidate_id) {
                    matched_any = true;
                    // De-duplicate across patterns; the first pattern wins.
                    if !scoped_models
                        .iter()
                        .any(|sm| models_equal(&sm.model, model))
                    {
                        scoped_models.push(ScopedModel {
                            model: model.clone(),
                            thinking_level,
                        });
                    }
                }
            }

            if !matched_any {
                eprintln!("Warning: No models match pattern \"{pattern}\"");
            }
            continue;
        }

        // Non-glob path: exact/fuzzy matching with optional ":level" suffix.
        let parsed = parse_model_pattern(pattern, &available_models);
        if let Some(warning) = parsed.warning {
            eprintln!("Warning: {warning}");
        }

        if let Some(model) = parsed.model {
            if !scoped_models
                .iter()
                .any(|sm| models_equal(&sm.model, &model))
            {
                scoped_models.push(ScopedModel {
                    model,
                    thinking_level: parsed.thinking_level,
                });
            }
        } else {
            eprintln!("Warning: No models match pattern \"{pattern}\"");
        }
    }

    scoped_models
}
955
/// Matches one non-glob `--models` pattern, which may end in `:level`.
///
/// Resolution order:
/// 1. If the suffix parses as a thinking level, match the prefix recursively
///    and attach the level (dropped when that match produced a warning).
/// 2. Try the entire pattern as-is (a model id may itself contain `:`).
/// 3. Otherwise retry the prefix and warn that the suffix is not a level.
fn parse_model_pattern(pattern: &str, available_models: &[ModelEntry]) -> ParsedModelResult {
    if let Some((prefix, suffix)) = pattern.rsplit_once(':') {
        if let Some(thinking_level) = parse_thinking_level_opt(suffix) {
            let result = parse_model_pattern(prefix, available_models);
            if result.model.is_some() {
                return ParsedModelResult {
                    model: result.model,
                    // A warning from the inner match invalidates the suffix.
                    thinking_level: if result.warning.is_some() {
                        None
                    } else {
                        Some(thinking_level)
                    },
                    warning: result.warning,
                };
            }
        }
    }

    // The whole pattern may itself name a model (ids can contain ':').
    if let Some(model) = try_match_model(pattern, available_models) {
        return ParsedModelResult {
            model: Some(model),
            thinking_level: None,
            warning: None,
        };
    }

    // No ':' to strip — the pattern simply matched nothing.
    let Some((prefix, suffix)) = pattern.rsplit_once(':') else {
        return ParsedModelResult {
            model: None,
            thinking_level: None,
            warning: None,
        };
    };

    // The suffix was not a valid level: retry the prefix and warn.
    let result = parse_model_pattern(prefix, available_models);
    if result.model.is_some() {
        return ParsedModelResult {
            model: result.model,
            thinking_level: None,
            warning: Some(format!(
                "Invalid thinking level \"{suffix}\" in pattern \"{pattern}\". Using default instead."
            )),
        };
    }

    result
}
1007
/// Resolves a pattern to a single model entry.
///
/// Tries, in order: an explicit `provider/id` spec (registry match, then an
/// ad-hoc entry); a case-insensitive exact id match (credentialed models
/// preferred); then a substring match against id or display name, where
/// alias ids (see [`is_alias`]) beat dated snapshots and ids sort
/// lexicographically descending within each group.
fn try_match_model(pattern: &str, available_models: &[ModelEntry]) -> Option<ModelEntry> {
    // 1. Explicit "provider/id" spec.
    if let Some((provider, model_id)) = split_provider_model_spec(pattern) {
        if let Some(found) = available_models.iter().find(|m| {
            provider_ids_match(&m.model.provider, provider)
                && m.model.id.eq_ignore_ascii_case(model_id)
        }) {
            return Some(found.clone());
        }

        if let Some(ad_hoc) = crate::models::ad_hoc_model_entry(provider, model_id) {
            return Some(ad_hoc);
        }
    }

    // 2. Exact id match across providers.
    let exact_matches: Vec<ModelEntry> = available_models
        .iter()
        .filter(|m| m.model.id.eq_ignore_ascii_case(pattern))
        .cloned()
        .collect();
    if let Some(found) = select_preferred_exact_id_match(&exact_matches) {
        return Some(found);
    }

    // 3. Case-insensitive substring match on id or display name.
    let pattern_lower = pattern.to_lowercase();
    let matches: Vec<ModelEntry> = available_models
        .iter()
        .filter(|m| {
            m.model.id.to_lowercase().contains(&pattern_lower)
                || m.model.name.to_lowercase().contains(&pattern_lower)
        })
        .cloned()
        .collect();

    if matches.is_empty() {
        return None;
    }

    // Split substring matches into alias ids vs dated snapshot ids.
    let mut aliases: Vec<ModelEntry> = matches
        .iter()
        .filter(|m| is_alias(&m.model.id))
        .cloned()
        .collect();
    let mut dated: Vec<ModelEntry> = matches
        .iter()
        .filter(|m| !is_alias(&m.model.id))
        .cloned()
        .collect();

    // Aliases win; within a group, ids sort descending (lexicographically).
    if !aliases.is_empty() {
        aliases.sort_by(|a, b| b.model.id.cmp(&a.model.id));
        return aliases.first().cloned();
    }

    dated.sort_by(|a, b| b.model.id.cmp(&a.model.id));
    dated.first().cloned()
}
1064
/// Heuristically classifies a model id as an "alias" (a floating name such as
/// `…-latest` or an undated id) rather than a dated snapshot release.
///
/// Dated forms recognized as non-aliases: a trailing `-YYYY-MM-DD` triple, or
/// a final `-`-separated segment of exactly 8 or 4 ASCII digits. Everything
/// else — including ids with no `-` at all — counts as an alias. All length
/// checks are byte lengths, so non-ASCII suffixes never match a dated form.
fn is_alias(model_id: &str) -> bool {
    if model_id.ends_with("-latest") {
        return true;
    }

    let digits = |s: &str| s.chars().all(|c| c.is_ascii_digit());

    // Trailing "-YYYY-MM-DD" marks a dated snapshot.
    let parts: Vec<&str> = model_id.split('-').collect();
    if let [.., y, m, d] = parts.as_slice() {
        if y.len() == 4 && m.len() == 2 && d.len() == 2 && digits(y) && digits(m) && digits(d) {
            return false;
        }
    }

    // No '-' separator at all: nothing that could be a date suffix.
    let Some((_, suffix)) = model_id.rsplit_once('-') else {
        return true;
    };

    // "-YYYYMMDD" or "-MMDD" style numeric suffixes also mark dated releases.
    if (suffix.len() == 8 || suffix.len() == 4) && digits(suffix) {
        return false;
    }

    true
}
1101
1102fn models_equal(left: &ModelEntry, right: &ModelEntry) -> bool {
1103 provider_ids_match(&left.model.provider, &right.model.provider)
1104 && left.model.id.eq_ignore_ascii_case(&right.model.id)
1105}
1106
1107pub fn output_final_text(message: &AssistantMessage) {
1108 for block in &message.content {
1109 if let ContentBlock::Text(text) = block {
1110 println!("{}", text.text);
1111 }
1112 }
1113}
1114
/// Renders the session as HTML by delegating to [`Session::to_html`].
pub fn render_session_html(session: &Session) -> String {
    session.to_html()
}
1118
1119#[cfg(test)]
1120mod tests {
1121 use std::collections::HashMap;
1122
1123 use clap::Parser;
1124 use tempfile::tempdir;
1125
1126 use super::*;
1127 use crate::auth::AuthStorage;
1128 use crate::provider::{InputType, Model, ModelCost};
1129
    // Builds a minimal ModelEntry fixture (dummy key, zero costs); only the
    // id, provider, and reasoning flag vary between tests.
    fn test_model_entry(id: &str, provider: &str, reasoning: bool) -> ModelEntry {
        ModelEntry {
            model: Model {
                id: id.to_string(),
                name: id.to_string(),
                api: "openai-responses".to_string(),
                provider: provider.to_string(),
                base_url: "https://example.test/v1".to_string(),
                reasoning,
                input: vec![InputType::Text],
                cost: ModelCost {
                    input: 0.0,
                    output: 0.0,
                    cache_read: 0.0,
                    cache_write: 0.0,
                },
                context_window: 128_000,
                max_tokens: 8_192,
                headers: HashMap::new(),
            },
            api_key: Some("test-key".to_string()),
            headers: HashMap::new(),
            auth_header: true,
            compat: None,
            oauth_config: None,
        }
    }
1157
    // Loads a registry backed by an empty temp-dir AuthStorage and merges the
    // given entries into it.
    fn registry_with_entries(entries: Vec<ModelEntry>) -> ModelRegistry {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut registry = ModelRegistry::load(&auth, None);
        registry.merge_entries(entries);
        registry
    }
1165
    // CSV model patterns: segments are trimmed and empty segments dropped.
    #[test]
    fn parse_models_arg_splits_and_trims() {
        assert_eq!(
            parse_models_arg("gpt-4*, claude* ,,"),
            vec!["gpt-4*".to_string(), "claude*".to_string()]
        );
    }
1173
    // Azure provider: gpt-5.2 is chosen over gpt-4o-mini regardless of order.
    #[test]
    fn default_model_from_available_prefers_azure_legacy_default() {
        let available = vec![
            test_model_entry("gpt-4o-mini", "azure-openai-responses", true),
            test_model_entry("gpt-5.2", "azure-openai-responses", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "azure-openai-responses");
        assert_eq!(selected.model.id, "gpt-5.2");
    }
1185
    // Vercel gateway: the namespaced claude id is preferred over gpt-4o-mini.
    #[test]
    fn default_model_from_available_applies_vercel_gateway_alias_mapping() {
        let available = vec![
            test_model_entry("gpt-4o-mini", "vercel", true),
            test_model_entry("anthropic/claude-opus-4.5", "vercel", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "vercel");
        assert_eq!(selected.model.id, "anthropic/claude-opus-4.5");
    }
1197
    // A keyless entry with auth_header=false (e.g. local ollama) resolves to None.
    #[test]
    fn resolve_api_key_allows_keyless_model_when_credentials_not_required() {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut entry = test_model_entry("llama3.2", "ollama", false);
        entry.api_key = None;
        entry.auth_header = false;

        let cli = cli::Cli::parse_from(["pi"]);
        let resolved = resolve_api_key(&auth, &cli, &entry).expect("resolve keyless model");
        assert!(resolved.is_none());
    }
1210
    // A keyless entry with auth_header=true must fail with MissingApiKey.
    #[test]
    fn resolve_api_key_still_requires_credentials_for_remote_provider() {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut entry = test_model_entry("gpt-4o-mini", "openai", true);
        entry.api_key = None;
        entry.auth_header = true;

        let cli = cli::Cli::parse_from(["pi"]);
        let err = resolve_api_key(&auth, &cli, &entry).unwrap_err();
        let startup = err
            .downcast_ref::<StartupError>()
            .expect("missing key should map to startup error");
        assert!(matches!(
            startup,
            StartupError::MissingApiKey { provider } if provider == "openai"
        ));
    }
1229
    // kimi-for-coding: the thinking variant is preferred over instruct.
    #[test]
    fn default_model_from_available_applies_kimi_coding_alias_mapping() {
        let available = vec![
            test_model_entry("kimi-k2-instruct", "kimi-for-coding", true),
            test_model_entry("kimi-k2-thinking", "kimi-for-coding", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "kimi-for-coding");
        assert_eq!(selected.model.id, "kimi-k2-thinking");
    }
1241
    // openai-codex: gpt-5.4 is chosen over gpt-5.3-codex.
    #[test]
    fn default_model_from_available_prefers_latest_openai_codex_default() {
        let available = vec![
            test_model_entry("gpt-5.3-codex", "openai-codex", true),
            test_model_entry("gpt-5.4", "openai-codex", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "openai-codex");
        assert_eq!(selected.model.id, "gpt-5.4");
    }
1253
    // Default id matching ignores case but preserves the entry's original id.
    #[test]
    fn default_model_from_available_matches_default_id_case_insensitively() {
        let available = vec![test_model_entry("GPT-5.4", "openai-codex", true)];
        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "openai-codex");
        assert_eq!(selected.model.id, "GPT-5.4");
    }
1261
    // Piped stdin is trimmed, prepended to args, and forces print mode.
    #[test]
    fn apply_piped_stdin_trims_newlines_and_prepends_message() {
        let mut cli = cli::Cli::parse_from(["pi", "existing-message"]);
        apply_piped_stdin(&mut cli, Some("from-stdin\n".to_string()));

        assert!(cli.print);
        assert_eq!(
            cli.args,
            vec!["from-stdin".to_string(), "existing-message".to_string()]
        );
    }
1273
    // Whitespace-only stdin is a no-op: args and print flag stay untouched.
    #[test]
    fn apply_piped_stdin_ignores_empty_input() {
        let mut cli = cli::Cli::parse_from(["pi", "existing-message"]);
        apply_piped_stdin(&mut cli, Some("\n".to_string()));

        assert!(!cli.print);
        assert_eq!(cli.args, vec!["existing-message".to_string()]);
    }
1282
    // normalize_cli: --print implies no_session; provider id is lowercased.
    #[test]
    fn normalize_cli_enables_no_session_for_print_and_lowercases_provider() {
        let mut cli = cli::Cli::parse_from(["pi", "--provider", "OpenAI", "--print", "hello"]);
        assert!(!cli.no_session);
        assert_eq!(cli.provider.as_deref(), Some("OpenAI"));

        normalize_cli(&mut cli);

        assert!(cli.no_session);
        assert_eq!(cli.provider.as_deref(), Some("openai"));
    }
1294
    // RPC mode rejects @file args with a descriptive error.
    #[test]
    fn validate_rpc_args_rejects_file_arguments() {
        let cli = cli::Cli::parse_from(["pi", "--mode", "rpc", "@src/main.rs", "hello"]);

        let err = validate_rpc_args(&cli).expect_err("rpc mode should reject @file args");
        assert!(
            err.to_string()
                .contains("@file arguments are not supported in RPC mode")
        );
    }
1305
    // Non-RPC modes (here: json) accept @file args.
    #[test]
    fn validate_rpc_args_allows_non_rpc_file_arguments() {
        let cli = cli::Cli::parse_from(["pi", "--mode", "json", "@src/main.rs", "hello"]);
        assert!(validate_rpc_args(&cli).is_ok());
    }
1311
    // Substring match: the "-latest" alias wins over a dated snapshot.
    #[test]
    fn parse_model_pattern_prefers_alias_when_alias_and_dated_match() {
        let available = vec![
            test_model_entry("gpt-5.1-codex-20250101", "openai", true),
            test_model_entry("gpt-5.1-codex-latest", "openai", true),
        ];

        let parsed = parse_model_pattern("gpt-5.1-codex", &available);
        let model = parsed.model.expect("model should match");

        assert_eq!(model.model.id, "gpt-5.1-codex-latest");
        assert!(parsed.thinking_level.is_none());
        assert!(parsed.warning.is_none());
    }
1326
    // "open-router/…" resolves to the registered "openrouter" entry and keeps
    // its metadata (headers) rather than building an ad-hoc entry.
    #[test]
    fn try_match_model_prefers_existing_entry_for_provider_alias() {
        let mut openrouter = test_model_entry("openai/gpt-4o-mini", "openrouter", true);
        openrouter
            .headers
            .insert("x-test".to_string(), "1".to_string());

        let matched = try_match_model("open-router/openai/gpt-4o-mini", &[openrouter.clone()])
            .expect("provider alias should match existing entry");

        assert_eq!(matched.model.provider, "openrouter");
        assert_eq!(matched.model.id, "openai/gpt-4o-mini");
        assert_eq!(
            matched.headers.get("x-test").map(String::as_str),
            Some("1"),
            "must preserve existing model metadata instead of falling back to ad-hoc"
        );
    }
1345
    // --provider with an alias ("open-router") resolves to the canonical
    // "openrouter" registry entry.
    #[test]
    fn select_model_and_thinking_provider_only_accepts_provider_alias() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "open-router"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![test_model_entry(
            "openai/gpt-4o-mini",
            "openrouter",
            true,
        )]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider alias should resolve");

        assert!(provider_ids_match(
            &selection.model_entry.model.provider,
            "open-router"
        ));
        assert!(!selection.model_entry.model.id.is_empty());
    }
1367
    // Provider-only selection skips an entry missing credentials in favor of a
    // keyless entry that is ready to use.
    #[test]
    fn select_model_and_thinking_provider_only_prefers_ready_model() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "acme"]);
        let config = Config::default();
        let session = Session::in_memory();

        let mut unready_remote = test_model_entry("cloud-model", "acme", true);
        unready_remote.api_key = None;
        unready_remote.auth_header = true;

        let mut keyless_ready = test_model_entry("local-model", "acme", false);
        keyless_ready.api_key = None;
        keyless_ready.auth_header = false;

        let registry = registry_with_entries(vec![unready_remote, keyless_ready]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider selection should prefer ready models");

        assert_eq!(selection.model_entry.model.provider, "acme");
        assert_eq!(selection.model_entry.model.id, "local-model");
    }
1390
    // Provider-only selection uses the provider's preferred default (gpt-5.4)
    // rather than the first registry entry.
    #[test]
    fn select_model_and_thinking_provider_only_prefers_provider_default_over_registry_order() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "openai"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![
            test_model_entry("gpt-4o", "openai", true),
            test_model_entry("gpt-5.4", "openai", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider-only selection should honor preferred defaults");

        assert_eq!(selection.model_entry.model.provider, "openai");
        assert_eq!(selection.model_entry.model.id, "gpt-5.4");
    }
1408
    // A configured default_model overrides the provider's preferred default.
    #[test]
    fn select_model_and_thinking_provider_only_honors_configured_default_model() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "openai"]);
        let config = Config {
            default_model: Some("gpt-4o-mini".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![
            test_model_entry("gpt-5.4", "openai", true),
            test_model_entry("gpt-4o-mini", "openai", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider-only selection should honor configured default_model");

        assert_eq!(selection.model_entry.model.provider, "openai");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
    }
1429
    // A configured default_model that lacks credentials is skipped in favor of
    // a ready keyless model.
    #[test]
    fn select_model_and_thinking_provider_only_skips_unready_configured_default_model() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "acme"]);
        let config = Config {
            default_model: Some("cloud-model".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();

        let mut unready_remote = test_model_entry("cloud-model", "acme", true);
        unready_remote.api_key = None;
        unready_remote.auth_header = true;

        let mut keyless_ready = test_model_entry("local-model", "acme", false);
        keyless_ready.api_key = None;
        keyless_ready.auth_header = false;

        let registry = registry_with_entries(vec![unready_remote, keyless_ready]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider-only selection should still prefer a ready model");

        assert_eq!(selection.model_entry.model.provider, "acme");
        assert_eq!(selection.model_entry.model.id, "local-model");
    }
1455
    // When the session's saved model no longer exists, selection falls back to
    // the setup default and surfaces the restore failure in fallback_message.
    #[test]
    fn select_model_and_thinking_preserves_restore_warning_when_defaulting_for_setup() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config::default();
        let mut session = Session::in_memory();
        session.append_model_change("missing-provider".to_string(), "missing-model".to_string());

        let mut setup_default = test_model_entry("gpt-5.4", "openai-codex", true);
        setup_default.api_key = None;
        setup_default.auth_header = true;

        let registry = registry_with_entries(vec![setup_default]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("selection should fall back to a stable setup model");

        assert_eq!(selection.model_entry.model.provider, "openai-codex");
        assert_eq!(selection.model_entry.model.id, "gpt-5.4");
        assert_eq!(
            selection.fallback_message.as_deref(),
            Some(
                "Could not restore model missing-provider/missing-model (model no longer exists). Defaulting to openai-codex/gpt-5.4 for setup."
            )
        );
    }
1481
    // Restore failure falls through to the configured default model and the
    // warning text reflects that ("Using …" instead of "Defaulting to …").
    #[test]
    fn select_model_and_thinking_preserves_restore_warning_when_using_config_default() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config {
            default_provider: Some("openai-codex".to_string()),
            default_model: Some("gpt-4o-mini".to_string()),
            ..Config::default()
        };
        let mut session = Session::in_memory();
        session.append_model_change("missing-provider".to_string(), "missing-model".to_string());

        let registry =
            registry_with_entries(vec![test_model_entry("gpt-4o-mini", "openai-codex", true)]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("selection should use the configured default model");

        assert_eq!(selection.model_entry.model.provider, "openai-codex");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
        assert_eq!(
            selection.fallback_message.as_deref(),
            Some(
                "Could not restore model missing-provider/missing-model (model no longer exists). Using openai-codex/gpt-4o-mini."
            )
        );
    }
1508
    // A session with no history entries but a populated header restores the
    // model recorded in the header.
    #[test]
    fn select_model_and_thinking_restores_model_from_header_when_history_missing() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config::default();
        let mut session = Session::in_memory();
        session.header.provider = Some("openai-codex".to_string());
        session.header.model_id = Some("gpt-5.4".to_string());

        let registry = registry_with_entries(vec![
            test_model_entry("gpt-5.4", "openai-codex", true),
            test_model_entry("gpt-4o-mini", "openai", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("header-only session should restore saved model");

        assert_eq!(selection.model_entry.model.provider, "openai-codex");
        assert_eq!(selection.model_entry.model.id, "gpt-5.4");
    }
1529
    // With --continue, the header's saved thinking level ("high") is restored.
    #[test]
    fn select_model_and_thinking_restores_thinking_from_header_when_history_missing() {
        let cli = cli::Cli::parse_from(["pi", "--continue"]);
        let config = Config::default();
        let mut session = Session::in_memory();
        session.header.provider = Some("openai-codex".to_string());
        session.header.model_id = Some("gpt-5.4".to_string());
        session.header.thinking_level = Some("high".to_string());

        let registry =
            registry_with_entries(vec![test_model_entry("gpt-5.4", "openai-codex", true)]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("header-only session should restore saved thinking level");

        assert_eq!(selection.thinking_level, model::ThinkingLevel::High);
    }
1547
    // Model restore follows the active branch: a model change on an abandoned
    // branch (anthropic) must not win over the active branch's change.
    #[test]
    fn select_model_and_thinking_restores_model_from_active_branch_only() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config::default();
        let mut session = Session::in_memory();
        let root_id = session.append_message(crate::session::SessionMessage::User {
            content: crate::model::UserContent::Text("root".to_string()),
            timestamp: Some(0),
        });
        let openai_id =
            session.append_model_change("openai-codex".to_string(), "test-gpt-5.4".to_string());
        assert!(session.create_branch_from(&root_id));
        session.append_model_change("anthropic".to_string(), "test-claude-sonnet-4".to_string());
        assert!(session.create_branch_from(&openai_id));

        let registry = registry_with_entries(vec![
            test_model_entry("test-gpt-5.4", "openai-codex", true),
            test_model_entry("test-claude-sonnet-4", "anthropic", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("active branch model should restore");

        assert_eq!(selection.model_entry.model.provider, "openai-codex");
        assert_eq!(selection.model_entry.model.id, "test-gpt-5.4");
    }
1575
    // Thinking-level restore also follows the active branch ("high"), not the
    // abandoned branch's "minimal".
    #[test]
    fn select_model_and_thinking_restores_thinking_from_active_branch_only() {
        let cli = cli::Cli::parse_from(["pi", "--continue"]);
        let config = Config::default();
        let mut session = Session::in_memory();
        session.header.provider = Some("openai-codex".to_string());
        session.header.model_id = Some("gpt-5.4".to_string());
        let root_id = session.append_message(crate::session::SessionMessage::User {
            content: crate::model::UserContent::Text("root".to_string()),
            timestamp: Some(0),
        });
        let high_id = session.append_thinking_level_change("high".to_string());
        assert!(session.create_branch_from(&root_id));
        session.append_thinking_level_change("minimal".to_string());
        assert!(session.create_branch_from(&high_id));

        let registry =
            registry_with_entries(vec![test_model_entry("gpt-5.4", "openai-codex", true)]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("active branch thinking level should restore");

        assert_eq!(selection.thinking_level, model::ThinkingLevel::High);
    }
1600
    // If the selection already matches the header state, no history entries
    // are appended.
    #[test]
    fn update_session_for_selection_skips_duplicate_changes_for_header_only_session() {
        let mut session = Session::in_memory();
        session.header.provider = Some("openai-codex".to_string());
        session.header.model_id = Some("gpt-5.4".to_string());
        session.header.thinking_level = Some("high".to_string());

        let selection = ModelSelection {
            model_entry: test_model_entry("gpt-5.4", "openai-codex", true),
            thinking_level: model::ThinkingLevel::High,
            scoped_models: Vec::new(),
            fallback_message: None,
        };

        update_session_for_selection(&mut session, &selection);

        assert!(
            session.entries.is_empty(),
            "header-only session with unchanged selection should not invent history entries"
        );
    }
1622
    // An alias-equivalent selection (google/GEMINI-2.5-PRO vs recorded
    // gemini/gemini-2.5-pro) keeps the existing history and header untouched.
    #[test]
    fn update_session_for_selection_preserves_alias_equivalent_model_state() {
        let mut session = Session::in_memory();
        session.append_model_change("gemini".to_string(), "gemini-2.5-pro".to_string());
        session.set_model_header(
            Some("gemini".to_string()),
            Some("gemini-2.5-pro".to_string()),
            Some("high".to_string()),
        );

        let selection = ModelSelection {
            model_entry: test_model_entry("GEMINI-2.5-PRO", "google", true),
            thinking_level: model::ThinkingLevel::High,
            scoped_models: Vec::new(),
            fallback_message: None,
        };

        update_session_for_selection(&mut session, &selection);

        let model_changes: Vec<_> = session
            .entries_for_current_path()
            .iter()
            .filter_map(|entry| {
                if let crate::session::SessionEntry::ModelChange(change) = entry {
                    Some((change.provider.as_str(), change.model_id.as_str()))
                } else {
                    None
                }
            })
            .collect();
        assert_eq!(
            model_changes,
            vec![("gemini", "gemini-2.5-pro")],
            "alias-equivalent startup restore should not append duplicate history"
        );
        assert_eq!(session.header.provider.as_deref(), Some("gemini"));
        assert_eq!(session.header.model_id.as_deref(), Some("gemini-2.5-pro"));
    }
1661
    // --model with a duplicated id prefers the entry matching the configured
    // default_provider, even when given as an alias ("open-router").
    #[test]
    fn select_model_and_thinking_model_only_prefers_default_provider_alias() {
        let model_id = "__test-openrouter-alias-model__";
        let cli = cli::Cli::parse_from(["pi", "--model", model_id]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![
            test_model_entry(model_id, "openai", true),
            test_model_entry(model_id, "openrouter", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("default provider alias should resolve in model-only selection");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, model_id);
    }
1683
    // --model matching is case-insensitive; the registry's original id casing
    // is kept.
    #[test]
    fn select_model_and_thinking_model_only_matches_case_insensitively() {
        let model_id = "__test-case-insensitive-model__";
        let cli = cli::Cli::parse_from(["pi", "--model", "__TEST-CASE-INSENSITIVE-MODEL__"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![test_model_entry(model_id, "openai", true)]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("model-only selection should be case-insensitive");

        assert_eq!(selection.model_entry.model.provider, "openai");
        assert_eq!(selection.model_entry.model.id, model_id);
    }
1699
    // Duplicate exact-id matches resolve via provider preference ordering
    // (openai-codex over openai).
    #[test]
    fn select_model_and_thinking_model_only_prefers_openai_codex_for_duplicate_latest_id() {
        let cli = cli::Cli::parse_from(["pi", "--model", "gpt-5.4"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![
            test_model_entry("gpt-5.4", "openai", true),
            test_model_entry("gpt-5.4", "openai-codex", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("duplicate exact-id matches should honor preferred provider ordering");

        assert_eq!(selection.model_entry.model.provider, "openai-codex");
        assert_eq!(selection.model_entry.model.id, "gpt-5.4");
    }
1717
    // When the preferred provider's entry lacks credentials, the ready
    // duplicate (openai) wins instead.
    #[test]
    fn select_model_and_thinking_model_only_prefers_ready_duplicate_exact_id_match() {
        let model_id = "__test-ready-duplicate-model__";
        let cli = cli::Cli::parse_from(["pi", "--model", model_id]);
        let config = Config {
            default_provider: None,
            ..Config::default()
        };
        let session = Session::in_memory();
        let mut codex = test_model_entry(model_id, "openai-codex", true);
        codex.api_key = None;
        codex.auth_header = true;
        let registry =
            registry_with_entries(vec![test_model_entry(model_id, "openai", true), codex]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("duplicate exact-id matches should still prefer ready entries");

        assert_eq!(selection.model_entry.model.provider, "openai");
        assert_eq!(selection.model_entry.model.id, model_id);
    }
1740
    // Scoped models: the configured default provider alias picks the matching
    // scoped entry, and that entry's thinking level is applied.
    #[test]
    fn select_model_and_thinking_scoped_models_prefers_default_provider_alias() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            default_model: Some("gpt-4o-mini".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(Vec::new());
        let scoped_models = vec![
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openai", true),
                thinking_level: None,
            },
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::High),
            },
        ];

        let selection = select_model_and_thinking(
            &cli,
            &config,
            &session,
            &registry,
            &scoped_models,
            Path::new("/tmp"),
        )
        .expect("scoped models should honor default provider alias");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
        assert_eq!(selection.thinking_level, model::ThinkingLevel::High);
    }
1776
    // Scoped models: configured default_model matches case-insensitively and
    // uses that scoped entry's thinking level (Low, not the other entry's High).
    #[test]
    fn select_model_and_thinking_scoped_models_matches_default_model_case_insensitively() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            default_model: Some("GPT-4O-MINI".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(Vec::new());
        let scoped_models = vec![
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::Low),
            },
            ScopedModel {
                model: test_model_entry("gpt-4o", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::High),
            },
        ];

        let selection = select_model_and_thinking(
            &cli,
            &config,
            &session,
            &registry,
            &scoped_models,
            Path::new("/tmp"),
        )
        .expect("scoped default model should match case-insensitively");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
        assert_eq!(selection.thinking_level, model::ThinkingLevel::Low);
    }
1812
    // Without an alias candidate, the lexicographically newest dated snapshot
    // is chosen.
    #[test]
    fn parse_model_pattern_picks_latest_dated_when_no_alias_exists() {
        let available = vec![
            test_model_entry("gpt-5.1-codex-20250101", "openai", true),
            test_model_entry("gpt-5.1-codex-20250601", "openai", true),
        ];

        let parsed = parse_model_pattern("gpt-5.1-codex", &available);
        let model = parsed.model.expect("model should match");

        assert_eq!(model.model.id, "gpt-5.1-codex-20250601");
        assert!(parsed.thinking_level.is_none());
        assert!(parsed.warning.is_none());
    }
1827
    // "provider/vendor/model" splits at the first '/'; empty halves and
    // slash-free inputs yield None.
    #[test]
    fn split_provider_model_spec_preserves_nested_model_paths() {
        let parsed = split_provider_model_spec("openrouter/anthropic/claude-sonnet-4.5")
            .expect("provider/model spec");
        assert_eq!(parsed.0, "openrouter");
        assert_eq!(parsed.1, "anthropic/claude-sonnet-4.5");

        assert!(split_provider_model_spec("openrouter/").is_none());
        assert!(split_provider_model_spec("/anthropic/claude").is_none());
        assert!(split_provider_model_spec("no-slash").is_none());
    }
1839
    // With no registered entries, an openrouter spec resolves to an ad-hoc
    // entry pointing at the openrouter completions endpoint.
    #[test]
    fn try_match_model_supports_openrouter_dynamic_provider_model_ids() {
        let matched = try_match_model("openrouter/google/gemini-2.5-pro", &[])
            .expect("openrouter ad-hoc fallback should resolve");
        assert_eq!(matched.model.provider, "openrouter");
        assert_eq!(matched.model.id, "google/gemini-2.5-pro");
        assert_eq!(matched.model.api, "openai-completions");
        assert_eq!(matched.model.base_url, "https://openrouter.ai/api/v1");
    }
1849
    // Duplicate exact-id matches in try_match_model also honor the preferred
    // provider ordering (openai-codex over openai).
    #[test]
    fn try_match_model_prefers_openai_codex_for_duplicate_exact_id_matches() {
        let matched = try_match_model(
            "gpt-5.4",
            &[
                test_model_entry("gpt-5.4", "openai", true),
                test_model_entry("gpt-5.4", "openai-codex", true),
            ],
        )
        .expect("duplicate exact-id matches should honor preferred provider ordering");

        assert_eq!(matched.model.provider, "openai-codex");
        assert_eq!(matched.model.id, "gpt-5.4");
    }
1864
    // Non-ASCII ids must not hit byte-length/digit mismatches or panic.
    #[test]
    fn is_alias_handles_non_ascii_model_ids_without_panicking() {
        assert!(is_alias("é123456789"));
        assert!(is_alias("model-é2345678"));
        assert!(!is_alias("model-20250101"));
    }
1871
    // A valid ":high" suffix is parsed into the thinking level, leaving the
    // model match intact.
    #[test]
    fn parse_model_pattern_parses_thinking_suffix() {
        let available = vec![test_model_entry("gpt-5.1-codex", "openai", true)];
        let parsed = parse_model_pattern("openai/gpt-5.1-codex:high", &available);

        let model = parsed.model.expect("model should match");
        assert_eq!(model.model.id, "gpt-5.1-codex");
        assert_eq!(parsed.thinking_level, Some(model::ThinkingLevel::High));
        assert!(parsed.warning.is_none());
    }
1882
    // An unknown suffix (":extreme") still matches the model but yields a
    // warning and no thinking level.
    #[test]
    fn parse_model_pattern_warns_for_invalid_thinking_suffix() {
        let available = vec![test_model_entry("gpt-5.1-codex", "openai", true)];
        let parsed = parse_model_pattern("gpt-5.1-codex:extreme", &available);

        assert!(parsed.model.is_some());
        assert!(parsed.thinking_level.is_none());
        assert!(
            parsed
                .warning
                .expect("warning should be present")
                .contains("Invalid thinking level")
        );
    }
1897
    // Non-reasoning models clamp every thinking level down to Off.
    #[test]
    fn clamp_thinking_level_returns_off_for_non_reasoning_models() {
        let model_entry = test_model_entry("gpt-4o-mini", "openai", false);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::High);
        assert_eq!(clamped, model::ThinkingLevel::Off);
    }
1904
    // Models without XHigh support clamp XHigh to High.
    #[test]
    fn clamp_thinking_level_clamps_xhigh_for_unsupported_models() {
        let model_entry = test_model_entry("gpt-4o", "openai", true);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::XHigh);
        assert_eq!(clamped, model::ThinkingLevel::High);
    }
1911
    // Models that support XHigh (gpt-5.2) keep the XHigh level unchanged.
    #[test]
    fn clamp_thinking_level_keeps_xhigh_for_supported_models() {
        let model_entry = test_model_entry("gpt-5.2", "openai", true);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::XHigh);
        assert_eq!(clamped, model::ThinkingLevel::XHigh);
    }
1918
    // Property-based tests for the pure helper functions in this file.
    // Each `proptest!` group targets one helper; strategies are expressed
    // as regex patterns over the generated input strings.
    mod proptests {
        use super::*;
        use proptest::prelude::*;

        // `parse_models_arg`: splitting a comma-separated model list must
        // never yield empty entries, must trim surrounding whitespace, and
        // must round-trip a re-joined list.
        proptest! {
            #[test]
            fn parse_models_no_empty_strings(s in "([a-z0-9*-]{0,5},?){0,6}") {
                let result = parse_models_arg(&s);
                for m in &result {
                    assert!(!m.is_empty(), "parse_models_arg produced empty string from {s:?}");
                }
            }

            #[test]
            fn parse_models_whitespace_trimmed(m1 in "[a-z]{1,8}", m2 in "[a-z]{1,8}") {
                let with_spaces = format!(" {m1} , {m2} ");
                let result = parse_models_arg(&with_spaces);
                assert_eq!(result, vec![m1, m2]);
            }

            #[test]
            fn parse_models_round_trip(models in prop::collection::vec("[a-z0-9-]{1,10}", 1..6)) {
                let joined = models.join(",");
                let result = parse_models_arg(&joined);
                assert_eq!(result, models);
            }

            #[test]
            fn parse_models_empty_csv(s in "[ ,]*") {
                let result = parse_models_arg(&s);
                assert!(result.is_empty(), "whitespace/commas-only should yield empty vec");
            }
        }

        // `apply_piped_stdin` / `normalize_cli`: CLI mutation semantics.
        proptest! {
            #[test]
            fn apply_piped_stdin_trims_sets_print_and_prepends(
                existing in prop::collection::vec("[A-Za-z0-9._/-]{1,16}", 0..4),
                leading_ws in "[ \\t\\n\\r]{0,4}",
                core in "[A-Za-z0-9._/-]{1,24}",
                trailing_ws in "[ \\t\\n\\r]{0,4}",
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.args = existing.clone();
                cli.print = false;

                let raw = format!("{leading_ws}{core}{trailing_ws}");
                apply_piped_stdin(&mut cli, Some(raw));

                // Non-empty piped input forces print mode and is prepended
                // (trimmed) ahead of the existing positional args.
                prop_assert!(cli.print);
                prop_assert_eq!(cli.args.len(), existing.len() + 1);
                prop_assert_eq!(cli.args.first().map(String::as_str), Some(core.as_str()));
                prop_assert_eq!(&cli.args[1..], existing.as_slice());
            }

            #[test]
            fn apply_piped_stdin_none_or_whitespace_is_noop(
                existing in prop::collection::vec("[A-Za-z0-9._/-]{1,16}", 0..4),
                initial_print in any::<bool>(),
                initial_no_session in any::<bool>(),
                whitespace in "[ \\t\\n\\r]{0,16}",
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.args = existing.clone();
                cli.print = initial_print;
                cli.no_session = initial_no_session;

                // `None` stdin leaves every field untouched...
                apply_piped_stdin(&mut cli, None);
                prop_assert_eq!(&cli.args, &existing);
                prop_assert_eq!(cli.print, initial_print);
                prop_assert_eq!(cli.no_session, initial_no_session);

                // ...and so does whitespace-only stdin.
                apply_piped_stdin(&mut cli, Some(whitespace));
                prop_assert_eq!(&cli.args, &existing);
                prop_assert_eq!(cli.print, initial_print);
                prop_assert_eq!(cli.no_session, initial_no_session);
            }

            #[test]
            fn normalize_cli_lowercases_provider_and_applies_print_semantics(
                provider in prop::option::of("[A-Za-z0-9_-]{1,20}"),
                print in any::<bool>(),
                initial_no_session in any::<bool>(),
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.provider = provider.clone();
                cli.print = print;
                cli.no_session = initial_no_session;

                normalize_cli(&mut cli);

                // Print mode implies no session; otherwise no_session is preserved.
                let expected_provider = provider.map(|value: String| value.to_ascii_lowercase());
                let expected_no_session = if print { true } else { initial_no_session };

                prop_assert_eq!(cli.provider, expected_provider);
                prop_assert_eq!(cli.no_session, expected_no_session);
            }

            #[test]
            fn normalize_cli_is_idempotent(
                provider in prop::option::of("[A-Za-z0-9_-]{1,20}"),
                print in any::<bool>(),
                initial_no_session in any::<bool>(),
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.provider = provider;
                cli.print = print;
                cli.no_session = initial_no_session;

                // Applying normalization twice must give the same result
                // as applying it once.
                normalize_cli(&mut cli);
                let provider_once = cli.provider.clone();
                let no_session_once = cli.no_session;
                let print_once = cli.print;

                normalize_cli(&mut cli);

                prop_assert_eq!(cli.provider, provider_once);
                prop_assert_eq!(cli.no_session, no_session_once);
                prop_assert_eq!(cli.print, print_once);
            }
        }

        // `split_provider_model_spec`: splits on the FIRST slash only,
        // trims both halves, and rejects empty halves / slash-less input.
        proptest! {
            #[test]
            fn split_spec_first_slash(pre in "[a-z]{1,8}", mid in "[a-z]{1,8}", post in "[a-z]{1,8}") {
                let input = format!("{pre}/{mid}/{post}");
                let (p, m) = split_provider_model_spec(&input).unwrap();
                assert_eq!(p, pre.as_str());
                assert_eq!(m, format!("{mid}/{post}"));
            }

            #[test]
            fn split_spec_trims_whitespace(p in "[a-z]{1,6}", m in "[a-z]{1,6}") {
                let input = format!(" {p} / {m} ");
                let (prov, model) = split_provider_model_spec(&input).unwrap();
                assert_eq!(prov, p.as_str());
                assert_eq!(model, m.as_str());
            }

            #[test]
            fn split_spec_rejects_empty_halves(valid in "[a-z]{1,8}") {
                assert!(split_provider_model_spec(&format!("{valid}/")).is_none());
                assert!(split_provider_model_spec(&format!("/{valid}")).is_none());
            }

            #[test]
            fn split_spec_none_without_slash(s in "[a-z0-9]{1,12}") {
                assert!(split_provider_model_spec(&s).is_none());
            }
        }

        // `is_alias`: an id is an alias unless it ends in a pure-digit
        // date-like suffix (per the cases exercised below).
        proptest! {
            #[test]
            fn is_alias_latest_suffix(prefix in "[a-z]{1,10}") {
                assert!(is_alias(&format!("{prefix}-latest")));
            }

            #[test]
            fn is_alias_eight_digits_not_alias(prefix in "[a-z]{1,8}", d in "[0-9]{8}") {
                let id = format!("{prefix}-{d}");
                assert!(!is_alias(&id), "{id} should not be alias (8-digit suffix)");
            }

            #[test]
            fn is_alias_non_eight_digit_suffix(prefix in "[a-z]{1,6}", suffix in "[a-z0-9]{1,7}") {
                let id = format!("{prefix}-{suffix}");
                let is_pure_digits = suffix.chars().all(|c| c.is_ascii_digit());
                // NOTE(review): the strategy caps `suffix` at 7 chars, so the
                // `len() == 8` arm below can never fire here — only the
                // 4-digit case is actually exercised. Consider widening the
                // strategy or dropping the dead condition.
                if is_pure_digits && (suffix.len() == 8 || suffix.len() == 4) {
                    assert!(!is_alias(&id));
                } else {
                    assert!(is_alias(&id));
                }
            }

            #[test]
            fn is_alias_no_hyphen(id in "[a-z0-9]{1,12}") {
                if !id.contains('-') {
                    assert!(is_alias(&id));
                }
            }

            // Fuzz: arbitrary (possibly non-ASCII) input must not panic.
            #[test]
            fn is_alias_non_ascii_no_panic(id in ".{1,20}") {
                let _ = is_alias(&id);
            }
        }

        // `models_equal`: equivalence-relation properties, plus the fact
        // that readiness flags do not participate in equality.
        proptest! {
            #[test]
            fn models_equal_reflexive(provider in "[a-z]{1,6}", id in "[a-z0-9-]{1,10}") {
                let m = test_model_entry(&id, &provider, true);
                assert!(models_equal(&m, &m));
            }

            #[test]
            fn models_equal_symmetric(provider in "[a-z]{1,6}", id in "[a-z0-9-]{1,10}") {
                // The third argument (readiness) differs on purpose: symmetry
                // must hold regardless of per-entry flags.
                let a = test_model_entry(&id, &provider, true);
                let b = test_model_entry(&id, &provider, false);
                assert_eq!(models_equal(&a, &b), models_equal(&b, &a));
            }

            #[test]
            fn models_equal_different_providers(id in "[a-z]{1,8}", p1 in "[a-z]{1,5}", p2 in "[a-z]{1,5}") {
                if p1 != p2 {
                    let a = test_model_entry(&id, &p1, true);
                    let b = test_model_entry(&id, &p2, true);
                    assert!(!models_equal(&a, &b));
                }
            }

            #[test]
            fn models_equal_different_ids(id1 in "[a-z]{1,6}", id2 in "[a-z]{1,6}", prov in "[a-z]{1,5}") {
                if id1 != id2 {
                    let a = test_model_entry(&id1, &prov, true);
                    let b = test_model_entry(&id2, &prov, true);
                    assert!(!models_equal(&a, &b));
                }
            }
        }

        // Equality normalizes provider aliases ("openrouter" vs
        // "open-router") and compares model ids case-insensitively.
        #[test]
        fn models_equal_normalizes_provider_aliases_and_model_case() {
            let left = test_model_entry("openai/gpt-4o-mini", "openrouter", true);
            let right = test_model_entry("OPENAI/GPT-4O-MINI", "open-router", false);
            assert!(models_equal(&left, &right));
        }
    }
2166}