Skip to main content

pi/
app.rs

1//! Helpers for `src/main.rs`.
2//!
3//! This module exists to make core CLI logic testable without invoking the full
4//! interactive agent loop.
5
6use std::collections::HashSet;
7use std::path::{Path, PathBuf};
8
9use anyhow::{Result, bail};
10use chrono::{Datelike, Local};
11use glob::Pattern;
12use thiserror::Error;
13
14use crate::auth::AuthStorage;
15use crate::cli;
16use crate::config::Config;
17use crate::model::{self, AssistantMessage, ContentBlock, ImageContent, TextContent};
18use crate::models::{ModelEntry, ModelRegistry, default_models_path};
19use crate::provider::{StreamOptions, ThinkingBudgets};
20use crate::provider_metadata::{canonical_provider_id, provider_metadata};
21use crate::session::Session;
22use crate::tools::process_file_arguments;
23
/// The first user message assembled from `@file` CLI arguments plus any
/// leading positional message, ready to send when the agent starts.
#[derive(Debug, Clone)]
pub struct InitialMessage {
    /// Combined text: processed file-argument text followed by the first CLI message.
    pub text: String,
    /// Images extracted from `@file` arguments (possibly auto-resized).
    pub images: Vec<ImageContent>,
}
29
/// A model admitted by the `--models` scope, with an optional per-model
/// thinking level parsed from a `:level` suffix on the pattern.
#[derive(Debug, Clone)]
pub struct ScopedModel {
    pub model: ModelEntry,
    /// Thinking level attached via a pattern suffix, if any.
    pub thinking_level: Option<model::ThinkingLevel>,
}
35
/// Outcome of parsing a single (non-glob) `--models` pattern.
#[derive(Debug, Clone)]
struct ParsedModelResult {
    /// The matched model, if any.
    model: Option<ModelEntry>,
    /// Thinking level stripped from a `:level` suffix, if present and valid.
    thinking_level: Option<model::ThinkingLevel>,
    /// Non-fatal diagnostic to print (e.g. ambiguous or partial match).
    warning: Option<String>,
}
42
/// Final result of model/thinking-level resolution at startup.
#[derive(Debug, Clone)]
pub struct ModelSelection {
    /// The model the session will start with.
    pub model_entry: ModelEntry,
    /// Thinking level, already clamped to what the model supports.
    pub thinking_level: model::ThinkingLevel,
    /// The `--models` scope carried through for later cycling.
    pub scoped_models: Vec<ScopedModel>,
    /// Optional user-facing notice explaining why a fallback model was used.
    pub fallback_message: Option<String>,
}
50
/// Errors that can abort startup before the agent loop begins.
#[derive(Debug, Error)]
pub enum StartupError {
    /// The registry is completely empty — no built-ins, no user models file.
    #[error("No models available. Set API keys in environment variables or create {models_path}")]
    NoModelsAvailable { models_path: PathBuf },
    /// The chosen provider requires credentials and none were found.
    #[error("No API key found for provider {provider}. Set env var or use --api-key.")]
    MissingApiKey { provider: String },
}
58
/// A project context file (AGENTS.md / CLAUDE.md) loaded from disk.
#[derive(Debug, Clone)]
struct ContextFile {
    /// Display path; also used as the dedup key across directories.
    path: String,
    /// Full file contents, embedded verbatim into the system prompt.
    content: String,
}
64
/// Result of trying to restore a previously-used model from a session.
struct RestoreResult {
    /// The restored model, or a fallback, or `None` if nothing usable exists.
    model: Option<ModelEntry>,
    /// User-facing message when a fallback was substituted.
    fallback_message: Option<String>,
}
69
70pub fn apply_piped_stdin(cli: &mut cli::Cli, stdin_content: Option<String>) {
71    if let Some(stdin_content) = stdin_content {
72        // Match pi-mono's `.trim()` — strip all leading/trailing whitespace.
73        let stdin_content = stdin_content.trim();
74        if stdin_content.is_empty() {
75            return;
76        }
77        cli.print = true;
78        cli.args.insert(0, stdin_content.to_string());
79    }
80}
81
82#[allow(clippy::missing_const_for_fn)]
83pub fn normalize_cli(cli: &mut cli::Cli) {
84    if cli.print {
85        cli.no_session = true;
86    }
87
88    if let Some(provider) = &mut cli.provider {
89        *provider = provider.to_ascii_lowercase();
90    }
91}
92
93pub fn validate_rpc_args(cli: &cli::Cli) -> Result<()> {
94    if cli.mode.as_deref() == Some("rpc") && !cli.file_args().is_empty() {
95        bail!("Error: @file arguments are not supported in RPC mode");
96    }
97    Ok(())
98}
99
100pub fn prepare_initial_message(
101    cwd: &Path,
102    file_args: &[String],
103    messages: &mut Vec<String>,
104    auto_resize_images: bool,
105) -> Result<Option<InitialMessage>> {
106    if file_args.is_empty() {
107        return Ok(None);
108    }
109
110    let processed = process_file_arguments(file_args, cwd, auto_resize_images)?;
111    let mut initial_message = processed.text;
112    let has_message = !messages.is_empty();
113    if has_message {
114        initial_message.push_str(&messages.remove(0));
115    }
116
117    if initial_message.is_empty() && processed.images.is_empty() && !has_message {
118        return Ok(None);
119    }
120
121    Ok(Some(InitialMessage {
122        text: initial_message,
123        images: processed.images,
124    }))
125}
126
127pub fn build_initial_content(initial: &InitialMessage) -> Vec<ContentBlock> {
128    let mut content = Vec::new();
129    content.push(ContentBlock::Text(TextContent::new(initial.text.clone())));
130    for image in &initial.images {
131        content.push(ContentBlock::Image(image.clone()));
132    }
133    content
134}
135
/// Assemble the full system prompt for a run.
///
/// Layering order (later sections append to earlier ones):
/// 1. custom `--system-prompt` (inline text or file) or the built-in default,
/// 2. `--append-system-prompt`,
/// 3. project context files (global dir + AGENTS.md/CLAUDE.md ancestors),
/// 4. the skills prompt,
/// 5. current date/time and working directory (placeholders in test mode so
///    snapshot output stays stable).
pub fn build_system_prompt(
    cli: &cli::Cli,
    cwd: &Path,
    enabled_tools: &[&str],
    skills_prompt: Option<&str>,
    global_dir: &Path,
    package_dir: &Path,
    test_mode: bool,
) -> String {
    use std::fmt::Write as _;

    let custom_prompt = resolve_prompt_input(cli.system_prompt.as_deref(), "system prompt");
    let append_prompt =
        resolve_prompt_input(cli.append_system_prompt.as_deref(), "append system prompt");
    let context_files = load_project_context_files(cwd, global_dir);

    // A custom prompt fully replaces the default; appends still apply after it.
    let mut prompt =
        custom_prompt.unwrap_or_else(|| default_system_prompt(enabled_tools, package_dir));

    if let Some(append_prompt) = append_prompt {
        prompt.push_str("\n\n");
        prompt.push_str(&append_prompt);
    }

    if !context_files.is_empty() {
        prompt.push_str("\n\n# Project Context\n\n");
        prompt.push_str("Project-specific instructions and guidelines:\n\n");
        for file in &context_files {
            // `write!` into the String cannot fail; result intentionally ignored.
            let _ = write!(prompt, "## {}\n\n{}\n\n", file.path, file.content);
        }
    }

    if let Some(skills_prompt) = skills_prompt {
        prompt.push_str(skills_prompt);
    }

    // Test mode substitutes stable placeholders for run-dependent values.
    let date_time = if test_mode {
        "<TIMESTAMP>".to_string()
    } else {
        format_current_datetime()
    };
    let _ = write!(prompt, "\nCurrent date and time: {date_time}");
    let cwd_display = if test_mode {
        "<CWD>".to_string()
    } else {
        cwd.display().to_string()
    };
    let _ = write!(prompt, "\nCurrent working directory: {cwd_display}");

    prompt
}
187
/// Resolve a prompt argument that may be either inline text or a file path.
///
/// If `input` names an existing file its contents are returned; otherwise the
/// value itself is the prompt text. A read failure falls back to the literal
/// value after printing a warning (best-effort, never fatal).
fn resolve_prompt_input(input: Option<&str>, description: &str) -> Option<String> {
    let value = input?;
    let path = Path::new(value);

    if !path.exists() {
        return Some(value.to_string());
    }

    let content = std::fs::read_to_string(path).unwrap_or_else(|err| {
        eprintln!("Warning: Could not read {description} file {value}: {err}");
        value.to_string()
    });
    Some(content)
}
204
/// Build the built-in system prompt for the given enabled tool set.
///
/// Lists each enabled tool with its one-line description (in the caller's
/// order), then guidelines tailored to which tools are present, then pointers
/// into the pi documentation tree under `package_dir`.
fn default_system_prompt(enabled_tools: &[&str], package_dir: &Path) -> String {
    const TOOL_DESCRIPTIONS: [(&str, &str); 7] = [
        ("read", "Read file contents"),
        ("bash", "Execute bash commands (ls, grep, find, etc.)"),
        (
            "edit",
            "Make surgical edits to files (find exact text and replace)",
        ),
        ("write", "Create or overwrite files"),
        (
            "grep",
            "Search file contents for patterns (respects .gitignore)",
        ),
        ("find", "Find files by glob pattern (respects .gitignore)"),
        ("ls", "List directory contents"),
    ];

    // One "- name: description" line per enabled tool that has a description,
    // preserving the caller-supplied order.
    let tool_lines: Vec<String> = enabled_tools
        .iter()
        .filter_map(|tool| {
            TOOL_DESCRIPTIONS
                .iter()
                .find(|(name, _)| name == tool)
                .map(|(_, description)| format!("- {tool}: {description}"))
        })
        .collect();
    let tools_list = if tool_lines.is_empty() {
        "(none)".to_string()
    } else {
        tool_lines.join("\n")
    };

    let has = |name: &str| enabled_tools.contains(&name);

    // Guidelines depend on which tool combinations are available.
    let mut guideline_items: Vec<&str> = Vec::new();
    if has("bash") {
        if has("grep") || has("find") || has("ls") {
            guideline_items.push(
                "Prefer grep/find/ls tools over bash for file exploration (faster, respects .gitignore)",
            );
        } else {
            guideline_items.push("Use bash for file operations like ls, rg, find");
        }
    }

    if has("read") && has("edit") {
        guideline_items.push(
            "Use read to examine files before editing. You must use this tool instead of cat or sed.",
        );
    }
    if has("edit") {
        guideline_items.push("Use edit for precise changes (old text must match exactly)");
    }
    if has("write") {
        guideline_items.push("Use write only for new files or complete rewrites");
    }
    if has("edit") || has("write") {
        guideline_items.push(
            "When summarizing your actions, output plain text directly - do NOT use cat or bash to display what you did",
        );
    }

    guideline_items.push("Be concise in your responses");
    guideline_items.push("Show file paths clearly when working with files");

    let guidelines = guideline_items
        .iter()
        .map(|g| format!("- {g}"))
        .collect::<Vec<_>>()
        .join("\n");

    let readme_path = package_dir.join("README.md").display().to_string();
    let docs_path = package_dir.join("docs").display().to_string();
    let examples_path = package_dir.join("examples").display().to_string();

    format!(
        "You are an expert coding assistant operating inside pi, a coding agent harness. You help users by reading files, executing commands, editing code, and writing new files.\n\nAvailable tools:\n{tools_list}\n\nIn addition to the tools above, you may have access to other custom tools depending on the project.\n\nGuidelines:\n{guidelines}\n\nPi documentation (read only when the user asks about pi itself, its SDK, extensions, themes, skills, or TUI):\n- Main documentation: {readme_path}\n- Additional docs: {docs_path}\n- Examples: {examples_path} (extensions, custom tools, SDK)\n- When asked about: extensions (docs/extensions.md, examples/extensions/), themes (docs/themes.md), skills (docs/skills.md), prompt templates (docs/prompt-templates.md), TUI components (docs/tui.md), keybindings (docs/keybindings.md), SDK integrations (docs/sdk.md), custom providers (docs/custom-provider.md), adding models (docs/models.md), pi packages (docs/packages.md)\n- When working on pi topics, read the docs and examples, and follow .md cross-references before implementing\n- Always read pi .md files completely and follow links to related docs (e.g., tui.md for TUI API details)"
    )
}
287
288fn load_project_context_files(cwd: &Path, global_dir: &Path) -> Vec<ContextFile> {
289    let mut context_files = Vec::new();
290    let mut seen = HashSet::new();
291
292    if let Some(global) = load_context_file_from_dir(global_dir) {
293        seen.insert(global.path.clone());
294        context_files.push(global);
295    }
296
297    let mut ancestor_files = Vec::new();
298    let mut current = cwd.to_path_buf();
299
300    loop {
301        if let Some(context) = load_context_file_from_dir(&current) {
302            if seen.insert(context.path.clone()) {
303                ancestor_files.push(context);
304            }
305        }
306
307        if !current.pop() {
308            break;
309        }
310    }
311
312    ancestor_files.reverse();
313    context_files.extend(ancestor_files);
314    context_files
315}
316
317fn load_context_file_from_dir(dir: &Path) -> Option<ContextFile> {
318    let candidates = ["AGENTS.md", "CLAUDE.md"];
319    for filename in candidates {
320        let path = dir.join(filename);
321        if path.exists() {
322            match std::fs::read_to_string(&path) {
323                Ok(content) => {
324                    return Some(ContextFile {
325                        path: path.display().to_string(),
326                        content,
327                    });
328                }
329                Err(err) => {
330                    eprintln!("Warning: Could not read {}: {err}", path.display());
331                }
332            }
333        }
334    }
335    None
336}
337
338fn format_current_datetime() -> String {
339    let now = Local::now();
340    let date = format!(
341        "{}, {} {}, {}",
342        now.format("%A"),
343        now.format("%B"),
344        now.day(),
345        now.year()
346    );
347    let time = format!("{} {}", now.format("%I:%M:%S %p"), now.format("%Z"));
348    format!("{date}, {time}")
349}
350
/// Pick the model and thinking level for this run.
///
/// Model precedence (first match wins):
/// 1. `--provider` + `--model` (errors if not found and not ad-hoc buildable),
/// 2. `--provider` alone (first ready model for it, else its first model),
/// 3. `--model` alone (optionally `provider/model`; else matched by id with
///    config default provider > ready > first),
/// 4. the `--models` scope (skipped when continuing an existing session),
/// 5. the last model recorded in the session,
/// 6. config defaults, then any key-ready model, then the catalog default.
///
/// Thinking precedence: `--thinking`, scoped level, session history (when
/// continuing), config default; finally clamped to the chosen model.
///
/// # Errors
/// Fails on explicit selections that don't resolve, invalid `--thinking`
/// values, or a completely empty registry ([`StartupError::NoModelsAvailable`]).
#[allow(clippy::too_many_lines)]
pub fn select_model_and_thinking(
    cli: &cli::Cli,
    config: &Config,
    session: &Session,
    registry: &ModelRegistry,
    scoped_models: &[ScopedModel],
    global_dir: &Path,
) -> Result<ModelSelection> {
    let is_continuing = cli.r#continue || cli.resume || cli.session.is_some();
    let mut selected_model: Option<ModelEntry> = None;
    let mut scoped_thinking: Option<model::ThinkingLevel> = None;
    let mut fallback_message = None;

    if let (Some(provider), Some(model_id)) = (cli.provider.as_deref(), cli.model.as_deref()) {
        // Fully explicit selection: must resolve or error.
        let found = registry
            .find(provider, model_id)
            .or_else(|| crate::models::ad_hoc_model_entry(provider, model_id));
        if found.is_none() {
            bail!("Model {provider}/{model_id} not found");
        }
        selected_model = found;
    } else if let Some(provider) = cli.provider.as_deref() {
        // Provider only: prefer a model with usable credentials.
        let mut candidates: Vec<ModelEntry> = registry
            .models()
            .iter()
            .filter(|m| provider_ids_match(&m.model.provider, provider))
            .cloned()
            .collect();
        if candidates.is_empty() {
            bail!("No models available for provider {provider}");
        }
        if let Some(found) = candidates.iter().find(|m| model_entry_is_ready(m)) {
            selected_model = Some(found.clone());
        } else {
            selected_model = Some(candidates.remove(0));
        }
    } else if let Some(model_id) = cli.model.as_deref() {
        // Model only: may be "provider/model" shorthand.
        if let Some((provider, scoped_model_id)) = split_provider_model_spec(model_id) {
            selected_model = registry
                .find(provider, scoped_model_id)
                .or_else(|| crate::models::ad_hoc_model_entry(provider, scoped_model_id));
        }

        if selected_model.is_none() {
            // Match by bare id across all providers, then disambiguate:
            // config default provider > credential-ready > first listed.
            let matches: Vec<ModelEntry> = registry
                .models()
                .iter()
                .filter(|m| m.model.id.eq_ignore_ascii_case(model_id))
                .cloned()
                .collect();
            if matches.is_empty() {
                bail!("Model {model_id} not found");
            }
            if let Some(default_provider) = config.default_provider.as_deref() {
                if let Some(found) = matches
                    .iter()
                    .find(|m| provider_ids_match(&m.model.provider, default_provider))
                {
                    selected_model = Some(found.clone());
                }
            }
            if selected_model.is_none() {
                if let Some(found) = matches.iter().find(|m| model_entry_is_ready(m)) {
                    selected_model = Some(found.clone());
                }
            }
            if selected_model.is_none() {
                selected_model = Some(matches[0].clone());
            }
        }
    } else if !scoped_models.is_empty() && !is_continuing {
        // --models scope: honor config default if it's inside the scope,
        // otherwise take the first scoped model. The scoped thinking level
        // only applies when --thinking wasn't given.
        if let (Some(default_provider), Some(default_model)) = (
            config.default_provider.as_deref(),
            config.default_model.as_deref(),
        ) {
            if let Some(found) = scoped_models.iter().find(|sm| {
                provider_ids_match(&sm.model.model.provider, default_provider)
                    && sm.model.model.id.eq_ignore_ascii_case(default_model)
            }) {
                selected_model = Some(found.model.clone());
                if cli.thinking.is_none() {
                    scoped_thinking = found.thinking_level;
                }
            }
        }
        if selected_model.is_none() {
            let first = &scoped_models[0];
            selected_model = Some(first.model.clone());
            if cli.thinking.is_none() {
                scoped_thinking = first.thinking_level;
            }
        }
    }

    // Nothing explicit: try the model last used in this session.
    if selected_model.is_none() {
        if let Some((provider, model_id)) = last_model_from_session(session) {
            let restore = restore_model_from_session(&provider, &model_id, None, registry);
            selected_model = restore.model;
            fallback_message = restore.fallback_message;
        }
    }

    // Then the configured default model.
    if selected_model.is_none() {
        if let (Some(default_provider), Some(default_model)) = (
            config.default_provider.as_deref(),
            config.default_model.as_deref(),
        ) {
            if let Some(found) = registry.find(default_provider, default_model) {
                selected_model = Some(found);
            }
        }
    }

    // Then any model whose credentials were detected.
    if selected_model.is_none() {
        let available = registry.get_available();
        if !available.is_empty() {
            selected_model = Some(default_model_from_available(&available));
        }
    }

    // If we restored or defaulted into a model that requires credentials but has
    // none configured, prefer falling back to any ready model instead of forcing
    // an immediate setup prompt. (Explicit CLI selection should still error.)
    let explicit_model_selection = cli.provider.is_some() || cli.model.is_some();
    let missing_creds = if explicit_model_selection {
        None
    } else {
        selected_model.as_ref().and_then(|entry| {
            if model_entry_is_ready(entry) {
                None
            } else {
                Some((entry.model.provider.clone(), entry.model.id.clone()))
            }
        })
    };
    if let Some((missing_provider, missing_model_id)) = missing_creds {
        let available = registry.get_available();
        if !available.is_empty() {
            let fallback = default_model_from_available(&available);
            fallback_message = Some(format!(
                "Missing credentials for {missing_provider}/{missing_model_id}. Using {}/{} based on detected keys.",
                fallback.model.provider, fallback.model.id
            ));
            selected_model = Some(fallback);
        } else if !registry.models().is_empty() {
            // No detected keys anywhere, but we still want to pick a stable default
            // so startup can guide the user through the correct login flow.
            let fallback = default_model_from_catalog(registry.models());
            fallback_message = Some(format!(
                "Missing credentials for {missing_provider}/{missing_model_id}. Defaulting to {}/{} for setup.",
                fallback.model.provider, fallback.model.id
            ));
            selected_model = Some(fallback);
        }
    }

    // If nothing was selected yet, default to our preferred catalog entry even
    // when no credentials are configured. This keeps first-run UX consistent
    // and avoids the misleading "No models configured" path when built-ins exist.
    if selected_model.is_none() && !registry.models().is_empty() {
        selected_model = Some(default_model_from_catalog(registry.models()));
    }

    let Some(model_entry) = selected_model else {
        let models_path = default_models_path(global_dir);
        return Err(StartupError::NoModelsAvailable { models_path }.into());
    };

    // Resolve the thinking level: CLI > scope > session (when continuing).
    let mut thinking_level: Option<model::ThinkingLevel> = None;

    if let Some(cli_thinking) = cli.thinking.as_deref() {
        thinking_level = Some(parse_thinking_level(cli_thinking)?);
    } else if scoped_thinking.is_some() {
        thinking_level = scoped_thinking;
    } else if is_continuing {
        if let Some(saved) = last_thinking_level(session) {
            thinking_level = Some(saved);
        }
    }

    // Fall back to the config default, then XHigh, clamped to the model.
    if thinking_level.is_none() {
        thinking_level = config
            .default_thinking_level
            .as_deref()
            .and_then(parse_thinking_level_opt);
    }

    let thinking_level =
        model_entry.clamp_thinking_level(thinking_level.unwrap_or(model::ThinkingLevel::XHigh));

    Ok(ModelSelection {
        model_entry,
        thinking_level,
        scoped_models: scoped_models.to_vec(),
        fallback_message,
    })
}
549
550fn parse_thinking_level(value: &str) -> Result<model::ThinkingLevel> {
551    value
552        .parse()
553        .map_err(|err| anyhow::anyhow!("Invalid thinking level \"{value}\": {err}"))
554}
555
/// Like [`parse_thinking_level`] but silently discards parse failures.
fn parse_thinking_level_opt(value: &str) -> Option<model::ThinkingLevel> {
    value.parse().ok()
}
559
560fn last_model_from_session(session: &Session) -> Option<(String, String)> {
561    for entry in session.entries.iter().rev() {
562        if let crate::session::SessionEntry::ModelChange(change) = entry {
563            return Some((change.provider.clone(), change.model_id.clone()));
564        }
565    }
566    None
567}
568
569fn last_thinking_level(session: &Session) -> Option<model::ThinkingLevel> {
570    for entry in session.entries.iter().rev() {
571        if let crate::session::SessionEntry::ThinkingLevelChange(change) = entry {
572            if let Some(level) = parse_thinking_level_opt(&change.thinking_level) {
573                return Some(level);
574            }
575        }
576    }
577    None
578}
579
580pub fn update_session_for_selection(session: &mut Session, selection: &ModelSelection) {
581    session.set_model_header(
582        Some(selection.model_entry.model.provider.clone()),
583        Some(selection.model_entry.model.id.clone()),
584        Some(selection.thinking_level.to_string()),
585    );
586
587    let model_changed = match last_model_from_session(session) {
588        Some((provider, model_id)) => {
589            provider != selection.model_entry.model.provider
590                || model_id != selection.model_entry.model.id
591        }
592        None => true,
593    };
594
595    if model_changed {
596        session.append_model_change(
597            selection.model_entry.model.provider.clone(),
598            selection.model_entry.model.id.clone(),
599        );
600    }
601
602    let thinking_changed = last_thinking_level(session) != Some(selection.thinking_level);
603
604    if thinking_changed {
605        session.append_thinking_level_change(selection.thinking_level.to_string());
606    }
607}
608
609fn restore_model_from_session(
610    saved_provider: &str,
611    saved_model_id: &str,
612    current_model: Option<ModelEntry>,
613    registry: &ModelRegistry,
614) -> RestoreResult {
615    let restored = registry
616        .find(saved_provider, saved_model_id)
617        .or_else(|| crate::models::ad_hoc_model_entry(saved_provider, saved_model_id));
618
619    if restored.is_some() {
620        return RestoreResult {
621            model: restored,
622            fallback_message: None,
623        };
624    }
625
626    let reason = "model no longer exists";
627
628    if let Some(current) = current_model {
629        return RestoreResult {
630            model: Some(current.clone()),
631            fallback_message: Some(format!(
632                "Could not restore model {saved_provider}/{saved_model_id} ({reason}). Using {}/{}.",
633                current.model.provider, current.model.id
634            )),
635        };
636    }
637
638    let available = registry.get_available();
639    if !available.is_empty() {
640        let fallback = default_model_from_available(&available);
641        return RestoreResult {
642            model: Some(fallback.clone()),
643            fallback_message: Some(format!(
644                "Could not restore model {saved_provider}/{saved_model_id} ({reason}). Using {}/{}.",
645                fallback.model.provider, fallback.model.id
646            )),
647        };
648    }
649
650    RestoreResult {
651        model: None,
652        fallback_message: None,
653    }
654}
655
/// Best default among models whose credentials were detected.
/// See [`default_model_from_candidates`] for the preference order.
fn default_model_from_available(available: &[ModelEntry]) -> ModelEntry {
    default_model_from_candidates(available)
}
659
/// Best default from the full catalog, ignoring credential readiness.
/// See [`default_model_from_candidates`] for the preference order.
fn default_model_from_catalog(models: &[ModelEntry]) -> ModelEntry {
    default_model_from_candidates(models)
}
663
664fn default_model_from_candidates(candidates: &[ModelEntry]) -> ModelEntry {
665    let defaults = [
666        // Prefer Codex (ChatGPT OAuth) when available.
667        ("openai-codex", "gpt-5.3-codex"),
668        ("openai-codex", "gpt-5.2-codex"),
669        ("openai-codex", "gpt-5.1-codex-max"),
670        // Fall back to OpenAI API when configured.
671        ("openai", "gpt-5.3-codex"),
672        ("openai", "gpt-5.2-codex"),
673        ("openai", "gpt-5.1-codex"),
674        ("amazon-bedrock", "us.anthropic.claude-opus-4-20250514-v1:0"),
675        ("anthropic", "claude-opus-4-5"),
676        ("azure-openai-responses", "gpt-5.2"),
677        ("google", "gemini-2.5-pro"),
678        ("google-gemini-cli", "gemini-2.5-pro"),
679        ("google-antigravity", "gemini-3-pro-high"),
680        ("google-vertex", "gemini-3-pro-preview"),
681        ("github-copilot", "gpt-4o"),
682        ("openrouter", "openai/gpt-5.1-codex"),
683        ("vercel-ai-gateway", "anthropic/claude-opus-4.5"),
684        ("xai", "grok-4-fast-non-reasoning"),
685        ("groq", "openai/gpt-oss-120b"),
686        ("cerebras", "zai-glm-4.6"),
687        ("zai", "glm-4.6"),
688        ("mistral", "devstral-medium-latest"),
689        ("minimax", "MiniMax-M2.1"),
690        ("minimax-cn", "MiniMax-M2.1"),
691        ("huggingface", "moonshotai/Kimi-K2.5"),
692        ("opencode", "claude-opus-4-5"),
693        ("kimi-coding", "kimi-k2-thinking"),
694    ];
695
696    let canonical = |provider: &str| {
697        canonical_provider_id(provider)
698            .unwrap_or(provider)
699            .to_ascii_lowercase()
700    };
701
702    for (provider, model_id) in defaults {
703        if let Some(found) = candidates.iter().find(|m| {
704            canonical(&m.model.provider) == canonical(provider)
705                && m.model.id.eq_ignore_ascii_case(model_id)
706        }) {
707            return found.clone();
708        }
709    }
710
711    candidates[0].clone()
712}
713
/// Trim an optional API key, mapping blank or whitespace-only keys to `None`.
fn normalize_api_key_opt(api_key: Option<String>) -> Option<String> {
    let key = api_key?;
    let trimmed = key.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
720
721fn model_requires_configured_credential(entry: &ModelEntry) -> bool {
722    let provider = entry.model.provider.as_str();
723    entry.auth_header
724        || provider_metadata(provider).is_some_and(|meta| !meta.auth_env_keys.is_empty())
725        || entry.oauth_config.is_some()
726}
727
728fn model_entry_is_ready(entry: &ModelEntry) -> bool {
729    !model_requires_configured_credential(entry)
730        || entry
731            .api_key
732            .as_ref()
733            .is_some_and(|value| !value.trim().is_empty())
734}
735
736pub fn resolve_api_key(
737    auth: &AuthStorage,
738    cli: &cli::Cli,
739    entry: &ModelEntry,
740) -> Result<Option<String>> {
741    let key = normalize_api_key_opt(cli.api_key.clone())
742        .or_else(|| normalize_api_key_opt(auth.resolve_api_key(&entry.model.provider, None)))
743        .or_else(|| normalize_api_key_opt(entry.api_key.clone()));
744
745    if model_requires_configured_credential(entry) && key.is_none() {
746        return Err(StartupError::MissingApiKey {
747            provider: entry.model.provider.clone(),
748        }
749        .into());
750    }
751
752    Ok(key)
753}
754
755pub fn build_stream_options(
756    config: &Config,
757    api_key: Option<String>,
758    selection: &ModelSelection,
759    session: &Session,
760) -> StreamOptions {
761    let mut options = StreamOptions {
762        api_key,
763        headers: selection.model_entry.headers.clone(),
764        session_id: Some(session.header.id.clone()),
765        ..Default::default()
766    };
767
768    options.thinking_level = Some(selection.thinking_level);
769
770    if let Some(budgets) = &config.thinking_budgets {
771        let defaults = ThinkingBudgets::default();
772        options.thinking_budgets = Some(ThinkingBudgets {
773            minimal: budgets.minimal.unwrap_or(defaults.minimal),
774            low: budgets.low.unwrap_or(defaults.low),
775            medium: budgets.medium.unwrap_or(defaults.medium),
776            high: budgets.high.unwrap_or(defaults.high),
777            xhigh: budgets.xhigh.unwrap_or(defaults.xhigh),
778        });
779    }
780
781    options
782}
783
784// === Model scoping helpers (used by main + tests) ===
785
/// Split a comma-separated `--models` argument into trimmed, non-empty entries.
pub fn parse_models_arg(models: &str) -> Vec<String> {
    let mut patterns = Vec::new();
    for raw in models.split(',') {
        let trimmed = raw.trim();
        if !trimmed.is_empty() {
            patterns.push(trimmed.to_string());
        }
    }
    patterns
}
794
/// Expand `--models` patterns into a deduplicated list of scoped models.
///
/// Patterns containing glob metacharacters (`*`, `?`, `[`) are matched
/// case-insensitively against both `provider/id` and bare `id`; other
/// patterns go through [`parse_model_pattern`]. A trailing `:level` suffix
/// that parses as a thinking level is stripped and attached to the matches.
/// Bad globs and non-matching patterns print warnings but never fail.
///
/// `allow_missing_keys` widens the candidate pool to the full catalog instead
/// of only credential-ready models.
pub fn resolve_model_scope(
    patterns: &[String],
    registry: &ModelRegistry,
    allow_missing_keys: bool,
) -> Vec<ScopedModel> {
    let available_models = if allow_missing_keys {
        registry.models().to_vec()
    } else {
        registry.get_available()
    };

    let mut scoped_models: Vec<ScopedModel> = Vec::new();

    for pattern in patterns {
        if pattern.contains('*') || pattern.contains('?') || pattern.contains('[') {
            // Glob branch. Strip a valid `:level` suffix before compiling.
            let mut glob_pattern = pattern.as_str();
            let mut thinking_level = None;
            if let Some((prefix, suffix)) = pattern.rsplit_once(':') {
                if let Some(parsed) = parse_thinking_level_opt(suffix) {
                    thinking_level = Some(parsed);
                    glob_pattern = prefix;
                }
            }

            let glob = match Pattern::new(&glob_pattern.to_lowercase()) {
                Ok(glob) => glob,
                Err(err) => {
                    eprintln!("Warning: Invalid model pattern \"{pattern}\": {err}");
                    continue;
                }
            };

            let mut matched_any = false;
            for model in &available_models {
                // Match against "provider/id" and against the bare id, both
                // lowercased to mirror the lowercased glob.
                let full_id = format!("{}/{}", model.model.provider, model.model.id);
                let candidate_full = full_id.to_lowercase();
                let candidate_id = model.model.id.to_lowercase();
                if glob.matches(&candidate_full) || glob.matches(&candidate_id) {
                    matched_any = true;
                    // Dedup: a model already scoped by an earlier pattern wins.
                    if !scoped_models
                        .iter()
                        .any(|sm| models_equal(&sm.model, model))
                    {
                        scoped_models.push(ScopedModel {
                            model: model.clone(),
                            thinking_level,
                        });
                    }
                }
            }

            if !matched_any {
                eprintln!("Warning: No models match pattern \"{pattern}\"");
            }
            continue;
        }

        // Non-glob branch: exact/partial match handled by parse_model_pattern.
        let parsed = parse_model_pattern(pattern, &available_models);
        if let Some(warning) = parsed.warning {
            eprintln!("Warning: {warning}");
        }

        if let Some(model) = parsed.model {
            if !scoped_models
                .iter()
                .any(|sm| models_equal(&sm.model, &model))
            {
                scoped_models.push(ScopedModel {
                    model,
                    thinking_level: parsed.thinking_level,
                });
            }
        } else {
            eprintln!("Warning: No models match pattern \"{pattern}\"");
        }
    }

    scoped_models
}
874
/// Parses a single non-glob `--models` pattern into a concrete model plus an
/// optional thinking level (`provider/model[:level]` or `model[:level]`).
///
/// Resolution order:
/// 1. strip a trailing `:level` when it is a *valid* thinking level and the
///    remaining pattern matches a model;
/// 2. match the full pattern as-is;
/// 3. otherwise treat a trailing `:suffix` as an *invalid* thinking level:
///    still match the model, but emit a warning and use the default level.
fn parse_model_pattern(pattern: &str, available_models: &[ModelEntry]) -> ParsedModelResult {
    // Try stripping a valid thinking-level suffix FIRST. This prevents
    // `provider/model:high` from being swallowed by `ad_hoc_model_entry`
    // which would create a model with id `model:high` instead of `model`.
    if let Some((prefix, suffix)) = pattern.rsplit_once(':') {
        if let Some(thinking_level) = parse_thinking_level_opt(suffix) {
            let result = parse_model_pattern(prefix, available_models);
            if result.model.is_some() {
                return ParsedModelResult {
                    model: result.model,
                    // If the recursive parse already produced a warning (e.g.
                    // a nested invalid suffix), fall back to the default
                    // level rather than applying this one.
                    thinking_level: if result.warning.is_some() {
                        None
                    } else {
                        Some(thinking_level)
                    },
                    warning: result.warning,
                };
            }
        }
    }

    // No (usable) suffix: try the whole pattern as a model reference.
    if let Some(model) = try_match_model(pattern, available_models) {
        return ParsedModelResult {
            model: Some(model),
            thinking_level: None,
            warning: None,
        };
    }

    // No colon at all and no match above: give up without a warning.
    let Some((prefix, suffix)) = pattern.rsplit_once(':') else {
        return ParsedModelResult {
            model: None,
            thinking_level: None,
            warning: None,
        };
    };

    // Invalid thinking level suffix — still match the model but warn
    let result = parse_model_pattern(prefix, available_models);
    if result.model.is_some() {
        return ParsedModelResult {
            model: result.model,
            thinking_level: None,
            warning: Some(format!(
                "Invalid thinking level \"{suffix}\" in pattern \"{pattern}\". Using default instead."
            )),
        };
    }

    result
}
926
/// Splits a `provider/model` spec at the FIRST slash, trimming whitespace
/// around both halves. The model half may itself contain slashes (e.g.
/// `openrouter/anthropic/claude-...`). Returns `None` when there is no slash
/// or either half is empty after trimming.
fn split_provider_model_spec(model_spec: &str) -> Option<(&str, &str)> {
    match model_spec.split_once('/') {
        Some((raw_provider, raw_model)) => {
            let provider = raw_provider.trim();
            let model_id = raw_model.trim();
            (!provider.is_empty() && !model_id.is_empty()).then_some((provider, model_id))
        }
        None => None,
    }
}
936
937fn provider_ids_match(left: &str, right: &str) -> bool {
938    let left = left.trim();
939    let right = right.trim();
940    if left.eq_ignore_ascii_case(right) {
941        return true;
942    }
943
944    let left_canonical = canonical_provider_id(left).unwrap_or(left);
945    let right_canonical = canonical_provider_id(right).unwrap_or(right);
946
947    left_canonical.eq_ignore_ascii_case(right)
948        || right_canonical.eq_ignore_ascii_case(left)
949        || left_canonical.eq_ignore_ascii_case(right_canonical)
950}
951
952fn try_match_model(pattern: &str, available_models: &[ModelEntry]) -> Option<ModelEntry> {
953    if let Some((provider, model_id)) = split_provider_model_spec(pattern) {
954        if let Some(found) = available_models.iter().find(|m| {
955            provider_ids_match(&m.model.provider, provider)
956                && m.model.id.eq_ignore_ascii_case(model_id)
957        }) {
958            return Some(found.clone());
959        }
960
961        if let Some(ad_hoc) = crate::models::ad_hoc_model_entry(provider, model_id) {
962            return Some(ad_hoc);
963        }
964    }
965
966    if let Some(found) = available_models
967        .iter()
968        .find(|m| m.model.id.eq_ignore_ascii_case(pattern))
969    {
970        return Some(found.clone());
971    }
972
973    let pattern_lower = pattern.to_lowercase();
974    let matches: Vec<ModelEntry> = available_models
975        .iter()
976        .filter(|m| {
977            m.model.id.to_lowercase().contains(&pattern_lower)
978                || m.model.name.to_lowercase().contains(&pattern_lower)
979        })
980        .cloned()
981        .collect();
982
983    if matches.is_empty() {
984        return None;
985    }
986
987    let mut aliases: Vec<ModelEntry> = matches
988        .iter()
989        .filter(|m| is_alias(&m.model.id))
990        .cloned()
991        .collect();
992    let mut dated: Vec<ModelEntry> = matches
993        .iter()
994        .filter(|m| !is_alias(&m.model.id))
995        .cloned()
996        .collect();
997
998    if !aliases.is_empty() {
999        aliases.sort_by(|a, b| b.model.id.cmp(&a.model.id));
1000        return aliases.first().cloned();
1001    }
1002
1003    dated.sort_by(|a, b| b.model.id.cmp(&a.model.id));
1004    dated.first().cloned()
1005}
1006
/// Returns `true` when the model id looks like an alias rather than a dated
/// snapshot. A snapshot is any id ending in `-` followed by exactly eight
/// ASCII digits (`-YYYYMMDD`); everything else — including `-latest` ids and
/// ids with no hyphen at all — is treated as an alias.
fn is_alias(model_id: &str) -> bool {
    if model_id.ends_with("-latest") {
        return true;
    }

    match model_id.rsplit_once('-') {
        // Byte length is used deliberately: a non-ASCII suffix can never be
        // eight pure digits, so it safely classifies as an alias.
        Some((_, suffix)) => !(suffix.len() == 8 && suffix.bytes().all(|b| b.is_ascii_digit())),
        None => true,
    }
}
1018
1019fn models_equal(left: &ModelEntry, right: &ModelEntry) -> bool {
1020    provider_ids_match(&left.model.provider, &right.model.provider)
1021        && left.model.id.eq_ignore_ascii_case(&right.model.id)
1022}
1023
1024pub fn output_final_text(message: &AssistantMessage) {
1025    for block in &message.content {
1026        if let ContentBlock::Text(text) = block {
1027            println!("{}", text.text);
1028        }
1029    }
1030}
1031
/// Renders the session transcript as an HTML document. Thin wrapper over
/// [`Session::to_html`] so `main.rs` does not depend on session internals.
pub fn render_session_html(session: &Session) -> String {
    session.to_html()
}
1035
1036#[cfg(test)]
1037mod tests {
1038    use std::collections::HashMap;
1039
1040    use clap::Parser;
1041    use tempfile::tempdir;
1042
1043    use super::*;
1044    use crate::auth::AuthStorage;
1045    use crate::provider::{InputType, Model, ModelCost};
1046
    /// Builds a minimal `ModelEntry` fixture: zero cost, text-only input, a
    /// dummy base URL, and a static `test-key` API key. `name` mirrors `id`.
    fn test_model_entry(id: &str, provider: &str, reasoning: bool) -> ModelEntry {
        ModelEntry {
            model: Model {
                id: id.to_string(),
                name: id.to_string(),
                api: "openai-responses".to_string(),
                provider: provider.to_string(),
                base_url: "https://example.test/v1".to_string(),
                reasoning,
                input: vec![InputType::Text],
                cost: ModelCost {
                    input: 0.0,
                    output: 0.0,
                    cache_read: 0.0,
                    cache_write: 0.0,
                },
                context_window: 128_000,
                max_tokens: 8_192,
                headers: HashMap::new(),
            },
            api_key: Some("test-key".to_string()),
            headers: HashMap::new(),
            auth_header: true,
            compat: None,
            oauth_config: None,
        }
    }
1074
    /// Builds a `ModelRegistry` backed by a throwaway auth store in a temp
    /// dir, pre-populated with the given entries.
    fn registry_with_entries(entries: Vec<ModelEntry>) -> ModelRegistry {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut registry = ModelRegistry::load(&auth, None);
        registry.merge_entries(entries);
        registry
    }
1082
    // `parse_models_arg` splits on commas, trims, and drops empty segments.
    #[test]
    fn parse_models_arg_splits_and_trims() {
        assert_eq!(
            parse_models_arg("gpt-4*, claude* ,,"),
            vec!["gpt-4*".to_string(), "claude*".to_string()]
        );
    }

    // Azure keeps its legacy `gpt-5.2` default over other available models.
    #[test]
    fn default_model_from_available_prefers_azure_legacy_default() {
        let available = vec![
            test_model_entry("gpt-4o-mini", "azure-openai-responses", true),
            test_model_entry("gpt-5.2", "azure-openai-responses", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "azure-openai-responses");
        assert_eq!(selected.model.id, "gpt-5.2");
    }

    // The Vercel gateway default resolves through its alias mapping.
    #[test]
    fn default_model_from_available_applies_vercel_gateway_alias_mapping() {
        let available = vec![
            test_model_entry("gpt-4o-mini", "vercel", true),
            test_model_entry("anthropic/claude-opus-4.5", "vercel", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "vercel");
        assert_eq!(selected.model.id, "anthropic/claude-opus-4.5");
    }

    // Keyless providers (`auth_header = false`) resolve to `None` without error.
    #[test]
    fn resolve_api_key_allows_keyless_model_when_credentials_not_required() {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut entry = test_model_entry("llama3.2", "ollama", false);
        entry.api_key = None;
        entry.auth_header = false;

        let cli = cli::Cli::parse_from(["pi"]);
        let resolved = resolve_api_key(&auth, &cli, &entry).expect("resolve keyless model");
        assert!(resolved.is_none());
    }

    // A remote provider needing auth maps a missing key to
    // `StartupError::MissingApiKey` carrying the provider name.
    #[test]
    fn resolve_api_key_still_requires_credentials_for_remote_provider() {
        let dir = tempdir().expect("tempdir");
        let auth = AuthStorage::load(dir.path().join("auth.json")).expect("load auth");
        let mut entry = test_model_entry("gpt-4o-mini", "openai", true);
        entry.api_key = None;
        entry.auth_header = true;

        let cli = cli::Cli::parse_from(["pi"]);
        let err = resolve_api_key(&auth, &cli, &entry).unwrap_err();
        let startup = err
            .downcast_ref::<StartupError>()
            .expect("missing key should map to startup error");
        match startup {
            StartupError::MissingApiKey { provider } => {
                assert_eq!(provider, "openai");
            }
            StartupError::NoModelsAvailable { .. } => {
                panic!("unexpected startup error: {startup:?}");
            }
        }
    }
1150
    // The kimi-for-coding default resolves through its alias mapping to the
    // thinking variant.
    #[test]
    fn default_model_from_available_applies_kimi_coding_alias_mapping() {
        let available = vec![
            test_model_entry("kimi-k2-instruct", "kimi-for-coding", true),
            test_model_entry("kimi-k2-thinking", "kimi-for-coding", true),
        ];

        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "kimi-for-coding");
        assert_eq!(selected.model.id, "kimi-k2-thinking");
    }

    // Default-id lookup is case-insensitive and preserves the entry's casing.
    #[test]
    fn default_model_from_available_matches_default_id_case_insensitively() {
        let available = vec![test_model_entry("GPT-5.2-CODEX", "openai-codex", true)];
        let selected = default_model_from_available(&available);
        assert_eq!(selected.model.provider, "openai-codex");
        assert_eq!(selected.model.id, "GPT-5.2-CODEX");
    }

    // Piped stdin is trimmed, prepended to the args, and forces --print.
    #[test]
    fn apply_piped_stdin_trims_newlines_and_prepends_message() {
        let mut cli = cli::Cli::parse_from(["pi", "existing-message"]);
        apply_piped_stdin(&mut cli, Some("from-stdin\n".to_string()));

        assert!(cli.print);
        assert_eq!(
            cli.args,
            vec!["from-stdin".to_string(), "existing-message".to_string()]
        );
    }

    // Whitespace-only stdin is a no-op: args and flags are untouched.
    #[test]
    fn apply_piped_stdin_ignores_empty_input() {
        let mut cli = cli::Cli::parse_from(["pi", "existing-message"]);
        apply_piped_stdin(&mut cli, Some("\n".to_string()));

        assert!(!cli.print);
        assert_eq!(cli.args, vec!["existing-message".to_string()]);
    }

    // normalize_cli: --print implies --no-session; provider is lowercased.
    #[test]
    fn normalize_cli_enables_no_session_for_print_and_lowercases_provider() {
        let mut cli = cli::Cli::parse_from(["pi", "--provider", "OpenAI", "--print", "hello"]);
        assert!(!cli.no_session);
        assert_eq!(cli.provider.as_deref(), Some("OpenAI"));

        normalize_cli(&mut cli);

        assert!(cli.no_session);
        assert_eq!(cli.provider.as_deref(), Some("openai"));
    }

    // RPC mode must reject @file arguments with a descriptive error.
    #[test]
    fn validate_rpc_args_rejects_file_arguments() {
        let cli = cli::Cli::parse_from(["pi", "--mode", "rpc", "@src/main.rs", "hello"]);

        let err = validate_rpc_args(&cli).expect_err("rpc mode should reject @file args");
        assert!(
            err.to_string()
                .contains("@file arguments are not supported in RPC mode")
        );
    }

    // Non-RPC modes still accept @file arguments.
    #[test]
    fn validate_rpc_args_allows_non_rpc_file_arguments() {
        let cli = cli::Cli::parse_from(["pi", "--mode", "json", "@src/main.rs", "hello"]);
        assert!(validate_rpc_args(&cli).is_ok());
    }
1220
    // When both an alias and a dated snapshot match, the alias wins.
    #[test]
    fn parse_model_pattern_prefers_alias_when_alias_and_dated_match() {
        let available = vec![
            test_model_entry("gpt-5.1-codex-20250101", "openai", true),
            test_model_entry("gpt-5.1-codex-latest", "openai", true),
        ];

        let parsed = parse_model_pattern("gpt-5.1-codex", &available);
        let model = parsed.model.expect("model should match");

        assert_eq!(model.model.id, "gpt-5.1-codex-latest");
        assert!(parsed.thinking_level.is_none());
        assert!(parsed.warning.is_none());
    }

    // A provider alias (`open-router`) must resolve to the registered entry,
    // keeping its metadata, instead of synthesising an ad-hoc one.
    #[test]
    fn try_match_model_prefers_existing_entry_for_provider_alias() {
        let mut openrouter = test_model_entry("openai/gpt-4o-mini", "openrouter", true);
        openrouter
            .headers
            .insert("x-test".to_string(), "1".to_string());

        let matched = try_match_model("open-router/openai/gpt-4o-mini", &[openrouter.clone()])
            .expect("provider alias should match existing entry");

        assert_eq!(matched.model.provider, "openrouter");
        assert_eq!(matched.model.id, "openai/gpt-4o-mini");
        assert_eq!(
            matched.headers.get("x-test").map(String::as_str),
            Some("1"),
            "must preserve existing model metadata instead of falling back to ad-hoc"
        );
    }

    // `--provider` accepts an alias form of the provider id.
    #[test]
    fn select_model_and_thinking_provider_only_accepts_provider_alias() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "open-router"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![test_model_entry(
            "openai/gpt-4o-mini",
            "openrouter",
            true,
        )]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider alias should resolve");

        assert!(provider_ids_match(
            &selection.model_entry.model.provider,
            "open-router"
        ));
        assert!(!selection.model_entry.model.id.is_empty());
    }

    // Provider-only selection skips entries missing required credentials in
    // favour of a keyless-ready model.
    #[test]
    fn select_model_and_thinking_provider_only_prefers_ready_model() {
        let cli = cli::Cli::parse_from(["pi", "--provider", "acme"]);
        let config = Config::default();
        let session = Session::in_memory();

        let mut unready_remote = test_model_entry("cloud-model", "acme", true);
        unready_remote.api_key = None;
        unready_remote.auth_header = true;

        let mut keyless_ready = test_model_entry("local-model", "acme", false);
        keyless_ready.api_key = None;
        keyless_ready.auth_header = false;

        let registry = registry_with_entries(vec![unready_remote, keyless_ready]);
        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("provider selection should prefer ready models");

        assert_eq!(selection.model_entry.model.provider, "acme");
        assert_eq!(selection.model_entry.model.id, "local-model");
    }

    // Model-only selection disambiguates via the configured default provider,
    // accepting its alias form.
    #[test]
    fn select_model_and_thinking_model_only_prefers_default_provider_alias() {
        let model_id = "__test-openrouter-alias-model__";
        let cli = cli::Cli::parse_from(["pi", "--model", model_id]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![
            test_model_entry(model_id, "openai", true),
            test_model_entry(model_id, "openrouter", true),
        ]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("default provider alias should resolve in model-only selection");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, model_id);
    }

    // `--model` lookup is case-insensitive.
    #[test]
    fn select_model_and_thinking_model_only_matches_case_insensitively() {
        let model_id = "__test-case-insensitive-model__";
        let cli = cli::Cli::parse_from(["pi", "--model", "__TEST-CASE-INSENSITIVE-MODEL__"]);
        let config = Config::default();
        let session = Session::in_memory();
        let registry = registry_with_entries(vec![test_model_entry(model_id, "openai", true)]);

        let selection =
            select_model_and_thinking(&cli, &config, &session, &registry, &[], Path::new("/tmp"))
                .expect("model-only selection should be case-insensitive");

        assert_eq!(selection.model_entry.model.provider, "openai");
        assert_eq!(selection.model_entry.model.id, model_id);
    }
1337
    // Scoped models honour the configured default provider alias and carry
    // their per-scope thinking level through.
    #[test]
    fn select_model_and_thinking_scoped_models_prefers_default_provider_alias() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            default_model: Some("gpt-4o-mini".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(Vec::new());
        let scoped_models = vec![
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openai", true),
                thinking_level: None,
            },
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::High),
            },
        ];

        let selection = select_model_and_thinking(
            &cli,
            &config,
            &session,
            &registry,
            &scoped_models,
            Path::new("/tmp"),
        )
        .expect("scoped models should honor default provider alias");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
        assert_eq!(selection.thinking_level, model::ThinkingLevel::High);
    }

    // The configured default model matches scoped entries case-insensitively.
    #[test]
    fn select_model_and_thinking_scoped_models_matches_default_model_case_insensitively() {
        let cli = cli::Cli::parse_from(["pi"]);
        let config = Config {
            default_provider: Some("open-router".to_string()),
            default_model: Some("GPT-4O-MINI".to_string()),
            ..Config::default()
        };
        let session = Session::in_memory();
        let registry = registry_with_entries(Vec::new());
        let scoped_models = vec![
            ScopedModel {
                model: test_model_entry("gpt-4o-mini", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::Low),
            },
            ScopedModel {
                model: test_model_entry("gpt-4o", "openrouter", true),
                thinking_level: Some(model::ThinkingLevel::High),
            },
        ];

        let selection = select_model_and_thinking(
            &cli,
            &config,
            &session,
            &registry,
            &scoped_models,
            Path::new("/tmp"),
        )
        .expect("scoped default model should match case-insensitively");

        assert_eq!(selection.model_entry.model.provider, "openrouter");
        assert_eq!(selection.model_entry.model.id, "gpt-4o-mini");
        assert_eq!(selection.thinking_level, model::ThinkingLevel::Low);
    }

    // Without an alias entry, the newest dated snapshot wins.
    #[test]
    fn parse_model_pattern_picks_latest_dated_when_no_alias_exists() {
        let available = vec![
            test_model_entry("gpt-5.1-codex-20250101", "openai", true),
            test_model_entry("gpt-5.1-codex-20250601", "openai", true),
        ];

        let parsed = parse_model_pattern("gpt-5.1-codex", &available);
        let model = parsed.model.expect("model should match");

        assert_eq!(model.model.id, "gpt-5.1-codex-20250601");
        assert!(parsed.thinking_level.is_none());
        assert!(parsed.warning.is_none());
    }

    // Splitting happens at the FIRST slash; empty halves are rejected.
    #[test]
    fn split_provider_model_spec_preserves_nested_model_paths() {
        let parsed = split_provider_model_spec("openrouter/anthropic/claude-sonnet-4.5")
            .expect("provider/model spec");
        assert_eq!(parsed.0, "openrouter");
        assert_eq!(parsed.1, "anthropic/claude-sonnet-4.5");

        assert!(split_provider_model_spec("openrouter/").is_none());
        assert!(split_provider_model_spec("/anthropic/claude").is_none());
        assert!(split_provider_model_spec("no-slash").is_none());
    }

    // Unknown openrouter models fall back to an ad-hoc entry.
    #[test]
    fn try_match_model_supports_openrouter_dynamic_provider_model_ids() {
        let matched = try_match_model("openrouter/google/gemini-2.5-pro", &[])
            .expect("openrouter ad-hoc fallback should resolve");
        assert_eq!(matched.model.provider, "openrouter");
        assert_eq!(matched.model.id, "google/gemini-2.5-pro");
        assert_eq!(matched.model.api, "openai-completions");
        assert_eq!(matched.model.base_url, "https://openrouter.ai/api/v1");
    }

    // Byte-length suffix check must not panic on multi-byte characters.
    #[test]
    fn is_alias_handles_non_ascii_model_ids_without_panicking() {
        assert!(is_alias("é123456789"));
        assert!(is_alias("model-é2345678"));
        assert!(!is_alias("model-20250101"));
    }
1453
    // A valid `:level` suffix is stripped and returned as the thinking level.
    #[test]
    fn parse_model_pattern_parses_thinking_suffix() {
        let available = vec![test_model_entry("gpt-5.1-codex", "openai", true)];
        let parsed = parse_model_pattern("openai/gpt-5.1-codex:high", &available);

        let model = parsed.model.expect("model should match");
        assert_eq!(model.model.id, "gpt-5.1-codex");
        assert_eq!(parsed.thinking_level, Some(model::ThinkingLevel::High));
        assert!(parsed.warning.is_none());
    }

    // An invalid suffix still matches the model but warns and uses default.
    #[test]
    fn parse_model_pattern_warns_for_invalid_thinking_suffix() {
        let available = vec![test_model_entry("gpt-5.1-codex", "openai", true)];
        let parsed = parse_model_pattern("gpt-5.1-codex:extreme", &available);

        assert!(parsed.model.is_some());
        assert!(parsed.thinking_level.is_none());
        assert!(
            parsed
                .warning
                .expect("warning should be present")
                .contains("Invalid thinking level")
        );
    }

    // Non-reasoning models always clamp to Off.
    #[test]
    fn clamp_thinking_level_returns_off_for_non_reasoning_models() {
        let model_entry = test_model_entry("gpt-4o-mini", "openai", false);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::High);
        assert_eq!(clamped, model::ThinkingLevel::Off);
    }

    // XHigh clamps down to High on models that don't support it.
    #[test]
    fn clamp_thinking_level_clamps_xhigh_for_unsupported_models() {
        let model_entry = test_model_entry("gpt-4o", "openai", true);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::XHigh);
        assert_eq!(clamped, model::ThinkingLevel::High);
    }

    // XHigh is preserved on models that support it.
    #[test]
    fn clamp_thinking_level_keeps_xhigh_for_supported_models() {
        let model_entry = test_model_entry("gpt-5.2", "openai", true);
        let clamped = model_entry.clamp_thinking_level(model::ThinkingLevel::XHigh);
        assert_eq!(clamped, model::ThinkingLevel::XHigh);
    }
1500
1501    mod proptests {
1502        use super::*;
1503        use proptest::prelude::*;
1504
1505        // ====================================================================
1506        // parse_models_arg
1507        // ====================================================================
1508
        // Property tests for `parse_models_arg`: no empty segments, trimming,
        // round-tripping, and degenerate all-separator input.
        proptest! {
            #[test]
            fn parse_models_no_empty_strings(s in "([a-z0-9*-]{0,5},?){0,6}") {
                let result = parse_models_arg(&s);
                for m in &result {
                    assert!(!m.is_empty(), "parse_models_arg produced empty string from {s:?}");
                }
            }

            #[test]
            fn parse_models_whitespace_trimmed(m1 in "[a-z]{1,8}", m2 in "[a-z]{1,8}") {
                let with_spaces = format!("  {m1}  ,  {m2}  ");
                let result = parse_models_arg(&with_spaces);
                assert_eq!(result, vec![m1, m2]);
            }

            #[test]
            fn parse_models_round_trip(models in prop::collection::vec("[a-z0-9-]{1,10}", 1..6)) {
                let joined = models.join(",");
                let result = parse_models_arg(&joined);
                assert_eq!(result, models);
            }

            #[test]
            fn parse_models_empty_csv(s in "[ ,]*") {
                let result = parse_models_arg(&s);
                assert!(result.is_empty(), "whitespace/commas-only should yield empty vec");
            }
        }
1538
1539        // ====================================================================
1540        // apply_piped_stdin / normalize_cli
1541        // ====================================================================
1542
        // Property tests for `apply_piped_stdin` and `normalize_cli`:
        // trimming/prepending, no-op conditions, lowercasing, and idempotence.
        proptest! {
            #[test]
            fn apply_piped_stdin_trims_sets_print_and_prepends(
                existing in prop::collection::vec("[A-Za-z0-9._/-]{1,16}", 0..4),
                leading_ws in "[ \\t\\n\\r]{0,4}",
                core in "[A-Za-z0-9._/-]{1,24}",
                trailing_ws in "[ \\t\\n\\r]{0,4}",
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.args = existing.clone();
                cli.print = false;

                let raw = format!("{leading_ws}{core}{trailing_ws}");
                apply_piped_stdin(&mut cli, Some(raw));

                prop_assert!(cli.print);
                prop_assert_eq!(cli.args.len(), existing.len() + 1);
                prop_assert_eq!(cli.args.first().map(String::as_str), Some(core.as_str()));
                prop_assert_eq!(&cli.args[1..], existing.as_slice());
            }

            #[test]
            fn apply_piped_stdin_none_or_whitespace_is_noop(
                existing in prop::collection::vec("[A-Za-z0-9._/-]{1,16}", 0..4),
                initial_print in any::<bool>(),
                initial_no_session in any::<bool>(),
                whitespace in "[ \\t\\n\\r]{0,16}",
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.args = existing.clone();
                cli.print = initial_print;
                cli.no_session = initial_no_session;

                apply_piped_stdin(&mut cli, None);
                prop_assert_eq!(&cli.args, &existing);
                prop_assert_eq!(cli.print, initial_print);
                prop_assert_eq!(cli.no_session, initial_no_session);

                apply_piped_stdin(&mut cli, Some(whitespace));
                prop_assert_eq!(&cli.args, &existing);
                prop_assert_eq!(cli.print, initial_print);
                prop_assert_eq!(cli.no_session, initial_no_session);
            }

            #[test]
            fn normalize_cli_lowercases_provider_and_applies_print_semantics(
                provider in prop::option::of("[A-Za-z0-9_-]{1,20}"),
                print in any::<bool>(),
                initial_no_session in any::<bool>(),
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.provider = provider.clone();
                cli.print = print;
                cli.no_session = initial_no_session;

                normalize_cli(&mut cli);

                let expected_provider = provider.map(|value: String| value.to_ascii_lowercase());
                let expected_no_session = if print { true } else { initial_no_session };

                prop_assert_eq!(cli.provider, expected_provider);
                prop_assert_eq!(cli.no_session, expected_no_session);
            }

            #[test]
            fn normalize_cli_is_idempotent(
                provider in prop::option::of("[A-Za-z0-9_-]{1,20}"),
                print in any::<bool>(),
                initial_no_session in any::<bool>(),
            ) {
                let mut cli = cli::Cli::parse_from(["pi"]);
                cli.provider = provider;
                cli.print = print;
                cli.no_session = initial_no_session;

                normalize_cli(&mut cli);
                let provider_once = cli.provider.clone();
                let no_session_once = cli.no_session;
                let print_once = cli.print;

                normalize_cli(&mut cli);

                prop_assert_eq!(cli.provider, provider_once);
                prop_assert_eq!(cli.no_session, no_session_once);
                prop_assert_eq!(cli.print, print_once);
            }
        }
1630
1631        // ====================================================================
1632        // split_provider_model_spec
1633        // ====================================================================
1634
        // Property tests for `split_provider_model_spec`: first-slash split,
        // trimming, and rejection of empty halves / slash-less input.
        proptest! {
            #[test]
            fn split_spec_first_slash(pre in "[a-z]{1,8}", mid in "[a-z]{1,8}", post in "[a-z]{1,8}") {
                let input = format!("{pre}/{mid}/{post}");
                let (p, m) = split_provider_model_spec(&input).unwrap();
                assert_eq!(p, pre.as_str());
                assert_eq!(m, format!("{mid}/{post}"));
            }

            #[test]
            fn split_spec_trims_whitespace(p in "[a-z]{1,6}", m in "[a-z]{1,6}") {
                let input = format!("  {p}  /  {m}  ");
                let (prov, model) = split_provider_model_spec(&input).unwrap();
                assert_eq!(prov, p.as_str());
                assert_eq!(model, m.as_str());
            }

            #[test]
            fn split_spec_rejects_empty_halves(valid in "[a-z]{1,8}") {
                assert!(split_provider_model_spec(&format!("{valid}/")).is_none());
                assert!(split_provider_model_spec(&format!("/{valid}")).is_none());
            }

            #[test]
            fn split_spec_none_without_slash(s in "[a-z0-9]{1,12}") {
                assert!(split_provider_model_spec(&s).is_none());
            }
        }
1663
1664        // ====================================================================
1665        // is_alias
1666        // ====================================================================
1667
1668        proptest! {
1669            #[test]
1670            fn is_alias_latest_suffix(prefix in "[a-z]{1,10}") {
1671                assert!(is_alias(&format!("{prefix}-latest")));
1672            }
1673
1674            #[test]
1675            fn is_alias_eight_digits_not_alias(prefix in "[a-z]{1,8}", d in "[0-9]{8}") {
1676                let id = format!("{prefix}-{d}");
1677                assert!(!is_alias(&id), "{id} should not be alias (8-digit suffix)");
1678            }
1679
1680            #[test]
1681            fn is_alias_non_eight_digit_suffix(prefix in "[a-z]{1,6}", suffix in "[a-z0-9]{1,7}") {
1682                let id = format!("{prefix}-{suffix}");
1683                // Only 8 pure-digit suffixes are non-alias
1684                if suffix.len() == 8 && suffix.chars().all(|c| c.is_ascii_digit()) {
1685                    assert!(!is_alias(&id));
1686                } else {
1687                    assert!(is_alias(&id));
1688                }
1689            }
1690
1691            #[test]
1692            fn is_alias_no_hyphen(id in "[a-z0-9]{1,12}") {
1693                if !id.contains('-') {
1694                    assert!(is_alias(&id));
1695                }
1696            }
1697
1698            #[test]
1699            fn is_alias_non_ascii_no_panic(id in ".{1,20}") {
1700                let _ = is_alias(&id); // must not panic
1701            }
1702        }
1703
1704        // ====================================================================
1705        // models_equal
1706        // ====================================================================
1707
1708        proptest! {
1709            #[test]
1710            fn models_equal_reflexive(provider in "[a-z]{1,6}", id in "[a-z0-9-]{1,10}") {
1711                let m = test_model_entry(&id, &provider, true);
1712                assert!(models_equal(&m, &m));
1713            }
1714
1715            #[test]
1716            fn models_equal_symmetric(provider in "[a-z]{1,6}", id in "[a-z0-9-]{1,10}") {
1717                let a = test_model_entry(&id, &provider, true);
1718                let b = test_model_entry(&id, &provider, false);
1719                assert_eq!(models_equal(&a, &b), models_equal(&b, &a));
1720            }
1721
1722            #[test]
1723            fn models_equal_different_providers(id in "[a-z]{1,8}", p1 in "[a-z]{1,5}", p2 in "[a-z]{1,5}") {
1724                if p1 != p2 {
1725                    let a = test_model_entry(&id, &p1, true);
1726                    let b = test_model_entry(&id, &p2, true);
1727                    assert!(!models_equal(&a, &b));
1728                }
1729            }
1730
1731            #[test]
1732            fn models_equal_different_ids(id1 in "[a-z]{1,6}", id2 in "[a-z]{1,6}", prov in "[a-z]{1,5}") {
1733                if id1 != id2 {
1734                    let a = test_model_entry(&id1, &prov, true);
1735                    let b = test_model_entry(&id2, &prov, true);
1736                    assert!(!models_equal(&a, &b));
1737                }
1738            }
1739        }
1740
1741        #[test]
1742        fn models_equal_normalizes_provider_aliases_and_model_case() {
1743            let left = test_model_entry("openai/gpt-4o-mini", "openrouter", true);
1744            let right = test_model_entry("OPENAI/GPT-4O-MINI", "open-router", false);
1745            assert!(models_equal(&left, &right));
1746        }
1747    }
1748}