use std::cell::RefCell;
use std::collections::BTreeMap;
use std::sync::OnceLock;
use serde::{Deserialize, Serialize};
use super::providers::anthropic::claude_generation;
use super::providers::openai_compat::gpt_generation;
const BUILTIN_TOML: &str = include_str!("capabilities.toml");
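/// Deserialized form of a capabilities TOML file. `provider` maps a provider
/// name to an ordered list of rules (the first matching rule wins) and
/// `provider_family` maps a provider to the provider whose rules it falls back
/// to. A sketch of the expected layout (field values are illustrative):
///
/// ```toml
/// [[provider.my-proxy]]
/// model_match = "*"
/// native_tools = true
/// tool_search = ["hosted"]
///
/// [provider_family]
/// my-proxy = "openai"
/// ```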
#[derive(Debug, Clone, Deserialize, Default)]
pub struct CapabilitiesFile {
#[serde(default)]
pub provider: BTreeMap<String, Vec<ProviderRule>>,
#[serde(default)]
pub provider_family: BTreeMap<String, String>,
}
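/// A single rule under `[[provider.<name>]]`. Only `model_match` is required;
/// unset fields fall back to the defaults in [`Capabilities::default`] when the
/// rule is resolved. `version_min` is an optional `[major, minor]` floor checked
/// against a version extracted from the model name.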
#[derive(Debug, Clone, Deserialize)]
pub struct ProviderRule {
pub model_match: String,
#[serde(default)]
pub version_min: Option<Vec<u32>>,
#[serde(default)]
pub native_tools: Option<bool>,
#[serde(default)]
pub defer_loading: Option<bool>,
#[serde(default)]
pub tool_search: Option<Vec<String>>,
#[serde(default)]
pub max_tools: Option<u32>,
#[serde(default)]
pub prompt_caching: Option<bool>,
#[serde(default)]
pub vision: Option<bool>,
#[serde(default, alias = "audio_supported")]
pub audio: Option<bool>,
#[serde(default, alias = "pdf_supported")]
pub pdf: Option<bool>,
#[serde(default)]
pub files_api_supported: Option<bool>,
#[serde(default)]
pub structured_output: Option<String>,
#[serde(default)]
pub json_schema: Option<String>,
#[serde(default)]
pub thinking_modes: Option<Vec<String>>,
#[serde(default)]
pub interleaved_thinking_supported: Option<bool>,
#[serde(default)]
pub anthropic_beta_features: Option<Vec<String>>,
#[serde(default)]
pub thinking: Option<bool>,
#[serde(default)]
pub vision_supported: Option<bool>,
#[serde(default)]
pub preserve_thinking: Option<bool>,
#[serde(default)]
pub server_parser: Option<String>,
#[serde(default)]
pub honors_chat_template_kwargs: Option<bool>,
#[serde(default)]
pub requires_completion_tokens: Option<bool>,
#[serde(default)]
pub reasoning_effort_supported: Option<bool>,
#[serde(default)]
pub reasoning_none_supported: Option<bool>,
#[serde(default)]
pub recommended_endpoint: Option<String>,
#[serde(default)]
pub text_tool_wire_format_supported: Option<bool>,
#[serde(default)]
pub thinking_disable_directive: Option<String>,
}
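/// Fully resolved capability set for a `(provider, model)` pair, with every
/// optional rule field collapsed onto a concrete default. Some fields carry
/// legacy counterparts (`vision` / `vision_supported`, `structured_output` /
/// `json_schema`) that are populated alongside the newer names.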
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Capabilities {
pub native_tools: bool,
pub defer_loading: bool,
pub tool_search: Vec<String>,
pub max_tools: Option<u32>,
pub prompt_caching: bool,
pub vision: bool,
pub audio: bool,
pub pdf: bool,
pub files_api_supported: bool,
pub structured_output: Option<String>,
pub json_schema: Option<String>,
pub thinking_modes: Vec<String>,
pub interleaved_thinking_supported: bool,
pub anthropic_beta_features: Vec<String>,
pub vision_supported: bool,
pub preserve_thinking: bool,
pub server_parser: String,
pub honors_chat_template_kwargs: bool,
pub requires_completion_tokens: bool,
pub reasoning_effort_supported: bool,
pub reasoning_none_supported: bool,
pub recommended_endpoint: Option<String>,
pub text_tool_wire_format_supported: bool,
pub thinking_disable_directive: Option<String>,
}
impl Default for Capabilities {
fn default() -> Self {
Self {
native_tools: false,
defer_loading: false,
tool_search: Vec::new(),
max_tools: None,
prompt_caching: false,
vision: false,
audio: false,
pdf: false,
files_api_supported: false,
structured_output: None,
json_schema: None,
thinking_modes: Vec::new(),
interleaved_thinking_supported: false,
anthropic_beta_features: Vec::new(),
vision_supported: false,
preserve_thinking: false,
server_parser: "none".to_string(),
honors_chat_template_kwargs: false,
requires_completion_tokens: false,
reasoning_effort_supported: false,
reasoning_none_supported: false,
recommended_endpoint: None,
text_tool_wire_format_supported: true,
thinking_disable_directive: None,
}
}
}
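/// One display row of the capability matrix: the raw `model_match` pattern plus
/// a condensed view of a rule, tagged with the layer it came from
/// ("project" for user overrides, "builtin" for the embedded rules).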
#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
pub struct ProviderCapabilityMatrixRow {
pub provider: String,
pub model: String,
pub thinking: Vec<String>,
pub vision: bool,
pub audio: bool,
pub pdf: bool,
pub streaming: bool,
pub files_api_supported: bool,
pub json_schema: Option<String>,
pub tools: bool,
pub cache: bool,
pub source: String,
}
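// Per-thread project/user overrides; `None` means only the built-in rules apply.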
thread_local! {
static USER_OVERRIDES: RefCell<Option<CapabilitiesFile>> = const { RefCell::new(None) };
}
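// Built-in rules embedded at compile time and parsed lazily on first use.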
static BUILTIN: OnceLock<CapabilitiesFile> = OnceLock::new();
fn builtin() -> &'static CapabilitiesFile {
BUILTIN.get_or_init(|| {
toml::from_str::<CapabilitiesFile>(BUILTIN_TOML)
.expect("capabilities.toml must parse at build time")
})
}
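/// Install (or clear, by passing `None`) the override file that is consulted
/// before the built-in rules. Overrides are stored per thread.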
pub fn set_user_overrides(file: Option<CapabilitiesFile>) {
USER_OVERRIDES.with(|cell| *cell.borrow_mut() = file);
}
pub fn clear_user_overrides() {
set_user_overrides(None);
}
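/// Parse `src` as a standalone capabilities TOML document and install it as the
/// per-thread override.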
pub fn set_user_overrides_toml(src: &str) -> Result<(), String> {
let parsed: CapabilitiesFile = toml::from_str(src).map_err(|e| e.to_string())?;
set_user_overrides(Some(parsed));
Ok(())
}
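/// Extract the optional `[capabilities]` section from a larger manifest TOML
/// (other tables are ignored) and install it as the per-thread override. A
/// manifest without that section clears any existing override.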
pub fn set_user_overrides_from_manifest_toml(src: &str) -> Result<(), String> {
#[derive(Deserialize)]
struct Manifest {
#[serde(default)]
capabilities: Option<CapabilitiesFile>,
}
let parsed: Manifest = toml::from_str(src).map_err(|e| e.to_string())?;
set_user_overrides(parsed.capabilities);
Ok(())
}
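/// Resolve the effective [`Capabilities`] for a provider/model pair: per-thread
/// overrides are consulted before the built-in rules, and the `provider_family`
/// chain is walked until some layer yields a matching rule. Unknown combinations
/// fall back to [`Capabilities::default`]. A minimal sketch of typical use:
///
/// ```ignore
/// let caps = lookup("anthropic", "claude-opus-4-7");
/// if caps.native_tools {
///     // advertise native tool calling to this provider
/// }
/// ```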
pub fn lookup(provider: &str, model: &str) -> Capabilities {
let user = USER_OVERRIDES.with(|cell| cell.borrow().clone());
lookup_with(provider, model, builtin(), user.as_ref())
}
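/// Flatten every known rule into display rows: project override rules first
/// (tagged "project"), then the built-in rules (tagged "builtin").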
pub fn matrix_rows() -> Vec<ProviderCapabilityMatrixRow> {
let user = USER_OVERRIDES.with(|cell| cell.borrow().clone());
let mut rows = Vec::new();
if let Some(user) = user.as_ref() {
push_matrix_rows(&mut rows, user, "project");
}
push_matrix_rows(&mut rows, builtin(), "builtin");
rows
}
fn push_matrix_rows(
rows: &mut Vec<ProviderCapabilityMatrixRow>,
file: &CapabilitiesFile,
source: &str,
) {
for (provider, rules) in &file.provider {
for rule in rules {
rows.push(rule_to_matrix_row(provider, rule, source));
}
}
}
fn rule_to_matrix_row(
provider: &str,
rule: &ProviderRule,
source: &str,
) -> ProviderCapabilityMatrixRow {
ProviderCapabilityMatrixRow {
provider: provider.to_string(),
model: rule.model_match.clone(),
thinking: rule_thinking_modes(rule),
vision: rule_vision(rule),
audio: rule.audio.unwrap_or(false),
pdf: rule.pdf.unwrap_or(false),
streaming: true,
files_api_supported: rule.files_api_supported.unwrap_or(false),
json_schema: rule_structured_output(rule),
tools: rule.native_tools.unwrap_or(false),
cache: rule.prompt_caching.unwrap_or(false),
source: source.to_string(),
}
}
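// Prefer the explicit `thinking_modes` list; fall back to `["enabled"]` when only
// the legacy boolean `thinking` flag is set.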
fn rule_thinking_modes(rule: &ProviderRule) -> Vec<String> {
rule.thinking_modes.clone().unwrap_or_else(|| {
if rule.thinking.unwrap_or(false) {
vec!["enabled".to_string()]
} else {
Vec::new()
}
})
}
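// `vision` with a fallback to the legacy `vision_supported` key.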
fn rule_vision(rule: &ProviderRule) -> bool {
rule.vision.or(rule.vision_supported).unwrap_or(false)
}
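// Core resolution. The `mock` provider borrows the anthropic and openai rule sets
// based on the model name; every other provider walks its `provider_family` chain
// (cycle-safe via the visited set) until some layer yields a matching rule.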
fn lookup_with(
provider: &str,
model: &str,
builtin: &CapabilitiesFile,
user: Option<&CapabilitiesFile>,
) -> Capabilities {
if provider == "mock" {
if let Some(caps) = try_match_layer(user, builtin, "anthropic", model, provider) {
return caps;
}
if let Some(caps) = try_match_layer(user, builtin, "openai", model, provider) {
return caps;
}
return Capabilities::default();
}
let mut current = provider.to_string();
let mut visited: std::collections::HashSet<String> = std::collections::HashSet::new();
while visited.insert(current.clone()) {
if let Some(caps) = try_match_layer(user, builtin, &current, model, provider) {
return caps;
}
let next = user
.and_then(|f| f.provider_family.get(&current))
.or_else(|| builtin.provider_family.get(&current))
.cloned();
match next {
Some(parent) => current = parent,
None => break,
}
}
Capabilities::default()
}
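// Check a single provider layer: user override rules win over built-in rules, and
// within a file the first matching rule wins.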
fn try_match_layer(
user: Option<&CapabilitiesFile>,
builtin: &CapabilitiesFile,
layer_provider: &str,
model: &str,
_original_provider: &str,
) -> Option<Capabilities> {
if let Some(user) = user {
if let Some(rules) = user.provider.get(layer_provider) {
for rule in rules {
if rule_matches(rule, model) {
return Some(rule_to_caps(rule));
}
}
}
}
if let Some(rules) = builtin.provider.get(layer_provider) {
for rule in rules {
if rule_matches(rule, model) {
return Some(rule_to_caps(rule));
}
}
}
None
}
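// Collapse a matched rule onto concrete capability values, applying the same
// defaults as `Capabilities::default` for fields the rule leaves unset.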
fn rule_to_caps(rule: &ProviderRule) -> Capabilities {
let thinking_modes = rule_thinking_modes(rule);
Capabilities {
native_tools: rule.native_tools.unwrap_or(false),
defer_loading: rule.defer_loading.unwrap_or(false),
tool_search: rule.tool_search.clone().unwrap_or_default(),
max_tools: rule.max_tools,
prompt_caching: rule.prompt_caching.unwrap_or(false),
vision: rule_vision(rule),
audio: rule.audio.unwrap_or(false),
pdf: rule.pdf.unwrap_or(false),
files_api_supported: rule.files_api_supported.unwrap_or(false),
structured_output: rule_structured_output(rule),
json_schema: rule_structured_output(rule),
thinking_modes,
interleaved_thinking_supported: rule.interleaved_thinking_supported.unwrap_or(false),
anthropic_beta_features: rule.anthropic_beta_features.clone().unwrap_or_default(),
vision_supported: rule.vision_supported.unwrap_or(false),
preserve_thinking: rule.preserve_thinking.unwrap_or(false),
server_parser: rule
.server_parser
.clone()
.unwrap_or_else(|| "none".to_string()),
honors_chat_template_kwargs: rule.honors_chat_template_kwargs.unwrap_or(false),
requires_completion_tokens: rule.requires_completion_tokens.unwrap_or(false),
reasoning_effort_supported: rule.reasoning_effort_supported.unwrap_or(false),
reasoning_none_supported: rule.reasoning_none_supported.unwrap_or(false),
recommended_endpoint: rule.recommended_endpoint.clone(),
text_tool_wire_format_supported: rule.text_tool_wire_format_supported.unwrap_or(true),
thinking_disable_directive: rule.thinking_disable_directive.clone(),
}
}
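// Merge `structured_output` with its legacy `json_schema` alias and treat an
// explicit "none" as unset.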
fn rule_structured_output(rule: &ProviderRule) -> Option<String> {
rule.structured_output
.clone()
.or_else(|| rule.json_schema.clone())
.filter(|value| value != "none")
}
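// A rule matches when its glob pattern matches the lowercased model name and, if
// `version_min = [major, minor]` is present, the version extracted from the model
// name meets that floor. Rules with a malformed `version_min`, or models whose
// version cannot be extracted, never match.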
fn rule_matches(rule: &ProviderRule, model: &str) -> bool {
let lower = model.to_lowercase();
if !glob_match(&rule.model_match.to_lowercase(), &lower) {
return false;
}
if let Some(version_min) = &rule.version_min {
if version_min.len() != 2 {
return false;
}
let want = (version_min[0], version_min[1]);
let have = match extract_version(model) {
Some(v) => v,
None => return false,
};
if have < want {
return false;
}
}
true
}
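// Pull a `(major, minor)` version out of known model-name families (Claude and
// GPT generations).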
fn extract_version(model: &str) -> Option<(u32, u32)> {
claude_generation(model).or_else(|| gpt_generation(model))
}
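// Minimal glob matching for rule patterns: handles `*sub*`, `prefix*`, `*suffix`,
// a single interior `*` (`prefix*suffix`), and exact names. It is deliberately not
// a general glob engine.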
fn glob_match(pattern: &str, input: &str) -> bool {
if let Some(prefix) = pattern.strip_suffix('*') {
if let Some(rest) = prefix.strip_prefix('*') {
return input.contains(rest);
}
return input.starts_with(prefix);
}
if let Some(suffix) = pattern.strip_prefix('*') {
return input.ends_with(suffix);
}
if pattern.contains('*') {
let parts: Vec<&str> = pattern.split('*').collect();
if parts.len() == 2 {
return input.starts_with(parts[0]) && input.ends_with(parts[1]);
}
return input == pattern;
}
input == pattern
}
#[cfg(test)]
mod tests {
use super::*;
fn reset() {
clear_user_overrides();
}
#[test]
fn anthropic_opus_47_gets_full_capabilities() {
reset();
let caps = lookup("anthropic", "claude-opus-4-7");
assert!(caps.native_tools);
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["bm25", "regex"]);
assert!(caps.prompt_caching);
assert_eq!(caps.thinking_modes, vec!["adaptive"]);
assert!(caps.vision_supported);
assert!(caps.audio);
assert!(caps.pdf);
assert!(caps.files_api_supported);
assert_eq!(caps.max_tools, Some(10000));
}
#[test]
fn anthropic_opus_46_uses_budgeted_thinking() {
reset();
let caps = lookup("anthropic", "claude-opus-4-6");
assert_eq!(caps.thinking_modes, vec!["enabled"]);
assert!(caps.interleaved_thinking_supported);
}
#[test]
fn anthropic_opus_45_does_not_support_interleaved_thinking() {
reset();
let caps = lookup("anthropic", "claude-opus-4-5");
assert_eq!(caps.thinking_modes, vec!["enabled"]);
assert!(!caps.interleaved_thinking_supported);
}
#[test]
fn override_can_supply_anthropic_beta_features() {
reset();
let toml_src = r#"
[[provider.anthropic]]
model_match = "claude-custom-*"
native_tools = true
anthropic_beta_features = ["fine-grained-tool-streaming-2025-05-14"]
"#;
set_user_overrides_toml(toml_src).unwrap();
let caps = lookup("anthropic", "claude-custom-1");
assert_eq!(
caps.anthropic_beta_features,
vec!["fine-grained-tool-streaming-2025-05-14"]
);
reset();
}
#[test]
fn anthropic_haiku_44_has_no_tool_search() {
reset();
let caps = lookup("anthropic", "claude-haiku-4-4");
assert!(caps.native_tools);
assert!(caps.prompt_caching);
assert!(!caps.defer_loading);
assert!(caps.tool_search.is_empty());
}
#[test]
fn anthropic_haiku_45_supports_tool_search() {
reset();
let caps = lookup("anthropic", "claude-haiku-4-5");
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["bm25", "regex"]);
}
#[test]
fn old_claude_gets_catchall() {
reset();
let caps = lookup("anthropic", "claude-opus-3-5");
assert!(caps.native_tools);
assert!(caps.prompt_caching);
assert!(!caps.defer_loading);
assert!(caps.tool_search.is_empty());
}
#[test]
fn openai_gpt_54_supports_tool_search() {
reset();
let caps = lookup("openai", "gpt-5.4");
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["hosted", "client"]);
assert_eq!(caps.json_schema.as_deref(), Some("native"));
assert_eq!(caps.thinking_modes, vec!["effort"]);
assert!(caps.reasoning_effort_supported);
assert!(caps.reasoning_none_supported);
}
#[test]
fn openai_gpt_53_has_reasoning_none_without_tool_search() {
reset();
let caps = lookup("openai", "gpt-5.3");
assert!(caps.native_tools);
assert!(!caps.defer_loading);
assert!(caps.vision_supported);
assert!(caps.tool_search.is_empty());
assert_eq!(caps.thinking_modes, vec!["effort"]);
assert!(caps.reasoning_effort_supported);
assert!(caps.reasoning_none_supported);
}
#[test]
fn openai_original_gpt_5_has_reasoning_floor_without_none() {
reset();
let caps = lookup("openai", "gpt-5");
assert!(caps.native_tools);
assert!(!caps.defer_loading);
assert_eq!(caps.thinking_modes, vec!["effort"]);
assert!(caps.reasoning_effort_supported);
assert!(!caps.reasoning_none_supported);
}
#[test]
fn openai_gpt_4o_matrix_fields_include_multimodal_support() {
reset();
let caps = lookup("openai", "gpt-4o");
assert!(caps.native_tools);
assert!(caps.vision);
assert!(caps.audio);
assert!(!caps.pdf);
assert_eq!(caps.json_schema.as_deref(), Some("native"));
}
#[test]
fn openai_reasoning_models_support_effort() {
reset();
let caps = lookup("openai", "o3");
assert_eq!(caps.thinking_modes, vec!["effort"]);
assert!(caps.requires_completion_tokens);
assert!(caps.reasoning_effort_supported);
let prefixed = lookup("openrouter", "openai/o4-mini");
assert!(prefixed.requires_completion_tokens);
assert!(prefixed.reasoning_effort_supported);
}
#[test]
fn vision_capability_gates_known_multimodal_models() {
reset();
assert!(lookup("openai", "gpt-4o").vision_supported);
assert!(lookup("openai", "gpt-5.4-preview").vision_supported);
assert!(lookup("anthropic", "claude-sonnet-4-6").vision_supported);
assert!(lookup("anthropic", "claude-sonnet-4-6").pdf);
assert!(lookup("anthropic", "claude-sonnet-4-6").files_api_supported);
assert!(lookup("openrouter", "google/gemini-2.5-flash").vision_supported);
assert!(lookup("gemini", "gemini-2.5-flash").vision_supported);
assert!(lookup("gemini", "gemini-2.5-flash").audio);
assert!(lookup("gemini", "gemini-2.5-flash").pdf);
assert!(lookup("ollama", "llava:latest").vision_supported);
assert!(!lookup("openai", "gpt-3.5-turbo").vision_supported);
assert!(!lookup("ollama", "qwen3.5:35b-a3b-coding-nvfp4").vision_supported);
}
#[test]
fn openrouter_inherits_openai() {
reset();
let caps = lookup("openrouter", "gpt-5.4");
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["hosted", "client"]);
}
#[test]
fn groq_inherits_openai_family_only() {
reset();
let caps = lookup("groq", "gpt-5.5-preview");
assert!(caps.defer_loading);
}
#[test]
fn mock_with_claude_model_routes_to_anthropic() {
reset();
let caps = lookup("mock", "claude-sonnet-4-7");
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["bm25", "regex"]);
}
#[test]
fn mock_with_gpt_model_routes_to_openai() {
reset();
let caps = lookup("mock", "gpt-5.4-preview");
assert!(caps.defer_loading);
assert_eq!(caps.tool_search, vec!["hosted", "client"]);
}
#[test]
fn qwen36_ollama_preserves_thinking() {
reset();
let caps = lookup("ollama", "qwen3.6:35b-a3b-coding-nvfp4");
assert!(caps.native_tools);
assert_eq!(caps.json_schema.as_deref(), Some("format_kw"));
assert!(!caps.thinking_modes.is_empty());
assert!(
caps.preserve_thinking,
"Qwen3.6 should enable preserve_thinking by default for long-horizon loops"
);
assert_eq!(caps.server_parser, "ollama_qwen3coder");
assert!(!caps.honors_chat_template_kwargs);
assert_eq!(
caps.recommended_endpoint.as_deref(),
Some("/api/generate-raw")
);
assert!(!caps.text_tool_wire_format_supported);
}
#[test]
fn qwen35_ollama_does_not_preserve_thinking() {
reset();
let caps = lookup("ollama", "qwen3.5:35b-a3b-coding-nvfp4");
assert!(caps.native_tools);
assert!(!caps.thinking_modes.is_empty());
assert!(
!caps.preserve_thinking,
"Qwen3.5 lacks the preserve_thinking kwarg — rely on the chat template's rolling checkpoint instead"
);
assert_eq!(caps.server_parser, "ollama_qwen3coder");
assert!(!caps.text_tool_wire_format_supported);
}
#[test]
fn qwen36_routed_providers_all_preserve_thinking() {
reset();
for (provider, model) in [
("openrouter", "qwen/qwen3.6-plus"),
("together", "Qwen/Qwen3.6-Plus"),
("huggingface", "Qwen/Qwen3.6-35B-A3B"),
("fireworks", "accounts/fireworks/models/qwen3p6-plus"),
("dashscope", "qwen3.6-plus"),
("llamacpp", "unsloth/Qwen3.6-35B-A3B-GGUF"),
("local", "Qwen3.6-35B-A3B"),
("mlx", "unsloth/Qwen3.6-27B-UD-MLX-4bit"),
("mlx", "Qwen/Qwen3.6-27B"),
] {
let caps = lookup(provider, model);
assert!(
!caps.thinking_modes.is_empty(),
"{provider}/{model}: thinking"
);
assert!(
caps.preserve_thinking,
"{provider}/{model}: preserve_thinking must be on for Qwen3.6"
);
assert!(caps.native_tools, "{provider}/{model}: native_tools");
assert_ne!(
caps.server_parser, "ollama_qwen3coder",
"{provider}/{model}: only Ollama routes through the qwen3coder response parser"
);
}
}
#[test]
fn qwen_coder_models_do_not_claim_thinking_modes() {
reset();
for (provider, model) in [
("together", "Qwen/Qwen3-Coder-Next-FP8"),
("together", "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8"),
("openrouter", "qwen/qwen3-coder-next"),
("huggingface", "Qwen/Qwen3-Coder-Next"),
] {
let caps = lookup(provider, model);
assert!(caps.native_tools, "{provider}/{model}: native_tools");
assert!(
caps.thinking_modes.is_empty(),
"{provider}/{model}: coder models are non-thinking routes"
);
assert!(
!caps.preserve_thinking,
"{provider}/{model}: preserve_thinking must stay off"
);
assert!(
caps.thinking_disable_directive.is_none(),
"{provider}/{model}: no /no_think shim should be needed"
);
}
}
#[test]
fn llamacpp_qwen_keeps_text_tool_wire_format() {
reset();
let caps = lookup("llamacpp", "unsloth/Qwen3.5-Coder-GGUF");
assert_eq!(caps.server_parser, "none");
assert!(caps.honors_chat_template_kwargs);
assert!(caps.text_tool_wire_format_supported);
assert_eq!(
caps.recommended_endpoint.as_deref(),
Some("/v1/chat/completions")
);
}
#[test]
fn dashscope_and_llamacpp_resolve_capabilities() {
reset();
let caps = lookup("dashscope", "gpt-5.4-preview");
assert!(caps.defer_loading);
let caps = lookup("llamacpp", "gpt-5.4-preview");
assert!(caps.defer_loading);
}
#[test]
fn unknown_provider_has_no_capabilities() {
reset();
let caps = lookup("my-custom-proxy", "foo-bar-1");
assert!(!caps.native_tools);
assert!(!caps.defer_loading);
assert!(caps.tool_search.is_empty());
}
#[test]
fn user_override_adds_new_provider() {
reset();
let toml_src = r#"
[[provider.my-proxy]]
model_match = "*"
native_tools = true
tool_search = ["hosted"]
"#;
set_user_overrides_toml(toml_src).unwrap();
let caps = lookup("my-proxy", "anything");
assert!(caps.native_tools);
assert_eq!(caps.tool_search, vec!["hosted"]);
clear_user_overrides();
}
#[test]
fn user_override_takes_precedence_over_builtin() {
reset();
let toml_src = r#"
[[provider.anthropic]]
model_match = "claude-opus-*"
native_tools = true
defer_loading = false
tool_search = []
"#;
set_user_overrides_toml(toml_src).unwrap();
let caps = lookup("anthropic", "claude-opus-4-7");
assert!(caps.native_tools);
assert!(!caps.defer_loading);
assert!(caps.tool_search.is_empty());
clear_user_overrides();
}
#[test]
fn user_override_from_manifest_toml() {
reset();
let manifest = r#"
[package]
name = "demo"
[[capabilities.provider.my-proxy]]
model_match = "*"
native_tools = true
tool_search = ["hosted"]
"#;
set_user_overrides_from_manifest_toml(manifest).unwrap();
let caps = lookup("my-proxy", "foo");
assert!(caps.native_tools);
assert_eq!(caps.tool_search, vec!["hosted"]);
clear_user_overrides();
}
#[test]
fn version_min_requires_parseable_model() {
reset();
let toml_src = r#"
[[provider.custom]]
model_match = "*"
version_min = [5, 4]
native_tools = true
"#;
set_user_overrides_toml(toml_src).unwrap();
let caps = lookup("custom", "mystery-model");
assert!(!caps.native_tools);
clear_user_overrides();
}
#[test]
fn glob_match_substring() {
assert!(glob_match("*gpt*", "openai/gpt-5.4"));
assert!(glob_match("*claude*", "anthropic/claude-opus-4-7"));
assert!(!glob_match("*xyz*", "openai/gpt-5.4"));
}
#[test]
fn openrouter_namespaced_anthropic_model() {
reset();
let caps = lookup("anthropic", "anthropic/claude-opus-4-7");
assert!(caps.defer_loading);
}
#[test]
fn matrix_rows_include_provider_patterns_and_sources() {
reset();
let rows = matrix_rows();
assert!(rows.iter().any(|row| {
row.provider == "openai"
&& row.model == "gpt-4o*"
&& row.vision
&& row.audio
&& row.json_schema.as_deref() == Some("native")
&& row.source == "builtin"
}));
}
}