use crate::AgentError;
use crate::a2a::AgentSkill;
use crate::browser::{BrowserAgentConfig, BrowsrClientConfig};
use crate::configuration::DefinitionOverrides;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::default::Default;
/// Fallback timeout (seconds) for external tool calls when
/// `AgentStrategy::external_tool_timeout_secs` is not set.
pub const DEFAULT_EXTERNAL_TOOL_TIMEOUT_SECS: u64 = 120;
/// A skill an agent advertises as loadable (see the `load_skill` builtin tool).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct AvailableSkill {
    /// Stable identifier used to load the skill.
    pub id: String,
    /// Human-readable skill name.
    pub name: String,
    /// Optional free-form description; omitted from output when absent.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// Tuning knobs for how an agent plans and executes work.
///
/// All fields are optional; the accessor methods fall back to a default
/// when a field is `None`.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(deny_unknown_fields, rename_all = "snake_case")]
pub struct AgentStrategy {
    /// How deeply the agent reasons before acting.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_depth: Option<ReasoningDepth>,
    /// Whether the agent acts through tool calls or generated code.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub execution_mode: Option<ExecutionMode>,
    /// When/whether the agent revises its plan mid-run.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub replanning: Option<ReplanningConfig>,
    /// Per-call timeout for external tools, in seconds.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub external_tool_timeout_secs: Option<u64>,
}
impl AgentStrategy {
pub fn get_reasoning_depth(&self) -> ReasoningDepth {
self.reasoning_depth.clone().unwrap_or_default()
}
pub fn get_execution_mode(&self) -> ExecutionMode {
self.execution_mode.clone().unwrap_or_default()
}
pub fn get_replanning(&self) -> ReplanningConfig {
self.replanning.clone().unwrap_or_default()
}
pub fn get_external_tool_timeout_secs(&self) -> u64 {
self.external_tool_timeout_secs
.unwrap_or(DEFAULT_EXTERNAL_TOOL_TIMEOUT_SECS)
}
}
/// Languages supported by code-mode execution; currently only TypeScript.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum CodeLanguage {
    #[default]
    Typescript,
}
impl std::fmt::Display for CodeLanguage {
    /// Renders the language as its lowercase wire name ("typescript").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            CodeLanguage::Typescript => "typescript",
        };
        f.write_str(name)
    }
}
/// Configuration for the optional self-reflection pass.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct ReflectionConfig {
    /// Master switch; reflection runs only when true.
    #[serde(default)]
    pub enabled: bool,
    /// Optional name of a dedicated agent to run the reflection.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reflection_agent: Option<String>,
    /// When reflection fires (default: end of execution).
    #[serde(default)]
    pub trigger: ReflectionTrigger,
    /// How thorough the reflection is (default: light).
    #[serde(default)]
    pub depth: ReflectionDepth,
}
/// When the reflection pass is triggered during a run.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReflectionTrigger {
    #[default]
    EndOfExecution,
    AfterEachStep,
    AfterFailures,
    /// Fires every N steps; serialized as `{"after_n_steps": N}`.
    AfterNSteps(usize),
}
/// How thorough the reflection pass is.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReflectionDepth {
    #[default]
    Light,
    Standard,
    Deep,
}
/// Configuration for the planning phase.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct PlanConfig {
    /// Optional model override used only while planning.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    /// Upper bound on planning iterations (serde default: 10).
    #[serde(default = "default_plan_max_iterations")]
    pub max_iterations: usize,
}
// Manual Default so `PlanConfig::default()` agrees with the serde defaults
// (a derived Default would set max_iterations to 0).
impl Default for PlanConfig {
    fn default() -> Self {
        Self {
            model_settings: None,
            max_iterations: default_plan_max_iterations(),
        }
    }
}
/// Serde default for `PlanConfig::max_iterations`.
fn default_plan_max_iterations() -> usize {
    10
}
/// How deeply the agent reasons before acting.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningDepth {
    Shallow,
    #[default]
    Standard,
    Deep,
}
/// How the agent performs actions: direct tool calls, or generated code.
/// Internally tagged: `{"type": "code", "language": "typescript"}`.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum ExecutionMode {
    #[default]
    Tools,
    Code { language: CodeLanguage },
}
/// Opt-in configuration for mid-run replanning.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub struct ReplanningConfig {
    /// When replanning fires; `None` means `ReplanningTrigger::Never`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub trigger: Option<ReplanningTrigger>,
    /// Master switch; `None` is treated as disabled.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}
/// When the replanning pass is triggered.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReplanningTrigger {
    #[default]
    Never,
    AfterReflection,
    /// Fires every N iterations; serialized as `{"after_n_iterations": N}`.
    AfterNIterations(usize),
    AfterFailures,
}
impl ReplanningConfig {
pub fn get_trigger(&self) -> ReplanningTrigger {
self.trigger.clone().unwrap_or_default()
}
pub fn is_enabled(&self) -> bool {
self.enabled.unwrap_or(false)
}
}
/// How a run's steps relate to each other.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ExecutionKind {
    #[default]
    Retriable,
    Interleaved,
}
/// What kind of memory the agent keeps between turns.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum MemoryKind {
    #[default]
    None,
    ShortTerm,
    LongTerm,
}
/// How tool schemas are delivered to the model.
/// Aliases keep older config spellings ("all_tools", "tool_search") working.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolDeliveryMode {
    /// All tool schemas sent up front.
    #[serde(alias = "all_tools")]
    Full,
    /// Schemas fetched on demand (default).
    #[default]
    #[serde(alias = "tool_search")]
    Deferred,
    /// Only tool names are sent.
    NamesOnly,
}
/// Which OpenAI API surface to target; `Auto` resolves per model name.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum OpenAiApiFormat {
    #[default]
    Auto,
    Completions,
    Responses,
}
impl OpenAiApiFormat {
    /// Maps the configured format (possibly `Auto`) to a concrete API choice
    /// for the given model name.
    pub fn resolve(&self, model: &str) -> ResolvedOpenAiApiFormat {
        match self {
            OpenAiApiFormat::Completions => ResolvedOpenAiApiFormat::Completions,
            OpenAiApiFormat::Responses => ResolvedOpenAiApiFormat::Responses,
            OpenAiApiFormat::Auto if Self::model_requires_responses_api(model) => {
                ResolvedOpenAiApiFormat::Responses
            }
            OpenAiApiFormat::Auto => ResolvedOpenAiApiFormat::Completions,
        }
    }
    /// Name heuristic for models only served by the Responses API:
    /// codex variants and `-pro` / `-deep-research` suffixes.
    fn model_requires_responses_api(model: &str) -> bool {
        let lowered = model.to_lowercase();
        [
            lowered.starts_with("codex"),
            lowered.ends_with("-codex"),
            lowered.contains("/codex"),
            lowered.ends_with("-pro"),
            lowered.ends_with("-deep-research"),
        ]
        .into_iter()
        .any(|hit| hit)
    }
}
/// Concrete API choice after `OpenAiApiFormat::Auto` has been resolved
/// against a model name.
#[derive(Debug, Clone, PartialEq)]
pub enum ResolvedOpenAiApiFormat {
    Completions,
    Responses,
}
/// Wire format the model uses to express tool calls.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolCallFormat {
    /// XML-tagged tool calls (default).
    #[default]
    Xml,
    /// Serializes as "json_l" under the snake_case rename rule.
    JsonL,
    Code,
    // `rename_all = "snake_case"` already yields "provider", so the explicit
    // `#[serde(rename = "provider")]` was redundant and has been removed.
    Provider,
    None,
}
/// Overrides for how the outgoing user message is assembled.
#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema, Default)]
pub struct UserMessageOverrides {
    // NOTE(review): no #[serde(default)], so `parts` is required whenever this
    // struct appears in config — confirm that is intended.
    pub parts: Vec<PartDefinition>,
    #[serde(default)]
    pub include_artifacts: bool,
    /// Defaults to `Some(true)` when the field is absent during deserialization.
    #[serde(default = "default_include_step_count")]
    pub include_step_count: Option<bool>,
}
/// Serde default for `UserMessageOverrides::include_step_count`.
fn default_include_step_count() -> Option<bool> {
    Some(true)
}
/// One piece of a templated user message.
///
/// Adjacently tagged: `{"type": "template", "source": "..."}`.
#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
#[serde(tag = "type", content = "source", rename_all = "snake_case")]
pub enum PartDefinition {
    /// Literal template text.
    Template(String),
    /// Key into session state; lookup semantics are defined by the consumer.
    SessionKey(String),
}
/// Minimal subset of an agent definition needed to drive an LLM call.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct LlmDefinition {
    pub name: String,
    /// Absent means "no model configured"; the accessors return an error then.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    #[serde(default)]
    pub tool_format: ToolCallFormat,
    #[serde(default)]
    pub tool_delivery_mode: ToolDeliveryMode,
}
impl LlmDefinition {
    // Shared error text for the accessors below; kept in one place so the two
    // getters can never drift apart.
    const NO_MODEL_MSG: &'static str =
        "No model configured. Please set a default model in Agent Settings → Default Model.";
    /// Borrows the model settings, or returns the standard "not configured" error.
    pub fn ms(&self) -> Result<&ModelSettings, String> {
        self.model_settings
            .as_ref()
            .ok_or_else(|| Self::NO_MODEL_MSG.to_string())
    }
    /// Mutable variant of [`Self::ms`].
    pub fn ms_mut(&mut self) -> Result<&mut ModelSettings, String> {
        self.model_settings
            .as_mut()
            .ok_or_else(|| Self::NO_MODEL_MSG.to_string())
    }
}
/// Where an agent is allowed to run. Eq + Hash enable direct set-based
/// deduplication in `deserialize_runtime_modes`.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Hash, Default)]
#[serde(rename_all = "snake_case")]
pub enum RuntimeMode {
    Cli,
    #[default]
    Cloud,
    Browser,
}
/// Primary agent definition, as authored in config files or markdown frontmatter.
///
/// NOTE(review): the derived `Default` does not run the serde `default = ...`
/// functions, so `StandardDefinition::default()` gets `version: None` and
/// `history_size: None` while deserializing an empty document yields
/// `Some(...)` — confirm this divergence is intended.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct StandardDefinition {
    /// Agent name; validated by `validate_plugin_name` during markdown parsing.
    pub name: String,
    #[serde(default)]
    pub description: String,
    /// Defaults to the crate's current agent version string when absent.
    #[serde(default = "default_agent_version")]
    pub version: Option<String>,
    /// System instructions; for markdown agents this is the body after the frontmatter.
    #[serde(default)]
    pub instructions: String,
    #[serde(default)]
    pub mcp_servers: Option<Vec<McpDefinition>>,
    /// Primary model configuration.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    /// Optional separate model for analysis; falls back to `model_settings`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub analysis_model_settings: Option<ModelSettings>,
    /// Defaults to `Some(5)` when absent.
    #[serde(default = "default_history_size")]
    pub history_size: Option<usize>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub strategy: Option<AgentStrategy>,
    #[serde(default)]
    pub icon_url: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_iterations: Option<usize>,
    /// Skills advertised on the A2A agent card.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills_description: Vec<AgentSkill>,
    /// Skills that can be loaded on demand.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub available_skills: Vec<AvailableSkill>,
    /// Names of agents this agent may transfer to.
    #[serde(default)]
    pub sub_agents: Vec<String>,
    #[serde(default)]
    pub tool_format: ToolCallFormat,
    #[serde(default)]
    pub tool_delivery_mode: ToolDeliveryMode,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tools: Option<ToolsConfig>,
    /// Named template partials available to instruction templates.
    #[serde(default, skip_serializing_if = "std::collections::HashMap::is_empty")]
    pub partials: std::collections::HashMap<String, String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reflection: Option<ReflectionConfig>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enable_todos: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub browser_config: Option<BrowserAgentConfig>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub include_shell: Option<bool>,
    /// Top-level context window override; takes precedence over the model's
    /// own `context_size` (see `get_effective_context_size`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub context_size: Option<u32>,
    /// Defaults to `Some(true)` when absent.
    #[serde(
        skip_serializing_if = "Option::is_none",
        default = "default_append_default_instructions"
    )]
    pub append_default_instructions: Option<bool>,
    /// Defaults to `Some(true)` when absent.
    #[serde(
        skip_serializing_if = "Option::is_none",
        default = "default_include_scratchpad"
    )]
    pub include_scratchpad: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub hooks: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub user_message_overrides: Option<UserMessageOverrides>,
    /// Defaults to true; only serialized when explicitly disabled.
    #[serde(
        default = "default_compaction_enabled",
        skip_serializing_if = "is_true"
    )]
    pub compaction_enabled: bool,
    /// Legacy config spelling "deepagent" is accepted as an alias.
    #[serde(default, alias = "deepagent")]
    pub remote: bool,
    /// Allowed runtimes; accepts a single mode or a list (see
    /// `deserialize_runtime_modes`). Empty means "no restriction".
    #[serde(
        default,
        deserialize_with = "deserialize_runtime_modes",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub runtime: Vec<RuntimeMode>,
}
/// Deserializes `runtime` from either a single mode or a list of modes,
/// normalizing to a `Vec`; an absent/null value becomes the empty vec.
/// Duplicate entries in a list are rejected with a deserialization error.
fn deserialize_runtime_modes<'de, D>(deserializer: D) -> Result<Vec<RuntimeMode>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    use serde::de::{self, Deserialize};
    #[derive(Deserialize)]
    #[serde(untagged)]
    enum OneOrMany {
        One(RuntimeMode),
        Many(Vec<RuntimeMode>),
    }
    match Option::<OneOrMany>::deserialize(deserializer)? {
        None => Ok(Vec::new()),
        Some(OneOrMany::One(rt)) => Ok(vec![rt]),
        Some(OneOrMany::Many(v)) => {
            // RuntimeMode is Eq + Hash, so dedup directly on the values
            // instead of allocating a Debug-formatted string per entry.
            let mut seen = std::collections::HashSet::new();
            for rt in &v {
                if !seen.insert(rt) {
                    return Err(de::Error::custom(format!(
                        "duplicate runtime entry: {:?}",
                        rt
                    )));
                }
            }
            Ok(v)
        }
    }
}
/// Serde default for `StandardDefinition::append_default_instructions`.
fn default_append_default_instructions() -> Option<bool> {
    Some(true)
}
/// Serde default for `StandardDefinition::include_scratchpad`.
fn default_include_scratchpad() -> Option<bool> {
    Some(true)
}
/// Serde default for `StandardDefinition::compaction_enabled`.
fn default_compaction_enabled() -> bool {
    true
}
/// `skip_serializing_if` helper: omit `compaction_enabled` when it is true
/// (the default), so only an explicit opt-out is serialized.
fn is_true(v: &bool) -> bool {
    *v
}
impl StandardDefinition {
    /// Runtimes this agent may run in. Explicit `runtime` wins; otherwise a
    /// `remote` agent is CLI-only; an empty result means "no restriction".
    pub fn allowed_runtimes(&self) -> Vec<RuntimeMode> {
        if !self.runtime.is_empty() {
            return self.runtime.clone();
        }
        if self.remote {
            return vec![RuntimeMode::Cli];
        }
        Vec::new()
    }
    /// True when the agent may run in `current`, or in a runtime the runner
    /// can provide on its behalf. Unrestricted agents always run.
    pub fn is_runnable_in(
        &self,
        current: &RuntimeMode,
        runner_provides: Option<&RuntimeMode>,
    ) -> bool {
        let allowed = self.allowed_runtimes();
        if allowed.is_empty() {
            return true;
        }
        if allowed.iter().any(|rt| rt == current) {
            return true;
        }
        match runner_provides {
            Some(p) => allowed.iter().any(|rt| rt == p),
            None => false,
        }
    }
    /// True when browser support is configured and enabled.
    pub fn should_use_browser(&self) -> bool {
        self.browser_config
            .as_ref()
            .map(|cfg| cfg.is_enabled())
            .unwrap_or(false)
    }
    /// Raw browser configuration, if any (enabled or not).
    pub fn browser_settings(&self) -> Option<&BrowserAgentConfig> {
        self.browser_config.as_ref()
    }
    /// Runtime browser-client configuration derived from `browser_config`.
    pub fn browser_runtime_config(&self) -> Option<BrowsrClientConfig> {
        self.browser_config.as_ref().map(|cfg| cfg.runtime_config())
    }
    /// True when the browser session should persist across runs.
    pub fn should_persist_browser_session(&self) -> bool {
        self.browser_config
            .as_ref()
            .map(|cfg| cfg.should_persist_session())
            .unwrap_or(false)
    }
    /// True when reflection is configured and enabled.
    pub fn is_reflection_enabled(&self) -> bool {
        self.reflection.as_ref().map(|r| r.enabled).unwrap_or(false)
    }
    /// Reflection config, but only when enabled.
    pub fn reflection_config(&self) -> Option<&ReflectionConfig> {
        self.reflection.as_ref().filter(|r| r.enabled)
    }
    /// Todos are opt-in: absent means disabled.
    pub fn is_todos_enabled(&self) -> bool {
        self.enable_todos.unwrap_or(false)
    }
    /// Shell tools are opt-in: absent means excluded.
    pub fn should_include_shell(&self) -> bool {
        self.include_shell.unwrap_or(false)
    }
    pub fn model_settings(&self) -> Option<&ModelSettings> {
        self.model_settings.as_ref()
    }
    pub fn model_settings_mut(&mut self) -> Option<&mut ModelSettings> {
        self.model_settings.as_mut()
    }
    /// Effective context window: a positive top-level `context_size` wins,
    /// then a positive model-level value, then the crate default.
    pub fn get_effective_context_size(&self) -> u32 {
        self.context_size
            .filter(|&s| s > 0)
            .or_else(|| {
                self.model_settings()
                    .map(|ms| ms.inner.context_size)
                    .filter(|&s| s > 0)
            })
            .unwrap_or_else(default_context_size)
    }
    /// Model settings for analysis, falling back to the primary model.
    pub fn analysis_model_settings_config(&self) -> Option<&ModelSettings> {
        self.analysis_model_settings
            .as_ref()
            .or_else(|| self.model_settings())
    }
    /// Scratchpad is opt-out: absent means included.
    pub fn include_scratchpad(&self) -> bool {
        self.include_scratchpad.unwrap_or(true)
    }
    /// Applies CLI/runtime overrides on top of this definition, in place.
    ///
    /// NOTE(review): model/temperature/max_tokens overrides are silently
    /// dropped when `model_settings` is `None` — confirm that is intended.
    pub fn apply_overrides(&mut self, overrides: DefinitionOverrides) {
        if let Some(ref mut ms) = self.model_settings {
            if let Some(model) = overrides.model {
                // Strip a leading "provider/" prefix, keeping only the model id.
                ms.model = model
                    .split_once('/')
                    .map(|(_, m)| m.to_string())
                    .unwrap_or(model);
            }
            if let Some(temperature) = overrides.temperature {
                ms.inner.temperature = Some(temperature);
            }
            if let Some(max_tokens) = overrides.max_tokens {
                ms.inner.max_tokens = Some(max_tokens);
            }
        }
        if let Some(max_iterations) = overrides.max_iterations {
            self.max_iterations = Some(max_iterations);
        }
        if let Some(instructions) = overrides.instructions {
            self.instructions = instructions;
        }
        if let Some(remote) = overrides.remote {
            self.remote = remote;
        }
        if let Some(use_browser) = overrides.use_browser {
            let mut config = self.browser_config.clone().unwrap_or_default();
            config.enabled = use_browser;
            self.browser_config = Some(config);
        }
        if let Some(dynamic_tools) = overrides.dynamic_tools {
            let tools = self.tools.get_or_insert_with(ToolsConfig::default);
            tools.dynamic.extend(dynamic_tools);
        }
    }
}
/// Builtin tool names accepted in `ToolsConfig::builtin`.
///
/// NOTE(review): contains "todos" while `CORE_TOOLS` lists "write_todos";
/// neither name appears in the other list — confirm the two are meant to
/// diverge rather than being a drifted rename.
pub const VALID_BUILTIN_TOOLS: &[&str] = &[
    "final",
    "reflect",
    "transfer_to_agent",
    "browsr_scrape",
    "browsr_browser",
    "browsr_crawl",
    "browser_step",
    "search",
    "start_shell",
    "execute_shell",
    "stop_shell",
    "distri_execute_code",
    "tool_search",
    "load_skill",
    "inject_connection_env",
    "console_log",
    "artifact_tool",
    "todos",
];
/// Tools that always keep their full schema regardless of delivery mode
/// (see `ToolsConfig::is_core_tool`).
pub const CORE_TOOLS: &[&str] = &[
    "final",
    "transfer_to_agent",
    "tool_search",
    "write_todos",
    "execute_shell",
    "start_shell",
    "load_skill",
];
/// Default tool count above which deferred delivery kicks in.
pub const DEFAULT_DEFERRED_THRESHOLD: usize = 15;
/// Which tools an agent can use and how their schemas are delivered.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(deny_unknown_fields)]
pub struct ToolsConfig {
    /// Builtin tool names; validated against `VALID_BUILTIN_TOOLS`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub builtin: Vec<String>,
    /// Dynamically constructed tools.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dynamic: Vec<crate::dynamic_tool::DynamicToolFactory>,
    /// Per-MCP-server include/exclude filters.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub mcp: Vec<McpToolConfig>,
    /// External tool names; `None` and `Some(vec![])` are distinct states.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub external: Option<Vec<String>>,
    /// Schema delivery mode; only serialized when not `Deferred`.
    #[serde(default, skip_serializing_if = "is_default_delivery_mode")]
    pub delivery_mode: ToolDeliveryMode,
    /// Override for `DEFAULT_DEFERRED_THRESHOLD`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub deferred_threshold: Option<usize>,
    /// Tools pinned to always receive their full schema.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub always_full_schema: Vec<String>,
}
/// `skip_serializing_if` helper: `Deferred` is the default delivery mode.
fn is_default_delivery_mode(mode: &ToolDeliveryMode) -> bool {
    *mode == ToolDeliveryMode::Deferred
}
impl ToolsConfig {
    /// Names listed under `builtin` that are not recognized builtin tools.
    pub fn invalid_builtin_tools(&self) -> Vec<String> {
        let mut invalid = Vec::new();
        for name in &self.builtin {
            if !VALID_BUILTIN_TOOLS.contains(&name.as_str()) {
                invalid.push(name.clone());
            }
        }
        invalid
    }
    /// Core tools always keep their full schema: the static core set, any
    /// explicitly pinned names, and `call_`-prefixed tools.
    pub fn is_core_tool(&self, name: &str) -> bool {
        if name.starts_with("call_") || CORE_TOOLS.contains(&name) {
            return true;
        }
        self.always_full_schema.iter().any(|pinned| pinned == name)
    }
    /// Deferred-delivery threshold, falling back to the crate default.
    pub fn effective_threshold(&self) -> usize {
        match self.deferred_threshold {
            Some(threshold) => threshold,
            None => DEFAULT_DEFERRED_THRESHOLD,
        }
    }
    /// Currently ignores the tool count and returns the configured mode.
    pub fn effective_delivery_mode(&self, _total_tools: usize) -> ToolDeliveryMode {
        self.delivery_mode.clone()
    }
}
/// Include/exclude filter over one MCP server's tools.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct McpToolConfig {
    /// Name of the MCP server (matches `McpDefinition::name`).
    pub server: String,
    /// Tool names to include; "*" is used as a wildcard by the constructors.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub include: Vec<String>,
    /// Tool names to exclude.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub exclude: Vec<String>,
}
/// Declaration of an MCP server the agent connects to.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct McpDefinition {
    /// Optional tool-name filter.
    #[serde(default)]
    pub filter: Option<Vec<String>>,
    pub name: String,
    /// Whether the server provides tools or acts as an agent.
    #[serde(default)]
    pub r#type: McpServerType,
    #[serde(default)]
    pub auth_config: Option<crate::a2a::SecurityScheme>,
}
/// Role of an MCP server: plain tool provider (default) or agent.
#[derive(Debug, Clone, Serialize, Deserialize, Default, JsonSchema, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum McpServerType {
    #[default]
    Tool,
    Agent,
}
/// LLM provider configuration, internally tagged on `name`
/// (e.g. `{"name": "anthropic", "api_key": "..."}`).
///
/// For every variant, an inline `api_key` (where present) takes precedence
/// over environment secrets (see `required_secret_keys`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "lowercase", tag = "name")]
pub enum ModelProvider {
    /// OpenAI with the standard endpoint; the key always comes from secrets.
    #[serde(rename = "openai")]
    OpenAI {},
    /// Any OpenAI-compatible endpoint.
    #[serde(rename = "openai_compat")]
    OpenAICompatible {
        base_url: String,
        api_key: Option<String>,
        project_id: Option<String>,
    },
    #[serde(rename = "azure_openai")]
    AzureOpenAI {
        base_url: String,
        api_key: Option<String>,
        deployment: String,
        /// Defaults to `ModelProvider::azure_api_version()`.
        #[serde(default = "ModelProvider::azure_api_version")]
        api_version: String,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        /// Defaults to `None`, meaning the client's own default endpoint.
        #[serde(default = "ModelProvider::anthropic_base_url")]
        base_url: Option<String>,
        api_key: Option<String>,
    },
    #[serde(rename = "gemini")]
    Gemini {
        /// Defaults to Gemini's OpenAI-compatible endpoint.
        #[serde(default = "ModelProvider::gemini_base_url")]
        base_url: String,
        api_key: Option<String>,
    },
    #[serde(rename = "azure_ai_foundry")]
    AzureAiFoundry {
        base_url: String,
        api_key: Option<String>,
    },
    #[serde(rename = "aws_bedrock")]
    AwsBedrock {
        base_url: String,
        api_key: Option<String>,
    },
    #[serde(rename = "google_vertex")]
    GoogleVertex {
        base_url: String,
        api_key: Option<String>,
        project_id: Option<String>,
    },
    #[serde(rename = "alibaba_cloud")]
    AlibabaCloud {
        /// Defaults to the international DashScope endpoint.
        #[serde(default = "ModelProvider::alibaba_cloud_base_url")]
        base_url: String,
        api_key: Option<String>,
    },
}
/// Secret-key requirements for one provider, as shown in settings UIs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderSecretDefinition {
    pub id: String,
    pub label: String,
    pub keys: Vec<SecretKeyDefinition>,
}
/// One secret input field (key name, label, placeholder, flags).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecretKeyDefinition {
    pub key: String,
    pub label: String,
    pub placeholder: String,
    /// Defaults to true when deserialized with the field absent.
    #[serde(default = "default_required")]
    pub required: bool,
    /// Defaults to true (masked in UIs) when absent.
    #[serde(default = "default_sensitive")]
    pub sensitive: bool,
}
/// Serde default for `SecretKeyDefinition::required`.
fn default_required() -> bool {
    true
}
/// Serde default for `SecretKeyDefinition::sensitive`.
fn default_sensitive() -> bool {
    true
}
/// Minimal model descriptor (id + display name).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    pub id: String,
    pub name: String,
}
// Shape of one provider entry in the bundled default_models.json.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DefaultProviderEntry {
    id: String,
    label: String,
    keys: Vec<SecretKeyDefinition>,
    models: Vec<crate::models::Model>,
}
// Top-level shape of the bundled default_models.json.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DefaultModelsFile {
    providers: Vec<DefaultProviderEntry>,
}
/// Lazily parses the bundled `default_models.json`, caching the result for
/// the process lifetime.
///
/// Panics (via `expect`) on first use if the bundled JSON is malformed —
/// acceptable because the file is compiled in with `include_str!`.
fn load_default_providers() -> &'static [DefaultProviderEntry] {
    use std::sync::OnceLock;
    static PROVIDERS: OnceLock<Vec<DefaultProviderEntry>> = OnceLock::new();
    PROVIDERS.get_or_init(|| {
        let json = include_str!("default_models.json");
        let file: DefaultModelsFile =
            serde_json::from_str(json).expect("Failed to parse default_models.json");
        file.providers
    })
}
/// A provider's bundled model catalog.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderModels {
    pub provider_id: String,
    pub provider_label: String,
    pub models: Vec<crate::models::Model>,
}
/// Like `ProviderModels`, plus whether the provider's secrets are configured.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderModelsStatus {
    pub provider_id: String,
    pub provider_label: String,
    pub configured: bool,
    pub models: Vec<crate::models::Model>,
}
// OpenAI is the assumed provider when none is configured; ModelSettings::merge
// relies on this to detect "no explicit provider".
impl Default for ModelProvider {
    fn default() -> Self {
        ModelProvider::OpenAI {}
    }
}
impl ModelProvider {
    /// Standard OpenAI API endpoint.
    pub fn openai_base_url() -> String {
        "https://api.openai.com/v1".to_string()
    }
    /// `None` means "use the Anthropic client's own default endpoint".
    pub fn anthropic_base_url() -> Option<String> {
        None
    }
    /// Gemini's OpenAI-compatible endpoint.
    pub fn gemini_base_url() -> String {
        "https://generativelanguage.googleapis.com/v1beta/openai".to_string()
    }
    /// Default Azure OpenAI API version.
    pub fn azure_api_version() -> String {
        "2024-06-01".to_string()
    }
    /// International DashScope OpenAI-compatible endpoint.
    pub fn alibaba_cloud_base_url() -> String {
        "https://dashscope-intl.aliyuncs.com/compatible-mode/v1".to_string()
    }
    /// Maps this config variant to the runtime provider type.
    pub fn provider_type(&self) -> crate::models::ProviderType {
        match self {
            ModelProvider::OpenAI {} => crate::models::ProviderType::OpenAI,
            ModelProvider::OpenAICompatible { .. } => {
                crate::models::ProviderType::Custom("openai_compat".to_string())
            }
            ModelProvider::AzureOpenAI { .. } => crate::models::ProviderType::Azure,
            ModelProvider::Anthropic { .. } => crate::models::ProviderType::Anthropic,
            ModelProvider::Gemini { .. } => crate::models::ProviderType::Gemini,
            ModelProvider::AzureAiFoundry { .. } => crate::models::ProviderType::AzureAiFoundry,
            ModelProvider::AwsBedrock { .. } => crate::models::ProviderType::AwsBedrock,
            ModelProvider::GoogleVertex { .. } => crate::models::ProviderType::GoogleVertex,
            ModelProvider::AlibabaCloud { .. } => crate::models::ProviderType::AlibabaCloud,
        }
    }
    /// Stable string id matching the serde `name` tag for this variant.
    pub fn provider_id(&self) -> &str {
        match self {
            ModelProvider::OpenAI {} => "openai",
            ModelProvider::OpenAICompatible { .. } => "openai_compat",
            ModelProvider::AzureOpenAI { .. } => "azure_openai",
            ModelProvider::Anthropic { .. } => "anthropic",
            ModelProvider::Gemini { .. } => "gemini",
            ModelProvider::AzureAiFoundry { .. } => "azure_ai_foundry",
            ModelProvider::AwsBedrock { .. } => "aws_bedrock",
            ModelProvider::GoogleVertex { .. } => "google_vertex",
            ModelProvider::AlibabaCloud { .. } => "alibaba_cloud",
        }
    }
    /// Secret keys that must be supplied for this provider.
    ///
    /// An inline `api_key` in the provider config satisfies the requirement
    /// (empty result). The bare `OpenAI` variant has no inline key field and
    /// therefore always requires `OPENAI_API_KEY`.
    pub fn required_secret_keys(&self) -> Vec<&'static str> {
        // Pair each variant with (inline api_key if any, required secret key),
        // then apply the shared "inline key wins" rule exactly once instead of
        // repeating the same if/else per variant.
        let (inline_key, env_key): (Option<&String>, &'static str) = match self {
            ModelProvider::OpenAI {} => (None, "OPENAI_API_KEY"),
            ModelProvider::OpenAICompatible { api_key, .. } => {
                (api_key.as_ref(), "OPENAI_API_KEY")
            }
            ModelProvider::AzureOpenAI { api_key, .. } => {
                (api_key.as_ref(), "AZURE_OPENAI_API_KEY")
            }
            ModelProvider::Anthropic { api_key, .. } => (api_key.as_ref(), "ANTHROPIC_API_KEY"),
            ModelProvider::Gemini { api_key, .. } => (api_key.as_ref(), "GEMINI_API_KEY"),
            ModelProvider::AzureAiFoundry { api_key, .. } => {
                (api_key.as_ref(), "AZURE_AI_FOUNDRY_API_KEY")
            }
            ModelProvider::AwsBedrock { api_key, .. } => (api_key.as_ref(), "AWS_ACCESS_KEY_ID"),
            ModelProvider::GoogleVertex { api_key, .. } => {
                (api_key.as_ref(), "GOOGLE_VERTEX_API_KEY")
            }
            ModelProvider::AlibabaCloud { api_key, .. } => (api_key.as_ref(), "DASHSCOPE_API_KEY"),
        };
        if inline_key.is_some() {
            vec![]
        } else {
            vec![env_key]
        }
    }
    /// Secret definitions for every provider in the bundled defaults file.
    pub fn all_provider_definitions() -> Vec<ProviderSecretDefinition> {
        load_default_providers()
            .iter()
            .map(|p| ProviderSecretDefinition {
                id: p.id.clone(),
                label: p.label.clone(),
                keys: p.keys.clone(),
            })
            .collect()
    }
    /// Bundled model catalogs, omitting providers that list no models.
    pub fn well_known_models() -> Vec<ProviderModels> {
        load_default_providers()
            .iter()
            .filter(|p| !p.models.is_empty())
            .map(|p| ProviderModels {
                provider_id: p.id.clone(),
                provider_label: p.label.clone(),
                models: p.models.clone(),
            })
            .collect()
    }
    /// Human-readable provider name for UI display.
    pub fn display_name(&self) -> &'static str {
        match self {
            ModelProvider::OpenAI {} => "OpenAI",
            ModelProvider::OpenAICompatible { .. } => "OpenAI Compatible",
            ModelProvider::AzureOpenAI { .. } => "Azure",
            ModelProvider::Anthropic { .. } => "Anthropic",
            ModelProvider::Gemini { .. } => "Google Gemini",
            ModelProvider::AzureAiFoundry { .. } => "Azure AI Foundry",
            ModelProvider::AwsBedrock { .. } => "AWS Bedrock",
            ModelProvider::GoogleVertex { .. } => "Google Vertex AI",
            ModelProvider::AlibabaCloud { .. } => "Alibaba Cloud",
        }
    }
    /// Provider name used for OpenTelemetry GenAI attributes.
    pub fn otel_provider_name(&self) -> &'static str {
        match self {
            ModelProvider::OpenAI { .. } => "openai",
            ModelProvider::OpenAICompatible { .. } => "openai",
            ModelProvider::AzureOpenAI { .. } => "azure.ai.openai",
            ModelProvider::Anthropic { .. } => "anthropic",
            ModelProvider::Gemini { .. } => "google.gemini",
            ModelProvider::AzureAiFoundry { .. } => "azure.ai.inference",
            ModelProvider::AwsBedrock { .. } => "aws.bedrock",
            ModelProvider::GoogleVertex { .. } => "gcp.vertex_ai",
            ModelProvider::AlibabaCloud { .. } => "alibaba_cloud",
        }
    }
}
/// A model id plus its flattened tuning parameters.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct ModelSettings {
    pub model: String,
    /// Flattened so tuning fields sit next to `model` on the wire.
    #[serde(flatten)]
    pub inner: ModelSettingsInner,
}
/// Tuning parameters shared by all providers.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct ModelSettingsInner {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Context window in tokens; serde default is 20000 (`default_context_size`).
    #[serde(default = "default_context_size")]
    pub context_size: u32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    /// Defaults to `ModelProvider::OpenAI {}`.
    #[serde(default = "default_model_provider")]
    pub provider: ModelProvider,
    /// Provider-specific extra parameters, passed through opaquely.
    // NOTE(review): unlike the fields above, this and `response_format` lack
    // `skip_serializing_if`, so they serialize as explicit nulls — confirm.
    #[serde(default)]
    pub parameters: Option<serde_json::Value>,
    #[serde(default)]
    pub response_format: Option<serde_json::Value>,
    /// Only serialized when not `Auto`.
    #[serde(default, skip_serializing_if = "is_default_api_format")]
    pub api_format: OpenAiApiFormat,
}
impl ModelSettings {
pub fn new(model: impl Into<String>) -> Self {
Self {
model: model.into(),
inner: ModelSettingsInner::default(),
}
}
pub fn from_provider_model_str(s: &str) -> Result<Option<Self>, String> {
let Some((provider_str, model_id)) = s.split_once('/') else {
return Ok(None);
};
if model_id.is_empty() {
return Ok(None);
}
let provider = match provider_str {
"openai" => ModelProvider::OpenAI {},
"anthropic" => ModelProvider::Anthropic {
base_url: None,
api_key: None,
},
"azure_openai" | "azure" => ModelProvider::AzureOpenAI {
base_url: String::new(),
api_key: None,
deployment: model_id.to_string(),
api_version: ModelProvider::azure_api_version(),
},
"gemini" => ModelProvider::Gemini {
base_url: ModelProvider::gemini_base_url(),
api_key: None,
},
"azure_ai_foundry" => ModelProvider::AzureAiFoundry {
base_url: String::new(),
api_key: None,
},
"aws_bedrock" => ModelProvider::AwsBedrock {
base_url: String::new(),
api_key: None,
},
"google_vertex" => ModelProvider::GoogleVertex {
base_url: String::new(),
api_key: None,
project_id: None,
},
"alibaba_cloud" => ModelProvider::AlibabaCloud {
base_url: ModelProvider::alibaba_cloud_base_url(),
api_key: None,
},
_ if provider_str.starts_with("custom_") => ModelProvider::OpenAICompatible {
base_url: String::new(),
api_key: None,
project_id: None,
},
_ => ModelProvider::OpenAICompatible {
base_url: String::new(),
api_key: None,
project_id: None,
},
};
Ok(Some(Self {
model: model_id.to_string(),
inner: ModelSettingsInner {
provider,
..Default::default()
},
}))
}
pub fn merge(&self, override_settings: &ModelSettings) -> Option<ModelSettings> {
let default_provider = ModelProvider::OpenAI {};
let override_has_explicit_provider = std::mem::discriminant(&override_settings.inner.provider)
!= std::mem::discriminant(&default_provider);
let base_has_explicit_provider = std::mem::discriminant(&self.inner.provider)
!= std::mem::discriminant(&default_provider);
let (provider, model) = if override_has_explicit_provider {
let model = if !override_settings.model.is_empty() {
override_settings.model.clone()
} else {
self.model.clone()
};
(override_settings.inner.provider.clone(), model)
} else if base_has_explicit_provider {
let model = if !self.model.is_empty() {
self.model.clone()
} else if !override_settings.model.is_empty() {
override_settings.model.clone()
} else {
String::new()
};
(self.inner.provider.clone(), model)
} else {
let model = if !override_settings.model.is_empty() {
override_settings.model.clone()
} else {
self.model.clone()
};
(self.inner.provider.clone(), model)
};
if model.is_empty() {
return None;
}
let default_context_size = 20000u32;
Some(ModelSettings {
model,
inner: ModelSettingsInner {
temperature: override_settings
.inner
.temperature
.or(self.inner.temperature),
max_tokens: override_settings.inner.max_tokens.or(self.inner.max_tokens),
context_size: if override_settings.inner.context_size != default_context_size {
override_settings.inner.context_size
} else {
self.inner.context_size
},
top_p: override_settings.inner.top_p.or(self.inner.top_p),
frequency_penalty: override_settings
.inner
.frequency_penalty
.or(self.inner.frequency_penalty),
presence_penalty: override_settings
.inner
.presence_penalty
.or(self.inner.presence_penalty),
provider,
parameters: if override_settings.inner.parameters.is_some() {
override_settings.inner.parameters.clone()
} else {
self.inner.parameters.clone()
},
response_format: if override_settings.inner.response_format.is_some() {
override_settings.inner.response_format.clone()
} else {
self.inner.response_format.clone()
},
api_format: if override_settings.inner.api_format != OpenAiApiFormat::Auto {
override_settings.inner.api_format.clone()
} else {
self.inner.api_format.clone()
},
},
})
}
}
/// Serde default for `StandardDefinition::version`.
pub fn default_agent_version() -> Option<String> {
    Some(String::from("0.2.2"))
}
/// Serde default for `ModelSettingsInner::provider`.
fn default_model_provider() -> ModelProvider {
    ModelProvider::OpenAI {}
}
/// Serde default for `ModelSettingsInner::context_size`, in tokens.
/// `ModelSettings::merge` uses this value to detect an "unset" context size.
fn default_context_size() -> u32 {
    20000
}
/// `skip_serializing_if` helper: omit `api_format` when it is `Auto` (the default).
fn is_default_api_format(f: &OpenAiApiFormat) -> bool {
    *f == OpenAiApiFormat::Auto
}
/// Serde default for `StandardDefinition::history_size`.
fn default_history_size() -> Option<usize> {
    Some(5)
}
impl StandardDefinition {
pub fn validate(&self) -> anyhow::Result<()> {
if self.name.is_empty() {
return Err(anyhow::anyhow!("Agent name cannot be empty"));
}
if let Some(ref reflection) = self.reflection
&& reflection.enabled
{
if let Some(ref agent_name) = reflection.reflection_agent
&& agent_name.is_empty()
{
return Err(anyhow::anyhow!(
"Reflection agent name cannot be empty when specified"
));
}
}
Ok(())
}
pub fn validate_reflection_agent(agent_def: &StandardDefinition) -> anyhow::Result<()> {
let has_reflect_tool = agent_def
.tools
.as_ref()
.map(|t| t.builtin.iter().any(|name| name == "reflect"))
.unwrap_or(false);
if !has_reflect_tool {
anyhow::bail!(
"Reflection agent '{}' must have the 'reflect' tool in its tools.builtin configuration",
agent_def.name
);
}
Ok(())
}
}
impl From<StandardDefinition> for LlmDefinition {
    /// Collapses a full agent definition to the LLM-relevant subset.
    /// A top-level `context_size` override is folded into the model settings.
    fn from(definition: StandardDefinition) -> Self {
        let mut model_settings = definition.model_settings;
        if let (Some(ms), Some(ctx)) = (model_settings.as_mut(), definition.context_size) {
            ms.inner.context_size = ctx;
        }
        Self {
            name: definition.name,
            model_settings,
            tool_format: definition.tool_format,
            tool_delivery_mode: definition.tool_delivery_mode,
        }
    }
}
impl ToolsConfig {
    /// Config exposing only the given builtin tools.
    pub fn builtin_only(tools: Vec<&str>) -> Self {
        let builtin = tools.iter().map(|s| s.to_string()).collect();
        Self {
            builtin,
            ..Self::default()
        }
    }
    /// Config exposing every tool from one MCP server ("*" wildcard include).
    pub fn mcp_all(server: &str) -> Self {
        Self::mcp_filtered(server, vec!["*"], vec![])
    }
    /// Config exposing a filtered view of one MCP server's tools.
    pub fn mcp_filtered(server: &str, include: Vec<&str>, exclude: Vec<&str>) -> Self {
        let owned = |names: Vec<&str>| -> Vec<String> {
            names.into_iter().map(str::to_string).collect()
        };
        Self {
            mcp: vec![McpToolConfig {
                server: server.to_string(),
                include: owned(include),
                exclude: owned(exclude),
            }],
            ..Self::default()
        }
    }
}
/// Parses an agent definition from markdown with TOML frontmatter:
/// `---\n<toml>\n---\n<instructions>`. Everything after the second `---` is
/// rejoined on `---` (so instructions may themselves contain it) and becomes
/// the agent's instructions.
pub async fn parse_agent_markdown_content(content: &str) -> Result<StandardDefinition, AgentError> {
    let parts: Vec<&str> = content.split("---").collect();
    if parts.len() < 3 {
        return Err(AgentError::Validation(
            "Invalid agent markdown format. Expected TOML frontmatter between --- markers"
                .to_string(),
        ));
    }
    let toml_content = parts[1].trim();
    let mut agent_def: crate::StandardDefinition =
        toml::from_str(toml_content).map_err(|e| AgentError::Validation(e.to_string()))?;
    // `validate_plugin_name` enforces the full naming contract (ASCII charset,
    // leading letter/underscore per segment, at most one '/'). The inline
    // re-check that used to follow it was strictly weaker — every name it
    // rejected was already rejected above — so it was unreachable dead code
    // and has been removed.
    if let Err(validation_error) = validate_plugin_name(&agent_def.name) {
        return Err(AgentError::Validation(format!(
            "Invalid agent name '{}': {}",
            agent_def.name, validation_error
        )));
    }
    let instructions = parts[2..].join("---").trim().to_string();
    agent_def.instructions = instructions;
    Ok(agent_def)
}
/// Validate a plugin/agent name.
///
/// Rules, checked in order: non-empty; no hyphens; at most one '/' (workspace
/// namespacing, e.g. "workspace/agent"); no empty segments around the '/';
/// each segment starts with an ASCII letter or underscore; segments contain
/// only ASCII letters, digits, and underscores.
///
/// # Errors
///
/// Returns a human-readable description of the first rule violated.
pub fn validate_plugin_name(name: &str) -> Result<(), String> {
    if name.is_empty() {
        return Err("Plugin name cannot be empty".to_string());
    }
    if name.contains('-') {
        return Err(format!(
            "Plugin name '{}' cannot contain hyphens. Use underscores instead.",
            name
        ));
    }
    if name.matches('/').count() > 1 {
        return Err(format!(
            "Plugin name '{}' can contain at most one '/' for workspace namespacing (e.g. 'workspace/agent')",
            name
        ));
    }
    for segment in name.split('/') {
        let mut chars = segment.chars();
        // First character: must exist and be a letter or underscore.
        match chars.next() {
            None => {
                return Err(format!(
                    "Plugin name '{}' has an empty segment around '/'",
                    name
                ));
            }
            Some(first) if !(first.is_ascii_alphabetic() || first == '_') => {
                return Err(format!(
                    "Each segment in '{}' must start with a letter or underscore",
                    name
                ));
            }
            Some(_) => {}
        }
        // Remaining characters: letters, digits, or underscores only. (The
        // first char already satisfies this, so checking the rest suffices.)
        if chars.any(|c| !(c.is_ascii_alphanumeric() || c == '_')) {
            return Err(format!(
                "Plugin name '{}' can only contain letters, numbers, underscores, and at most one '/' for namespacing",
                name
            ));
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // --- StandardDefinition.compaction_enabled serde behavior -------------
    // compaction_enabled defaults to true, and the `true` value is skipped
    // during serialization (only `false` is written out).

    #[test]
    fn test_compaction_enabled_defaults_to_true_via_serde() {
        let json = r#"{"name": "test"}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.compaction_enabled);
    }
    #[test]
    fn test_compaction_enabled_deserializes_true_when_absent() {
        let json = r#"{"name": "test", "description": "test agent"}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.compaction_enabled);
    }
    #[test]
    fn test_compaction_enabled_deserializes_false() {
        let json = r#"{"name": "test", "description": "test agent", "compaction_enabled": false}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(!def.compaction_enabled);
    }
    #[test]
    fn test_compaction_enabled_true_skipped_in_serialization() {
        let def = StandardDefinition {
            name: "test".to_string(),
            compaction_enabled: true,
            ..Default::default()
        };
        let json = serde_json::to_string(&def).unwrap();
        assert!(!json.contains("compaction_enabled"));
    }
    #[test]
    fn test_compaction_enabled_false_serialized() {
        let def = StandardDefinition {
            name: "test".to_string(),
            compaction_enabled: false,
            ..Default::default()
        };
        let json = serde_json::to_string(&def).unwrap();
        assert!(json.contains("\"compaction_enabled\":false"));
    }

    // --- ModelSettings.max_tokens optionality ------------------------------
    // max_tokens is Option<u64>-style: absent => None, None skipped on write.

    #[test]
    fn test_max_tokens_optional_defaults_to_none() {
        let def = StandardDefinition::default();
        assert!(def.model_settings().is_none());
    }
    #[test]
    fn test_max_tokens_deserializes_when_present() {
        let json =
            r#"{"name": "test", "model_settings": {"model": "gpt-4.1", "max_tokens": 4096}}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert_eq!(def.model_settings().unwrap().inner.max_tokens, Some(4096));
    }
    #[test]
    fn test_max_tokens_none_when_absent() {
        let json = r#"{"name": "test", "model_settings": {"model": "gpt-4.1"}}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.model_settings().unwrap().inner.max_tokens.is_none());
    }
    #[test]
    fn test_max_tokens_none_skipped_in_serialization() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                max_tokens: None,
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(!json.contains("max_tokens"));
    }
    #[test]
    fn test_max_tokens_some_serialized() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                max_tokens: Some(2048),
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(json.contains("\"max_tokens\":2048"));
    }

    // --- OpenAiApiFormat::Auto model-name detection -------------------------
    // Auto routes codex/pro/deep-research model names to the Responses API,
    // everything else to Completions; explicit variants always win.

    #[test]
    fn test_api_format_auto_detect_codex_prefix() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("codex-mini-latest"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("codex-mini-2025-01-24"),
            ResolvedOpenAiApiFormat::Responses
        );
    }
    #[test]
    fn test_api_format_auto_detect_codex_suffix() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("gpt-5.1-codex"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("gpt-5.3-codex"),
            ResolvedOpenAiApiFormat::Responses
        );
    }
    #[test]
    fn test_api_format_auto_detect_pro_models() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(fmt.resolve("gpt-5-pro"), ResolvedOpenAiApiFormat::Responses);
        assert_eq!(
            fmt.resolve("gpt-5.2-pro"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("gpt-5.4-pro"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(fmt.resolve("o3-pro"), ResolvedOpenAiApiFormat::Responses);
    }
    #[test]
    fn test_api_format_auto_detect_deep_research_models() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("o3-deep-research"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("o4-mini-deep-research"),
            ResolvedOpenAiApiFormat::Responses
        );
    }
    #[test]
    fn test_api_format_auto_detect_non_codex() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(fmt.resolve("gpt-4o"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("gpt-4.1"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("gpt-5"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("o1"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(
            fmt.resolve("gpt-5.4-mini"),
            ResolvedOpenAiApiFormat::Completions
        );
        assert_eq!(fmt.resolve("o3-mini"), ResolvedOpenAiApiFormat::Completions);
    }
    #[test]
    fn test_api_format_explicit_override() {
        // Explicit Responses/Completions ignore model-name heuristics.
        assert_eq!(
            OpenAiApiFormat::Responses.resolve("gpt-4o"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            OpenAiApiFormat::Completions.resolve("codex-mini-latest"),
            ResolvedOpenAiApiFormat::Completions
        );
    }
    #[test]
    fn test_api_format_defaults_to_auto() {
        let inner = ModelSettingsInner::default();
        assert_eq!(inner.api_format, OpenAiApiFormat::Auto);
    }
    #[test]
    fn test_api_format_auto_skipped_in_serialization() {
        // Auto is the default, so it is omitted from serialized output.
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(!json.contains("api_format"));
    }
    #[test]
    fn test_api_format_responses_serialized() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                api_format: OpenAiApiFormat::Responses,
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(json.contains("\"api_format\":\"responses\""));
    }
    #[test]
    fn test_api_format_deserializes_from_toml() {
        let toml_str = r#"
model = "codex-mini-latest"
api_format = "responses"
[provider]
name = "openai"
"#;
        let settings: ModelSettings = toml::from_str(toml_str).unwrap();
        assert_eq!(settings.inner.api_format, OpenAiApiFormat::Responses);
    }

    // --- ToolDeliveryMode serde + ToolsConfig behavior ----------------------

    #[test]
    fn test_tool_delivery_mode_defaults_to_deferred() {
        let mode: ToolDeliveryMode = Default::default();
        assert_eq!(mode, ToolDeliveryMode::Deferred);
    }
    #[test]
    fn test_tool_delivery_mode_backwards_compat_all_tools() {
        // Legacy "all_tools" value maps to the Full variant.
        let json = r#""all_tools""#;
        let mode: ToolDeliveryMode = serde_json::from_str(json).unwrap();
        assert_eq!(mode, ToolDeliveryMode::Full);
    }
    #[test]
    fn test_tool_delivery_mode_backwards_compat_tool_search() {
        // Legacy "tool_search" value maps to the Deferred variant.
        let json = r#""tool_search""#;
        let mode: ToolDeliveryMode = serde_json::from_str(json).unwrap();
        assert_eq!(mode, ToolDeliveryMode::Deferred);
    }
    #[test]
    fn test_tools_config_is_core_tool() {
        let config = ToolsConfig::default();
        assert!(config.is_core_tool("final"));
        assert!(config.is_core_tool("tool_search"));
        assert!(config.is_core_tool("execute_shell"));
        assert!(config.is_core_tool("call_coder"));
        assert!(!config.is_core_tool("browsr_scrape"));
    }
    #[test]
    fn test_tools_config_always_full_schema() {
        // Tools listed in always_full_schema are treated as core tools.
        let config = ToolsConfig {
            always_full_schema: vec!["browsr_scrape".to_string()],
            ..Default::default()
        };
        assert!(config.is_core_tool("browsr_scrape"));
        assert!(!config.is_core_tool("browsr_browser"));
    }
    #[test]
    fn test_effective_delivery_mode_full_stays_full() {
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Full,
            ..Default::default()
        };
        assert_eq!(config.effective_delivery_mode(100), ToolDeliveryMode::Full);
    }
    #[test]
    fn test_effective_delivery_mode_deferred_stays_deferred() {
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Deferred,
            deferred_threshold: Some(20),
            ..Default::default()
        };
        assert_eq!(
            config.effective_delivery_mode(10),
            ToolDeliveryMode::Deferred
        );
    }
    #[test]
    fn test_effective_delivery_mode_deferred_over_threshold() {
        // NOTE(review): mode stays Deferred even when the tool count (15)
        // exceeds deferred_threshold (10) — confirm that is the intended
        // semantics of deferred_threshold.
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Deferred,
            deferred_threshold: Some(10),
            ..Default::default()
        };
        assert_eq!(
            config.effective_delivery_mode(15),
            ToolDeliveryMode::Deferred
        );
    }
    #[test]
    fn test_runtime_mode_serde() {
        let mode: RuntimeMode = serde_json::from_str("\"cloud\"").unwrap();
        assert_eq!(mode, RuntimeMode::Cloud);
        let mode: RuntimeMode = serde_json::from_str("\"cli\"").unwrap();
        assert_eq!(mode, RuntimeMode::Cli);
        let mode: RuntimeMode = serde_json::from_str("\"browser\"").unwrap();
        assert_eq!(mode, RuntimeMode::Browser);
        assert_eq!(RuntimeMode::default(), RuntimeMode::Cloud);
        let json = serde_json::to_string(&RuntimeMode::Cli).unwrap();
        assert_eq!(json, "\"cli\"");
    }

    // --- ModelSettings::merge precedence rules ------------------------------
    // Agent (override) settings win over base (workspace) settings, except
    // when the base carries an explicit custom provider, which also pins the
    // base model.

    #[test]
    fn merge_both_default_openai_agent_model_wins() {
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings::new("gpt-4.1-mini");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-4.1-mini");
        assert!(matches!(result.inner.provider, ModelProvider::OpenAI {}));
    }
    #[test]
    fn merge_both_default_openai_base_model_used_when_agent_empty() {
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings::new("");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.1");
    }
    #[test]
    fn merge_agent_explicit_provider_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.com/v1".into(),
                    api_key: Some("key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "claude-sonnet-4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: None,
                    api_key: None,
                },
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "claude-sonnet-4");
        assert!(matches!(result.inner.provider, ModelProvider::Anthropic { .. }));
    }
    #[test]
    fn merge_agent_explicit_provider_no_model_uses_base() {
        // Agent provider wins but its empty model falls back to the base model.
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings {
            model: "".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: None,
                    api_key: None,
                },
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.1");
        assert!(matches!(result.inner.provider, ModelProvider::Anthropic { .. }));
    }
    #[test]
    fn merge_workspace_custom_provider_overrides_agent_model() {
        // A custom (OpenAICompatible) base provider pins both provider AND model.
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.azure.com/openai/v1".into(),
                    api_key: Some("test-key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-5.1");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
        assert!(matches!(result.inner.provider, ModelProvider::OpenAICompatible { .. }));
    }
    #[test]
    fn merge_workspace_custom_provider_agent_empty_model() {
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.azure.com/openai/v1".into(),
                    api_key: Some("test-key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
    }
    #[test]
    fn merge_both_empty_returns_none() {
        let base = ModelSettings::new("");
        let agent = ModelSettings::new("");
        assert!(base.merge(&agent).is_none());
    }
    #[test]
    fn merge_workspace_empty_agent_empty_returns_none() {
        // Even with a custom provider, no model on either side yields None.
        let base = ModelSettings {
            model: "".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.com".into(),
                    api_key: None,
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");
        assert!(base.merge(&agent).is_none());
    }
    #[test]
    fn merge_temperature_max_tokens_override() {
        // Agent-set fields win; fields the agent leaves None fall back to base.
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                temperature: Some(0.5),
                max_tokens: Some(1000),
                top_p: Some(0.9),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                temperature: Some(0.9),
                max_tokens: None,
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-4.1-mini");
        assert_eq!(result.inner.temperature, Some(0.9));
        assert_eq!(result.inner.max_tokens, Some(1000));
        assert_eq!(result.inner.top_p, Some(0.9));
    }
    #[test]
    fn merge_context_size_non_default_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                context_size: 20000,
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                context_size: 100000,
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.inner.context_size, 100000);
    }
    #[test]
    fn merge_context_size_default_falls_back() {
        // NOTE(review): together with the test above this implies 20000 is the
        // default context_size sentinel (agent's 20000 is ignored in favor of
        // base's 128000) — confirm against ModelSettingsInner::default().
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                context_size: 128000,
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                context_size: 20000,
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.inner.context_size, 128000);
    }
    #[test]
    fn merge_azure_ai_foundry_base_url_preserved() {
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::AzureAiFoundry {
                    base_url: "https://myresource.openai.azure.com".into(),
                    api_key: Some("test-key".into()),
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-5.1");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
        assert!(matches!(result.inner.provider, ModelProvider::AzureAiFoundry { .. }));
        if let ModelProvider::AzureAiFoundry { base_url, .. } = result.inner.provider {
            assert_eq!(base_url, "https://myresource.openai.azure.com");
        }
    }
    #[test]
    fn merge_anthropic_provider_preserves_base_url() {
        let base = ModelSettings {
            model: "claude-sonnet-4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: Some("https://custom.anthropic.com".into()),
                    api_key: Some("key".into()),
                },
                temperature: Some(0.7),
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");
        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "claude-sonnet-4");
        assert_eq!(result.inner.temperature, Some(0.7));
        if let ModelProvider::Anthropic { base_url, api_key } = result.inner.provider {
            assert_eq!(base_url, Some("https://custom.anthropic.com".into()));
            assert_eq!(api_key, Some("key".into()));
        }
    }
    #[test]
    fn merge_response_format_agent_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "text"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "json_object"})),
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.response_format,
            Some(serde_json::json!({"type": "json_object"}))
        );
    }
    #[test]
    fn merge_response_format_base_fallback() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "text"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-4.1-mini");
        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.response_format,
            Some(serde_json::json!({"type": "text"}))
        );
    }
    #[test]
    fn merge_parameters_agent_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                parameters: Some(serde_json::json!({"key": "base"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                parameters: Some(serde_json::json!({"key": "agent"})),
                ..Default::default()
            },
        };
        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.parameters,
            Some(serde_json::json!({"key": "agent"}))
        );
    }
}