1use crate::AgentError;
2use crate::a2a::AgentSkill;
3use crate::browser::{BrowserAgentConfig, BrowsrClientConfig};
4use crate::configuration::DefinitionOverrides;
5use schemars::JsonSchema;
6use serde::{Deserialize, Serialize};
7use std::default::Default;
8
/// Fallback timeout (seconds) for external tool calls, used when
/// `AgentStrategy::external_tool_timeout_secs` is not set.
pub const DEFAULT_EXTERNAL_TOOL_TIMEOUT_SECS: u64 = 120;
11
/// A skill an agent can load on demand (see the `load_skill` builtin tool).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct AvailableSkill {
    /// Unique identifier used to reference the skill.
    pub id: String,
    /// Human-readable skill name.
    pub name: String,
    /// Optional free-text description of what the skill does.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
23
/// High-level execution strategy knobs for an agent. All fields are optional;
/// accessors on the impl fall back to sensible defaults.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "snake_case")]
#[derive(Default)]
pub struct AgentStrategy {
    /// How deeply the agent reasons before acting (default: `Standard`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_depth: Option<ReasoningDepth>,

    /// Whether steps run as tool calls or generated code (default: `Tools`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub execution_mode: Option<ExecutionMode>,
    /// When/whether the agent revises its plan mid-run.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub replanning: Option<ReplanningConfig>,

    /// Per-call timeout for external tools, in seconds
    /// (default: [`DEFAULT_EXTERNAL_TOOL_TIMEOUT_SECS`]).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub external_tool_timeout_secs: Option<u64>,
}
45
46impl AgentStrategy {
47 pub fn get_reasoning_depth(&self) -> ReasoningDepth {
49 self.reasoning_depth.clone().unwrap_or_default()
50 }
51
52 pub fn get_execution_mode(&self) -> ExecutionMode {
54 self.execution_mode.clone().unwrap_or_default()
55 }
56
57 pub fn get_replanning(&self) -> ReplanningConfig {
59 self.replanning.clone().unwrap_or_default()
60 }
61
62 pub fn get_external_tool_timeout_secs(&self) -> u64 {
64 self.external_tool_timeout_secs
65 .unwrap_or(DEFAULT_EXTERNAL_TOOL_TIMEOUT_SECS)
66 }
67}
68
/// Language used when `ExecutionMode::Code` generates executable snippets.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum CodeLanguage {
    /// Currently the only supported code-generation language.
    #[default]
    Typescript,
}

impl std::fmt::Display for CodeLanguage {
    // Renders the same lowercase token that serde uses ("typescript").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            CodeLanguage::Typescript => write!(f, "typescript"),
        }
    }
}
83
/// Configuration for the self-reflection pass an agent can run on its own output.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct ReflectionConfig {
    /// Master switch; when `false` the other fields are ignored
    /// (see `StandardDefinition::reflection_config`).
    #[serde(default)]
    pub enabled: bool,
    /// Optional name of a dedicated agent to perform the reflection.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reflection_agent: Option<String>,
    /// When reflection runs (default: end of execution).
    #[serde(default)]
    pub trigger: ReflectionTrigger,
    /// How thorough the reflection is (default: light).
    #[serde(default)]
    pub depth: ReflectionDepth,
}

/// When a reflection pass is triggered during a run.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReflectionTrigger {
    /// Reflect once, after the run completes.
    #[default]
    EndOfExecution,
    /// Reflect after every step.
    AfterEachStep,
    /// Reflect only after a step fails.
    AfterFailures,
    /// Reflect every N steps.
    AfterNSteps(usize),
}

/// How thorough a reflection pass is.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReflectionDepth {
    #[default]
    Light,
    Standard,
    Deep,
}
130
/// Settings for the planning phase of a run.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct PlanConfig {
    /// Model used for planning; `None` means use the agent's main model.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    /// Upper bound on planning iterations (default: 10).
    #[serde(default = "default_plan_max_iterations")]
    pub max_iterations: usize,
}

// Manual impl so the in-code default matches the serde default exactly.
impl Default for PlanConfig {
    fn default() -> Self {
        Self {
            model_settings: None,
            max_iterations: default_plan_max_iterations(),
        }
    }
}
150
/// Serde default for `PlanConfig::max_iterations`.
fn default_plan_max_iterations() -> usize {
    10
}
154
/// How much deliberation the agent does before acting.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningDepth {
    Shallow,
    #[default]
    Standard,
    Deep,
}

/// How the agent executes its steps. Internally tagged on `type` for serde.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case", tag = "type")]
pub enum ExecutionMode {
    /// Plain tool-calling loop (the default).
    #[default]
    Tools,
    /// Generate and run code in the given language instead of tool calls.
    Code { language: CodeLanguage },
}

/// Controls mid-run plan revision; both fields optional with accessors
/// providing defaults (see `impl ReplanningConfig`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub struct ReplanningConfig {
    /// When replanning fires (default: `Never`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub trigger: Option<ReplanningTrigger>,
    /// Whether replanning is active at all (default: `false`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enabled: Option<bool>,
}

/// When a replanning pass is triggered.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ReplanningTrigger {
    #[default]
    Never,
    AfterReflection,
    AfterNIterations(usize),
    AfterFailures,
}
205
206impl ReplanningConfig {
207 pub fn get_trigger(&self) -> ReplanningTrigger {
209 self.trigger.clone().unwrap_or_default()
210 }
211
212 pub fn is_enabled(&self) -> bool {
214 self.enabled.unwrap_or(false)
215 }
216}
217
/// How step execution is scheduled.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum ExecutionKind {
    #[default]
    Retriable,
    Interleaved,
}

/// What kind of memory the agent keeps between interactions.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(rename_all = "snake_case")]
pub enum MemoryKind {
    #[default]
    None,
    ShortTerm,
    LongTerm,
}

/// How tool schemas are delivered to the model. Aliases keep older config
/// files (`all_tools`, `tool_search`) deserializing.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolDeliveryMode {
    /// Send every tool's full schema up front.
    #[serde(alias = "all_tools")]
    Full,
    /// Defer full schemas until requested (the default).
    #[default]
    #[serde(alias = "tool_search")]
    Deferred,
    /// Send tool names only.
    NamesOnly,
}

/// Which OpenAI API surface to use; `Auto` picks based on the model name
/// (see `OpenAiApiFormat::resolve`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum OpenAiApiFormat {
    #[default]
    Auto,
    Completions,
    Responses,
}
284
285impl OpenAiApiFormat {
286 pub fn resolve(&self, model: &str) -> ResolvedOpenAiApiFormat {
289 match self {
290 OpenAiApiFormat::Completions => ResolvedOpenAiApiFormat::Completions,
291 OpenAiApiFormat::Responses => ResolvedOpenAiApiFormat::Responses,
292 OpenAiApiFormat::Auto => {
293 if Self::model_requires_responses_api(model) {
294 ResolvedOpenAiApiFormat::Responses
295 } else {
296 ResolvedOpenAiApiFormat::Completions
297 }
298 }
299 }
300 }
301
302 fn model_requires_responses_api(model: &str) -> bool {
309 let m = model.to_lowercase();
310 m.starts_with("codex")
312 || m.ends_with("-codex")
313 || m.contains("/codex")
314 || m.ends_with("-pro")
316 || m.ends_with("-deep-research")
318 }
319}
320
/// Concrete API surface after `OpenAiApiFormat::Auto` has been resolved.
#[derive(Debug, Clone, PartialEq)]
pub enum ResolvedOpenAiApiFormat {
    Completions,
    Responses,
}

/// Wire format used for tool-call exchanges with the model.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ToolCallFormat {
    /// XML-tagged tool calls (the default).
    #[default]
    Xml,
    /// Line-delimited JSON tool calls.
    JsonL,

    /// Tool calls expressed as code.
    Code,
    // NOTE(review): this rename looks redundant — `rename_all = "snake_case"`
    // already yields "provider" for this variant; confirm before removing.
    #[serde(rename = "provider")]
    Provider,
    None,
}
347
/// Customizes how the user message presented to the model is assembled.
#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema, Default)]
pub struct UserMessageOverrides {
    /// Ordered parts composing the message. Required when this override is present.
    pub parts: Vec<PartDefinition>,
    /// Whether run artifacts are appended to the message (default: false).
    #[serde(default)]
    pub include_artifacts: bool,
    /// Whether the step count is shown (default: `Some(true)`).
    #[serde(default = "default_include_step_count")]
    pub include_step_count: Option<bool>,
}
359
/// Serde default for `UserMessageOverrides::include_step_count`.
fn default_include_step_count() -> Option<bool> {
    Some(true)
}
363
364#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
365#[serde(tag = "type", content = "source", rename_all = "snake_case")]
366pub enum PartDefinition {
367 Template(String), SessionKey(String), }
370
/// A named LLM configuration (model + tool-interaction settings).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct LlmDefinition {
    /// Unique name for this definition.
    pub name: String,
    /// Model settings; `None` means "not configured yet" and is surfaced as a
    /// user-facing error by `ms`/`ms_mut`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    /// Tool-call wire format (default: `Xml`).
    #[serde(default)]
    pub tool_format: ToolCallFormat,
    /// How tool schemas are delivered (default: `Deferred`).
    #[serde(default)]
    pub tool_delivery_mode: ToolDeliveryMode,
}
386
387impl LlmDefinition {
388 pub fn ms(&self) -> Result<&ModelSettings, String> {
391 self.model_settings.as_ref().ok_or_else(|| {
392 "No model configured. Please set a default model in Agent Settings → Default Model."
393 .to_string()
394 })
395 }
396
397 pub fn ms_mut(&mut self) -> Result<&mut ModelSettings, String> {
400 self.model_settings.as_mut().ok_or_else(|| {
401 "No model configured. Please set a default model in Agent Settings → Default Model."
402 .to_string()
403 })
404 }
405}
406
/// Environment an agent can run in. Derives `Eq + Hash` so runtime lists can
/// be deduplicated (see `deserialize_runtime_modes`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Hash, Default)]
#[serde(rename_all = "snake_case")]
pub enum RuntimeMode {
    /// Local command-line runtime.
    Cli,
    /// Hosted cloud runtime (the default).
    #[default]
    Cloud,
    /// In-browser runtime.
    Browser,
}
420
/// Full declarative definition of a standard agent: identity, instructions,
/// model configuration, tools, and runtime restrictions.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct StandardDefinition {
    /// Unique agent name.
    pub name: String,
    /// Human-readable description.
    #[serde(default)]
    pub description: String,

    /// Definition schema version (default: see `default_agent_version`).
    #[serde(default = "default_agent_version")]
    pub version: Option<String>,

    /// System instructions for the agent.
    #[serde(default)]
    pub instructions: String,

    /// MCP servers the agent may talk to.
    #[serde(default)]
    pub mcp_servers: Option<Vec<McpDefinition>>,
    /// Primary model configuration.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model_settings: Option<ModelSettings>,
    /// Optional dedicated model for analysis passes; falls back to
    /// `model_settings` (see `analysis_model_settings_config`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub analysis_model_settings: Option<ModelSettings>,

    /// Conversation history window.
    // NOTE(review): `default_history_size` is not visible in this chunk —
    // it must be defined elsewhere in the file.
    #[serde(default = "default_history_size")]
    pub history_size: Option<usize>,
    /// Strategy knobs (reasoning depth, execution mode, replanning).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub strategy: Option<AgentStrategy>,
    /// Icon shown in UIs.
    #[serde(default)]
    pub icon_url: Option<String>,

    /// Cap on agent-loop iterations.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_iterations: Option<usize>,

    /// A2A skill descriptions advertised for this agent.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills_description: Vec<AgentSkill>,

    /// Skills the agent can load at runtime via `load_skill`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub available_skills: Vec<AvailableSkill>,

    /// Names of agents this agent may delegate to.
    #[serde(default)]
    pub sub_agents: Vec<String>,

    /// Tool-call wire format (default: `Xml`).
    #[serde(default)]
    pub tool_format: ToolCallFormat,

    /// How tool schemas are delivered (default: `Deferred`).
    #[serde(default)]
    pub tool_delivery_mode: ToolDeliveryMode,

    /// Builtin/dynamic/MCP tool selection.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tools: Option<ToolsConfig>,

    /// Named template partials usable from instructions.
    #[serde(default, skip_serializing_if = "std::collections::HashMap::is_empty")]
    pub partials: std::collections::HashMap<String, String>,

    /// Self-reflection configuration.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reflection: Option<ReflectionConfig>,
    /// Whether the todos tool is enabled (default: off, see `is_todos_enabled`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub enable_todos: Option<bool>,

    /// Browser automation configuration.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub browser_config: Option<BrowserAgentConfig>,

    /// Whether shell tools are included (default: off, see `should_include_shell`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub include_shell: Option<bool>,

    /// Explicit context window override; zero/absent falls through to the
    /// model's value (see `get_effective_context_size`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub context_size: Option<u32>,

    /// Append framework default instructions after the agent's own
    /// (default: `Some(true)`).
    #[serde(
        skip_serializing_if = "Option::is_none",
        default = "default_append_default_instructions"
    )]
    pub append_default_instructions: Option<bool>,
    /// Include the scratchpad section (default: `Some(true)`).
    #[serde(
        skip_serializing_if = "Option::is_none",
        default = "default_include_scratchpad"
    )]
    pub include_scratchpad: Option<bool>,

    /// Names of hooks to run around the agent lifecycle.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub hooks: Vec<String>,

    /// Custom assembly of the user message.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub user_message_overrides: Option<UserMessageOverrides>,

    /// History compaction; on by default, only serialized when disabled.
    #[serde(
        default = "default_compaction_enabled",
        skip_serializing_if = "is_true"
    )]
    pub compaction_enabled: bool,

    /// Legacy "remote/deepagent" flag; maps to CLI-only runtime when no
    /// explicit `runtime` list is given (see `allowed_runtimes`).
    #[serde(default, alias = "deepagent")]
    pub remote: bool,

    /// Allowed runtimes; accepts a single mode or a list, rejects duplicates.
    #[serde(
        default,
        deserialize_with = "deserialize_runtime_modes",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub runtime: Vec<RuntimeMode>,
}
567
568fn deserialize_runtime_modes<'de, D>(deserializer: D) -> Result<Vec<RuntimeMode>, D::Error>
570where
571 D: serde::Deserializer<'de>,
572{
573 use serde::de::{self, Deserialize};
574
575 #[derive(Deserialize)]
576 #[serde(untagged)]
577 enum OneOrMany {
578 One(RuntimeMode),
579 Many(Vec<RuntimeMode>),
580 }
581
582 match Option::<OneOrMany>::deserialize(deserializer)? {
583 None => Ok(Vec::new()),
584 Some(OneOrMany::One(rt)) => Ok(vec![rt]),
585 Some(OneOrMany::Many(v)) => {
586 let mut seen = std::collections::HashSet::new();
588 for rt in &v {
589 let key = format!("{:?}", rt);
590 if !seen.insert(key) {
591 return Err(de::Error::custom(format!(
592 "duplicate runtime entry: {:?}",
593 rt
594 )));
595 }
596 }
597 Ok(v)
598 }
599 }
600}
/// Serde default: append framework default instructions unless overridden.
fn default_append_default_instructions() -> Option<bool> {
    Some(true)
}

/// Serde default: include the scratchpad section unless overridden.
fn default_include_scratchpad() -> Option<bool> {
    Some(true)
}

/// Serde default: history compaction is on unless explicitly disabled.
fn default_compaction_enabled() -> bool {
    true
}

/// `skip_serializing_if` helper: omit the field when the flag is `true`.
fn is_true(v: &bool) -> bool {
    *v
}
impl StandardDefinition {
    /// Runtimes this agent may run in.
    ///
    /// An explicit `runtime` list wins; otherwise the legacy `remote` flag
    /// maps to CLI-only. An empty result means "no restriction".
    pub fn allowed_runtimes(&self) -> Vec<RuntimeMode> {
        if !self.runtime.is_empty() {
            return self.runtime.clone();
        }
        if self.remote {
            return vec![RuntimeMode::Cli];
        }
        Vec::new()
    }

    /// Whether the agent can run in `current`, optionally considering a
    /// runtime the surrounding runner can provide on the agent's behalf.
    pub fn is_runnable_in(
        &self,
        current: &RuntimeMode,
        runner_provides: Option<&RuntimeMode>,
    ) -> bool {
        let allowed = self.allowed_runtimes();
        // No restriction declared: runnable anywhere.
        if allowed.is_empty() {
            return true;
        }
        if allowed.iter().any(|rt| rt == current) {
            return true;
        }
        // Fall back to a runtime the runner can supply, if any.
        match runner_provides {
            Some(p) => allowed.iter().any(|rt| rt == p),
            None => false,
        }
    }

    /// True when a browser config exists and is enabled.
    pub fn should_use_browser(&self) -> bool {
        self.browser_config
            .as_ref()
            .map(|cfg| cfg.is_enabled())
            .unwrap_or(false)
    }

    /// Raw browser configuration, if any.
    pub fn browser_settings(&self) -> Option<&BrowserAgentConfig> {
        self.browser_config.as_ref()
    }

    /// Runtime client configuration derived from the browser config, if any.
    pub fn browser_runtime_config(&self) -> Option<BrowsrClientConfig> {
        self.browser_config.as_ref().map(|cfg| cfg.runtime_config())
    }

    /// True when the browser session should persist across runs.
    pub fn should_persist_browser_session(&self) -> bool {
        self.browser_config
            .as_ref()
            .map(|cfg| cfg.should_persist_session())
            .unwrap_or(false)
    }

    /// True when a reflection config exists and is enabled.
    pub fn is_reflection_enabled(&self) -> bool {
        self.reflection.as_ref().map(|r| r.enabled).unwrap_or(false)
    }

    /// Reflection config, but only when it is actually enabled.
    pub fn reflection_config(&self) -> Option<&ReflectionConfig> {
        self.reflection.as_ref().filter(|r| r.enabled)
    }
    /// True when the todos tool is enabled; unset counts as disabled.
    pub fn is_todos_enabled(&self) -> bool {
        self.enable_todos.unwrap_or(false)
    }

    /// True when shell tools are included; unset counts as disabled.
    pub fn should_include_shell(&self) -> bool {
        self.include_shell.unwrap_or(false)
    }

    /// Primary model settings, if configured.
    pub fn model_settings(&self) -> Option<&ModelSettings> {
        self.model_settings.as_ref()
    }

    /// Mutable access to the primary model settings, if configured.
    pub fn model_settings_mut(&mut self) -> Option<&mut ModelSettings> {
        self.model_settings.as_mut()
    }

    /// Effective context window: the agent-level override wins, then the
    /// model's configured size, then the global default. Zero is treated as
    /// "unset" at every level.
    pub fn get_effective_context_size(&self) -> u32 {
        self.context_size
            .filter(|&s| s > 0)
            .or_else(|| {
                self.model_settings()
                    .map(|ms| ms.inner.context_size)
                    .filter(|&s| s > 0)
            })
            .unwrap_or_else(default_context_size)
    }

    /// Model settings for analysis passes, falling back to the primary model.
    pub fn analysis_model_settings_config(&self) -> Option<&ModelSettings> {
        self.analysis_model_settings
            .as_ref()
            .or_else(|| self.model_settings())
    }

    /// Whether the scratchpad section is included; defaults to `true`.
    pub fn include_scratchpad(&self) -> bool {
        self.include_scratchpad.unwrap_or(true)
    }

    /// Applies runtime overrides (model, limits, instructions, …) in place.
    pub fn apply_overrides(&mut self, overrides: DefinitionOverrides) {
        // Model-level overrides apply only when model settings already exist.
        if let Some(ref mut ms) = self.model_settings {
            if let Some(model) = overrides.model {
                // Strip a leading "provider/" prefix; only the model id is
                // kept — the provider part of the override is discarded here.
                ms.model = model
                    .split_once('/')
                    .map(|(_, m)| m.to_string())
                    .unwrap_or(model);
            }
            if let Some(temperature) = overrides.temperature {
                ms.inner.temperature = Some(temperature);
            }
            if let Some(max_tokens) = overrides.max_tokens {
                ms.inner.max_tokens = Some(max_tokens);
            }
        }

        if let Some(max_iterations) = overrides.max_iterations {
            self.max_iterations = Some(max_iterations);
        }

        if let Some(instructions) = overrides.instructions {
            self.instructions = instructions;
        }

        if let Some(remote) = overrides.remote {
            self.remote = remote;
        }

        if let Some(use_browser) = overrides.use_browser {
            // Create a default browser config on demand so the flag sticks.
            let mut config = self.browser_config.clone().unwrap_or_default();
            config.enabled = use_browser;
            self.browser_config = Some(config);
        }

        if let Some(dynamic_tools) = overrides.dynamic_tools {
            // Appends to (does not replace) any existing dynamic tools.
            let tools = self.tools.get_or_insert_with(ToolsConfig::default);
            tools.dynamic.extend(dynamic_tools);
        }
    }
}
781
/// Builtin tool names accepted in `ToolsConfig::builtin`
/// (see `ToolsConfig::invalid_builtin_tools`).
pub const VALID_BUILTIN_TOOLS: &[&str] = &[
    "final",
    "reflect",
    "transfer_to_agent",
    "browsr_scrape",
    "browsr_browser",
    "browsr_crawl",
    "browser_step",
    "search",
    "start_shell",
    "execute_shell",
    "stop_shell",
    "distri_execute_code",
    "tool_search",
    "load_skill",
    "inject_connection_env",
    "console_log",
    "artifact_tool",
    "todos",
];

/// Tools that always get their full schema regardless of delivery mode
/// (see `ToolsConfig::is_core_tool`).
// NOTE(review): "write_todos" here does not appear in VALID_BUILTIN_TOOLS
// (which lists "todos") — confirm whether the names are intentionally different.
pub const CORE_TOOLS: &[&str] = &[
    "final",
    "transfer_to_agent",
    "tool_search",
    "write_todos",
    "execute_shell",
    "start_shell",
    "load_skill",
];

/// Default tool count above which schemas are deferred
/// (see `ToolsConfig::effective_threshold`).
pub const DEFAULT_DEFERRED_THRESHOLD: usize = 15;
832
/// Tool selection and delivery configuration for an agent.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
#[serde(deny_unknown_fields)]
pub struct ToolsConfig {
    /// Builtin tools to enable; must be members of [`VALID_BUILTIN_TOOLS`].
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub builtin: Vec<String>,

    /// Dynamically-constructed tools.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dynamic: Vec<crate::dynamic_tool::DynamicToolFactory>,

    /// Per-MCP-server tool include/exclude rules.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub mcp: Vec<McpToolConfig>,

    /// External tool names provided by the host environment.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub external: Option<Vec<String>>,

    /// Schema delivery mode; only serialized when not the default (`Deferred`).
    #[serde(default, skip_serializing_if = "is_default_delivery_mode")]
    pub delivery_mode: ToolDeliveryMode,

    /// Tool count above which schemas are deferred
    /// (default: [`DEFAULT_DEFERRED_THRESHOLD`]).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub deferred_threshold: Option<usize>,

    /// Tools that always receive their full schema, in addition to
    /// [`CORE_TOOLS`].
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub always_full_schema: Vec<String>,
}

// `skip_serializing_if` helper: `Deferred` is the default delivery mode.
fn is_default_delivery_mode(mode: &ToolDeliveryMode) -> bool {
    *mode == ToolDeliveryMode::Deferred
}
873
874impl ToolsConfig {
875 pub fn invalid_builtin_tools(&self) -> Vec<String> {
878 self.builtin
879 .iter()
880 .filter(|name| !VALID_BUILTIN_TOOLS.contains(&name.as_str()))
881 .cloned()
882 .collect()
883 }
884
885 pub fn is_core_tool(&self, name: &str) -> bool {
887 CORE_TOOLS.contains(&name)
888 || self.always_full_schema.iter().any(|n| n == name)
889 || name.starts_with("call_")
891 }
892
893 pub fn effective_threshold(&self) -> usize {
895 self.deferred_threshold
896 .unwrap_or(DEFAULT_DEFERRED_THRESHOLD)
897 }
898
899 pub fn effective_delivery_mode(&self, _total_tools: usize) -> ToolDeliveryMode {
903 self.delivery_mode.clone()
904 }
905}
906
/// Selects which tools from a named MCP server are exposed to the agent.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct McpToolConfig {
    /// Name of the MCP server this rule applies to.
    pub server: String,

    /// Tool names to include; empty means "include all".
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub include: Vec<String>,

    /// Tool names to exclude.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub exclude: Vec<String>,
}

/// Declaration of an MCP server the agent can connect to.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields)]
pub struct McpDefinition {
    /// Optional tool-name filter applied to this server.
    #[serde(default)]
    pub filter: Option<Vec<String>>,
    /// Server name.
    pub name: String,
    /// Whether the server provides tools or acts as an agent (default: tool).
    #[serde(default)]
    pub r#type: McpServerType,
    /// Optional authentication scheme for the connection.
    #[serde(default)]
    pub auth_config: Option<crate::a2a::SecurityScheme>,
}

/// Role of an MCP server: a plain tool provider or a delegated agent.
#[derive(Debug, Clone, Serialize, Deserialize, Default, JsonSchema, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum McpServerType {
    #[default]
    Tool,
    Agent,
}
950
/// LLM provider configuration, internally tagged on `name` for serde.
/// Inline `api_key` fields take precedence over environment secrets
/// (see `required_secret_keys`).
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "lowercase", tag = "name")]
pub enum ModelProvider {
    /// Plain OpenAI API (endpoint fixed; key via env).
    #[serde(rename = "openai")]
    OpenAI {},
    /// Any OpenAI-compatible endpoint.
    #[serde(rename = "openai_compat")]
    OpenAICompatible {
        base_url: String,
        api_key: Option<String>,
        project_id: Option<String>,
    },
    /// Azure OpenAI deployment.
    #[serde(rename = "azure_openai")]
    AzureOpenAI {
        base_url: String,
        api_key: Option<String>,
        deployment: String,
        #[serde(default = "ModelProvider::azure_api_version")]
        api_version: String,
    },
    /// Anthropic API; `None` base URL means the client's built-in endpoint.
    #[serde(rename = "anthropic")]
    Anthropic {
        #[serde(default = "ModelProvider::anthropic_base_url")]
        base_url: Option<String>,
        api_key: Option<String>,
    },
    /// Google Gemini via its OpenAI-compatible endpoint.
    #[serde(rename = "gemini")]
    Gemini {
        #[serde(default = "ModelProvider::gemini_base_url")]
        base_url: String,
        api_key: Option<String>,
    },
    /// Azure AI Foundry (model inference) endpoint.
    #[serde(rename = "azure_ai_foundry")]
    AzureAiFoundry {
        base_url: String,
        api_key: Option<String>,
    },
    /// AWS Bedrock endpoint.
    #[serde(rename = "aws_bedrock")]
    AwsBedrock {
        base_url: String,
        api_key: Option<String>,
    },
    /// Google Vertex AI endpoint.
    #[serde(rename = "google_vertex")]
    GoogleVertex {
        base_url: String,
        api_key: Option<String>,
        project_id: Option<String>,
    },
    /// Alibaba Cloud DashScope (OpenAI-compatible) endpoint.
    #[serde(rename = "alibaba_cloud")]
    AlibabaCloud {
        #[serde(default = "ModelProvider::alibaba_cloud_base_url")]
        base_url: String,
        api_key: Option<String>,
    },
}
/// Describes the secrets a provider needs, for settings UIs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderSecretDefinition {
    /// Provider id (matches `ModelProvider::provider_id`).
    pub id: String,
    /// Display label for the provider.
    pub label: String,
    /// Individual secret entries the user must supply.
    pub keys: Vec<SecretKeyDefinition>,
}

/// One secret entry a provider configuration asks the user for.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecretKeyDefinition {
    /// Environment-variable-style key name.
    pub key: String,
    /// Display label.
    pub label: String,
    /// Placeholder text shown in the input field.
    pub placeholder: String,
    /// Whether the secret must be provided (default: true).
    #[serde(default = "default_required")]
    pub required: bool,
    /// Whether the value should be masked in UIs (default: true).
    #[serde(default = "default_sensitive")]
    pub sensitive: bool,
}
1033
/// Serde default for `SecretKeyDefinition::required`.
fn default_required() -> bool {
    true
}

/// Serde default for `SecretKeyDefinition::sensitive`.
fn default_sensitive() -> bool {
    true
}
1041
/// Minimal model identity (id + display name).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    pub id: String,
    pub name: String,
}

/// One provider entry in the bundled `default_models.json` file.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DefaultProviderEntry {
    id: String,
    label: String,
    keys: Vec<SecretKeyDefinition>,
    models: Vec<crate::models::Model>,
}

/// Top-level shape of the bundled `default_models.json` file.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct DefaultModelsFile {
    providers: Vec<DefaultProviderEntry>,
}
1065
/// Parses the compiled-in `default_models.json` once and caches it for the
/// lifetime of the process.
///
/// # Panics
/// Panics on first access if the bundled JSON fails to parse — this is a
/// build-time asset, so a parse failure is a packaging bug.
fn load_default_providers() -> &'static [DefaultProviderEntry] {
    use std::sync::OnceLock;
    static PROVIDERS: OnceLock<Vec<DefaultProviderEntry>> = OnceLock::new();
    PROVIDERS.get_or_init(|| {
        let json = include_str!("default_models.json");
        let file: DefaultModelsFile =
            serde_json::from_str(json).expect("Failed to parse default_models.json");
        file.providers
    })
}
1076
/// Models known for a given provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderModels {
    pub provider_id: String,
    pub provider_label: String,
    pub models: Vec<crate::models::Model>,
}

/// Like [`ProviderModels`], plus whether the provider's secrets are configured.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderModelsStatus {
    pub provider_id: String,
    pub provider_label: String,
    /// True when the provider's required secrets are present.
    pub configured: bool,
    pub models: Vec<crate::models::Model>,
}

// Plain OpenAI is the default provider; this must stay in sync with
// `default_model_provider()` and the sentinel logic in `ModelSettings::merge`.
impl Default for ModelProvider {
    fn default() -> Self {
        ModelProvider::OpenAI {}
    }
}
1106
1107impl ModelProvider {
1108 pub fn openai_base_url() -> String {
1109 "https://api.openai.com/v1".to_string()
1110 }
1111
1112 pub fn anthropic_base_url() -> Option<String> {
1113 None
1114 }
1115
1116 pub fn gemini_base_url() -> String {
1117 "https://generativelanguage.googleapis.com/v1beta/openai".to_string()
1118 }
1119
1120 pub fn azure_api_version() -> String {
1121 "2024-06-01".to_string()
1122 }
1123
1124 pub fn alibaba_cloud_base_url() -> String {
1125 "https://dashscope-intl.aliyuncs.com/compatible-mode/v1".to_string()
1126 }
1127
1128 pub fn provider_type(&self) -> crate::models::ProviderType {
1130 match self {
1131 ModelProvider::OpenAI {} => crate::models::ProviderType::OpenAI,
1132 ModelProvider::OpenAICompatible { .. } => {
1133 crate::models::ProviderType::Custom("openai_compat".to_string())
1134 }
1135 ModelProvider::AzureOpenAI { .. } => crate::models::ProviderType::Azure,
1136 ModelProvider::Anthropic { .. } => crate::models::ProviderType::Anthropic,
1137 ModelProvider::Gemini { .. } => crate::models::ProviderType::Gemini,
1138 ModelProvider::AzureAiFoundry { .. } => crate::models::ProviderType::AzureAiFoundry,
1139 ModelProvider::AwsBedrock { .. } => crate::models::ProviderType::AwsBedrock,
1140 ModelProvider::GoogleVertex { .. } => crate::models::ProviderType::GoogleVertex,
1141 ModelProvider::AlibabaCloud { .. } => crate::models::ProviderType::AlibabaCloud,
1142 }
1143 }
1144
1145 pub fn provider_id(&self) -> &str {
1147 match self {
1148 ModelProvider::OpenAI {} => "openai",
1149 ModelProvider::OpenAICompatible { .. } => "openai_compat",
1150 ModelProvider::AzureOpenAI { .. } => "azure_openai",
1151 ModelProvider::Anthropic { .. } => "anthropic",
1152 ModelProvider::Gemini { .. } => "gemini",
1153 ModelProvider::AzureAiFoundry { .. } => "azure_ai_foundry",
1154 ModelProvider::AwsBedrock { .. } => "aws_bedrock",
1155 ModelProvider::GoogleVertex { .. } => "google_vertex",
1156 ModelProvider::AlibabaCloud { .. } => "alibaba_cloud",
1157 }
1158 }
1159
1160 pub fn required_secret_keys(&self) -> Vec<&'static str> {
1162 match self {
1163 ModelProvider::OpenAI {} => vec!["OPENAI_API_KEY"],
1164 ModelProvider::OpenAICompatible { api_key, .. } => {
1165 if api_key.is_some() {
1166 vec![]
1167 } else {
1168 vec!["OPENAI_API_KEY"]
1169 }
1170 }
1171 ModelProvider::AzureOpenAI { api_key, .. } => {
1172 if api_key.is_some() {
1173 vec![]
1174 } else {
1175 vec!["AZURE_OPENAI_API_KEY"]
1176 }
1177 }
1178 ModelProvider::Anthropic { api_key, .. } => {
1179 if api_key.is_some() {
1180 vec![]
1181 } else {
1182 vec!["ANTHROPIC_API_KEY"]
1183 }
1184 }
1185 ModelProvider::Gemini { api_key, .. } => {
1186 if api_key.is_some() {
1187 vec![]
1188 } else {
1189 vec!["GEMINI_API_KEY"]
1190 }
1191 }
1192 ModelProvider::AzureAiFoundry { api_key, .. } => {
1193 if api_key.is_some() {
1194 vec![]
1195 } else {
1196 vec!["AZURE_AI_FOUNDRY_API_KEY"]
1197 }
1198 }
1199 ModelProvider::AwsBedrock { api_key, .. } => {
1200 if api_key.is_some() {
1201 vec![]
1202 } else {
1203 vec!["AWS_ACCESS_KEY_ID"]
1204 }
1205 }
1206 ModelProvider::GoogleVertex { api_key, .. } => {
1207 if api_key.is_some() {
1208 vec![]
1209 } else {
1210 vec!["GOOGLE_VERTEX_API_KEY"]
1211 }
1212 }
1213 ModelProvider::AlibabaCloud { api_key, .. } => {
1214 if api_key.is_some() {
1215 vec![]
1216 } else {
1217 vec!["DASHSCOPE_API_KEY"]
1218 }
1219 }
1220 }
1221 }
1222
1223 pub fn all_provider_definitions() -> Vec<ProviderSecretDefinition> {
1225 load_default_providers()
1226 .iter()
1227 .map(|p| ProviderSecretDefinition {
1228 id: p.id.clone(),
1229 label: p.label.clone(),
1230 keys: p.keys.clone(),
1231 })
1232 .collect()
1233 }
1234
1235 pub fn well_known_models() -> Vec<ProviderModels> {
1237 load_default_providers()
1238 .iter()
1239 .filter(|p| !p.models.is_empty())
1240 .map(|p| ProviderModels {
1241 provider_id: p.id.clone(),
1242 provider_label: p.label.clone(),
1243 models: p.models.clone(),
1244 })
1245 .collect()
1246 }
1247
1248 pub fn display_name(&self) -> &'static str {
1250 match self {
1251 ModelProvider::OpenAI {} => "OpenAI",
1252 ModelProvider::OpenAICompatible { .. } => "OpenAI Compatible",
1253 ModelProvider::AzureOpenAI { .. } => "Azure",
1254 ModelProvider::Anthropic { .. } => "Anthropic",
1255 ModelProvider::Gemini { .. } => "Google Gemini",
1256 ModelProvider::AzureAiFoundry { .. } => "Azure AI Foundry",
1257 ModelProvider::AwsBedrock { .. } => "AWS Bedrock",
1258 ModelProvider::GoogleVertex { .. } => "Google Vertex AI",
1259 ModelProvider::AlibabaCloud { .. } => "Alibaba Cloud",
1260 }
1261 }
1262
1263 pub fn otel_provider_name(&self) -> &'static str {
1266 match self {
1267 ModelProvider::OpenAI { .. } => "openai",
1268 ModelProvider::OpenAICompatible { .. } => "openai",
1269 ModelProvider::AzureOpenAI { .. } => "azure.ai.openai",
1270 ModelProvider::Anthropic { .. } => "anthropic",
1271 ModelProvider::Gemini { .. } => "google.gemini",
1272 ModelProvider::AzureAiFoundry { .. } => "azure.ai.inference",
1273 ModelProvider::AwsBedrock { .. } => "aws.bedrock",
1274 ModelProvider::GoogleVertex { .. } => "gcp.vertex_ai",
1275 ModelProvider::AlibabaCloud { .. } => "alibaba_cloud",
1276 }
1277 }
1278}
1279
/// Model name plus flattened tuning options.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct ModelSettings {
    /// Model identifier (e.g. "gpt-4o"). Required.
    pub model: String,
    /// Tuning options, flattened into the same serialized object.
    #[serde(flatten)]
    pub inner: ModelSettingsInner,
}

/// Tuning options shared by all providers.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Default)]
pub struct ModelSettingsInner {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Context window size; defaults via `default_context_size()` and is used
    /// as the "not explicitly set" sentinel in `ModelSettings::merge`.
    #[serde(default = "default_context_size")]
    pub context_size: u32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f32>,
    /// Provider configuration (default: plain OpenAI).
    #[serde(default = "default_model_provider")]
    pub provider: ModelProvider,
    /// Extra provider-specific request parameters, passed through verbatim.
    #[serde(default)]
    pub parameters: Option<serde_json::Value>,
    /// Response format hint (e.g. JSON mode), passed through verbatim.
    #[serde(default)]
    pub response_format: Option<serde_json::Value>,
    /// API surface selection; only serialized when not `Auto`.
    // NOTE(review): `is_default_api_format` is not visible in this chunk —
    // presumably defined elsewhere in the file.
    #[serde(default, skip_serializing_if = "is_default_api_format")]
    pub api_format: OpenAiApiFormat,
}
1320
1321impl ModelSettings {
1322 pub fn new(model: impl Into<String>) -> Self {
1324 Self {
1325 model: model.into(),
1326 inner: ModelSettingsInner::default(),
1327 }
1328 }
1329
1330 pub fn from_provider_model_str(s: &str) -> Result<Option<Self>, String> {
1338 let Some((provider_str, model_id)) = s.split_once('/') else {
1339 return Ok(None);
1340 };
1341 if model_id.is_empty() {
1342 return Ok(None);
1343 }
1344 let provider = match provider_str {
1345 "openai" => ModelProvider::OpenAI {},
1346 "anthropic" => ModelProvider::Anthropic {
1347 base_url: None,
1348 api_key: None,
1349 },
1350 "azure_openai" | "azure" => ModelProvider::AzureOpenAI {
1351 base_url: String::new(),
1352 api_key: None,
1353 deployment: model_id.to_string(),
1354 api_version: ModelProvider::azure_api_version(),
1355 },
1356 "gemini" => ModelProvider::Gemini {
1357 base_url: ModelProvider::gemini_base_url(),
1358 api_key: None,
1359 },
1360 "azure_ai_foundry" => ModelProvider::AzureAiFoundry {
1361 base_url: String::new(),
1362 api_key: None,
1363 },
1364 "aws_bedrock" => ModelProvider::AwsBedrock {
1365 base_url: String::new(),
1366 api_key: None,
1367 },
1368 "google_vertex" => ModelProvider::GoogleVertex {
1369 base_url: String::new(),
1370 api_key: None,
1371 project_id: None,
1372 },
1373 "alibaba_cloud" => ModelProvider::AlibabaCloud {
1374 base_url: ModelProvider::alibaba_cloud_base_url(),
1375 api_key: None,
1376 },
1377 _ if provider_str.starts_with("custom_") => ModelProvider::OpenAICompatible {
1378 base_url: String::new(),
1379 api_key: None,
1380 project_id: None,
1381 },
1382 _ => ModelProvider::OpenAICompatible {
1384 base_url: String::new(),
1385 api_key: None,
1386 project_id: None,
1387 },
1388 };
1389 Ok(Some(Self {
1390 model: model_id.to_string(),
1391 inner: ModelSettingsInner {
1392 provider,
1393 ..Default::default()
1394 },
1395 }))
1396 }
1397
1398 pub fn merge(&self, override_settings: &ModelSettings) -> Option<ModelSettings> {
1412 let default_provider = ModelProvider::OpenAI {};
1413 let override_has_explicit_provider = std::mem::discriminant(&override_settings.inner.provider)
1414 != std::mem::discriminant(&default_provider);
1415 let base_has_explicit_provider = std::mem::discriminant(&self.inner.provider)
1416 != std::mem::discriminant(&default_provider);
1417
1418 let (provider, model) = if override_has_explicit_provider {
1419 let model = if !override_settings.model.is_empty() {
1421 override_settings.model.clone()
1422 } else {
1423 self.model.clone()
1424 };
1425 (override_settings.inner.provider.clone(), model)
1426 } else if base_has_explicit_provider {
1427 let model = if !self.model.is_empty() {
1430 self.model.clone()
1431 } else if !override_settings.model.is_empty() {
1432 override_settings.model.clone()
1433 } else {
1434 String::new()
1435 };
1436 (self.inner.provider.clone(), model)
1437 } else {
1438 let model = if !override_settings.model.is_empty() {
1440 override_settings.model.clone()
1441 } else {
1442 self.model.clone()
1443 };
1444 (self.inner.provider.clone(), model)
1445 };
1446
1447 if model.is_empty() {
1448 return None;
1449 }
1450
1451 let default_context_size = 20000u32;
1452 Some(ModelSettings {
1453 model,
1454 inner: ModelSettingsInner {
1455 temperature: override_settings
1456 .inner
1457 .temperature
1458 .or(self.inner.temperature),
1459 max_tokens: override_settings.inner.max_tokens.or(self.inner.max_tokens),
1460 context_size: if override_settings.inner.context_size != default_context_size {
1461 override_settings.inner.context_size
1462 } else {
1463 self.inner.context_size
1464 },
1465 top_p: override_settings.inner.top_p.or(self.inner.top_p),
1466 frequency_penalty: override_settings
1467 .inner
1468 .frequency_penalty
1469 .or(self.inner.frequency_penalty),
1470 presence_penalty: override_settings
1471 .inner
1472 .presence_penalty
1473 .or(self.inner.presence_penalty),
1474 provider,
1475 parameters: if override_settings.inner.parameters.is_some() {
1476 override_settings.inner.parameters.clone()
1477 } else {
1478 self.inner.parameters.clone()
1479 },
1480 response_format: if override_settings.inner.response_format.is_some() {
1481 override_settings.inner.response_format.clone()
1482 } else {
1483 self.inner.response_format.clone()
1484 },
1485 api_format: if override_settings.inner.api_format != OpenAiApiFormat::Auto {
1486 override_settings.inner.api_format.clone()
1487 } else {
1488 self.inner.api_format.clone()
1489 },
1490 },
1491 })
1492 }
1493}
1494
/// Serde default for the agent definition version field.
pub fn default_agent_version() -> Option<String> {
    Some(String::from("0.2.2"))
}
1499
/// Serde default for the model provider: the plain `OpenAI {}` variant.
/// `ModelSettings::merge` treats this variant as "no explicit provider".
fn default_model_provider() -> ModelProvider {
    ModelProvider::OpenAI {}
}
1503
/// Serde default for `context_size`.
fn default_context_size() -> u32 {
    20_000
}
1507
1508fn is_default_api_format(f: &OpenAiApiFormat) -> bool {
1509 *f == OpenAiApiFormat::Auto
1510}
1511
/// Serde default for the history-size setting.
fn default_history_size() -> Option<usize> {
    Some(5)
}
1515
1516impl StandardDefinition {
1517 pub fn validate(&self) -> anyhow::Result<()> {
1518 if self.name.is_empty() {
1520 return Err(anyhow::anyhow!("Agent name cannot be empty"));
1521 }
1522
1523 if let Some(ref reflection) = self.reflection
1525 && reflection.enabled
1526 {
1527 if let Some(ref agent_name) = reflection.reflection_agent
1529 && agent_name.is_empty()
1530 {
1531 return Err(anyhow::anyhow!(
1532 "Reflection agent name cannot be empty when specified"
1533 ));
1534 }
1535 }
1536
1537 Ok(())
1538 }
1539
1540 pub fn validate_reflection_agent(agent_def: &StandardDefinition) -> anyhow::Result<()> {
1543 let has_reflect_tool = agent_def
1544 .tools
1545 .as_ref()
1546 .map(|t| t.builtin.iter().any(|name| name == "reflect"))
1547 .unwrap_or(false);
1548
1549 if !has_reflect_tool {
1550 anyhow::bail!(
1553 "Reflection agent '{}' must have the 'reflect' tool in its tools.builtin configuration",
1554 agent_def.name
1555 );
1556 }
1557
1558 Ok(())
1559 }
1560}
1561
1562impl From<StandardDefinition> for LlmDefinition {
1563 fn from(definition: StandardDefinition) -> Self {
1564 let model_settings = match (definition.model_settings, definition.context_size) {
1565 (Some(mut ms), Some(ctx)) => {
1566 ms.inner.context_size = ctx;
1567 Some(ms)
1568 }
1569 (ms, _) => ms,
1570 };
1571
1572 Self {
1573 name: definition.name,
1574 model_settings,
1575 tool_format: definition.tool_format,
1576 tool_delivery_mode: definition.tool_delivery_mode,
1577 }
1578 }
1579}
1580
1581impl ToolsConfig {
1582 pub fn builtin_only(tools: Vec<&str>) -> Self {
1584 Self {
1585 builtin: tools.into_iter().map(|s| s.to_string()).collect(),
1586 ..Default::default()
1587 }
1588 }
1589
1590 pub fn mcp_all(server: &str) -> Self {
1592 Self {
1593 mcp: vec![McpToolConfig {
1594 server: server.to_string(),
1595 include: vec!["*".to_string()],
1596 exclude: vec![],
1597 }],
1598 ..Default::default()
1599 }
1600 }
1601
1602 pub fn mcp_filtered(server: &str, include: Vec<&str>, exclude: Vec<&str>) -> Self {
1604 Self {
1605 mcp: vec![McpToolConfig {
1606 server: server.to_string(),
1607 include: include.into_iter().map(|s| s.to_string()).collect(),
1608 exclude: exclude.into_iter().map(|s| s.to_string()).collect(),
1609 }],
1610 ..Default::default()
1611 }
1612 }
1613}
1614
1615pub async fn parse_agent_markdown_content(content: &str) -> Result<StandardDefinition, AgentError> {
1616 let parts: Vec<&str> = content.split("---").collect();
1618
1619 if parts.len() < 3 {
1620 return Err(AgentError::Validation(
1621 "Invalid agent markdown format. Expected TOML frontmatter between --- markers"
1622 .to_string(),
1623 ));
1624 }
1625
1626 let toml_content = parts[1].trim();
1628 let mut agent_def: crate::StandardDefinition =
1629 toml::from_str(toml_content).map_err(|e| AgentError::Validation(e.to_string()))?;
1630
1631 if let Err(validation_error) = validate_plugin_name(&agent_def.name) {
1633 return Err(AgentError::Validation(format!(
1634 "Invalid agent name '{}': {}",
1635 agent_def.name, validation_error
1636 )));
1637 }
1638
1639 if !agent_def
1641 .name
1642 .chars()
1643 .all(|c| c.is_alphanumeric() || c == '_' || c == '/')
1644 || agent_def
1645 .name
1646 .chars()
1647 .next()
1648 .is_some_and(|c| c.is_numeric())
1649 || agent_def.name.chars().filter(|&c| c == '/').count() > 1
1650 {
1651 return Err(AgentError::Validation(format!(
1652 "Invalid agent name '{}': Agent names must be alphanumeric with underscores, at most one '/' for namespacing (e.g. '_system/plan'), cannot start with number.",
1653 agent_def.name
1654 )));
1655 }
1656
1657 let instructions = parts[2..].join("---").trim().to_string();
1659
1660 agent_def.instructions = instructions;
1662
1663 Ok(agent_def)
1664}
1665
/// Checks that `name` is a valid plugin/agent identifier.
///
/// A valid name is non-empty, uses only ASCII letters, digits, and
/// underscores, may contain at most one `/` as a workspace separator
/// (e.g. `workspace/agent`), and every `/`-separated segment must start
/// with a letter or underscore.
///
/// # Errors
/// Returns a human-readable description of the first violation found.
pub fn validate_plugin_name(name: &str) -> Result<(), String> {
    if name.is_empty() {
        return Err("Plugin name cannot be empty".to_string());
    }

    if name.contains('-') {
        return Err(format!(
            "Plugin name '{}' cannot contain hyphens. Use underscores instead.",
            name
        ));
    }

    if name.matches('/').count() > 1 {
        return Err(format!(
            "Plugin name '{}' can contain at most one '/' for workspace namespacing (e.g. 'workspace/agent')",
            name
        ));
    }

    for segment in name.split('/') {
        if segment.is_empty() {
            return Err(format!(
                "Plugin name '{}' has an empty segment around '/'",
                name
            ));
        }

        // Non-empty segment guaranteed above, so `next()` yields a char.
        let starts_ok = segment
            .chars()
            .next()
            .map(|c| c.is_ascii_alphabetic() || c == '_')
            .unwrap_or(true);
        if !starts_ok {
            return Err(format!(
                "Each segment in '{}' must start with a letter or underscore",
                name
            ));
        }

        if !segment.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
            return Err(format!(
                "Plugin name '{}' can only contain letters, numbers, underscores, and at most one '/' for namespacing",
                name
            ));
        }
    }

    Ok(())
}
1720
#[cfg(test)]
mod tests {
    //! Unit tests for serde defaults (`compaction_enabled`, `max_tokens`,
    //! `api_format`), OpenAI API-format auto-detection, tool delivery
    //! configuration, and `ModelSettings::merge` precedence rules.
    use super::*;

    // ---- compaction_enabled serde behavior --------------------------------

    #[test]
    fn test_compaction_enabled_defaults_to_true_via_serde() {
        // Absent field deserializes to true (serde default).
        let json = r#"{"name": "test"}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.compaction_enabled);
    }

    #[test]
    fn test_compaction_enabled_deserializes_true_when_absent() {
        let json = r#"{"name": "test", "description": "test agent"}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.compaction_enabled);
    }

    #[test]
    fn test_compaction_enabled_deserializes_false() {
        // An explicit false must survive the round trip.
        let json = r#"{"name": "test", "description": "test agent", "compaction_enabled": false}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(!def.compaction_enabled);
    }

    #[test]
    fn test_compaction_enabled_true_skipped_in_serialization() {
        // The default value (true) is omitted from serialized output.
        let def = StandardDefinition {
            name: "test".to_string(),
            compaction_enabled: true,
            ..Default::default()
        };
        let json = serde_json::to_string(&def).unwrap();
        assert!(!json.contains("compaction_enabled"));
    }

    #[test]
    fn test_compaction_enabled_false_serialized() {
        // The non-default value (false) must be written out explicitly.
        let def = StandardDefinition {
            name: "test".to_string(),
            compaction_enabled: false,
            ..Default::default()
        };
        let json = serde_json::to_string(&def).unwrap();
        assert!(json.contains("\"compaction_enabled\":false"));
    }

    // ---- max_tokens optionality -------------------------------------------

    #[test]
    fn test_max_tokens_optional_defaults_to_none() {
        // A default definition carries no model settings at all.
        let def = StandardDefinition::default();
        assert!(def.model_settings().is_none());
    }

    #[test]
    fn test_max_tokens_deserializes_when_present() {
        let json =
            r#"{"name": "test", "model_settings": {"model": "gpt-4.1", "max_tokens": 4096}}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert_eq!(def.model_settings().unwrap().inner.max_tokens, Some(4096));
    }

    #[test]
    fn test_max_tokens_none_when_absent() {
        let json = r#"{"name": "test", "model_settings": {"model": "gpt-4.1"}}"#;
        let def: StandardDefinition = serde_json::from_str(json).unwrap();
        assert!(def.model_settings().unwrap().inner.max_tokens.is_none());
    }

    #[test]
    fn test_max_tokens_none_skipped_in_serialization() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                max_tokens: None,
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(!json.contains("max_tokens"));
    }

    #[test]
    fn test_max_tokens_some_serialized() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                max_tokens: Some(2048),
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(json.contains("\"max_tokens\":2048"));
    }

    // ---- OpenAI API-format auto-detection ---------------------------------
    // `Auto` resolves to the Responses API for codex/pro/deep-research model
    // names and to Completions for everything else.

    #[test]
    fn test_api_format_auto_detect_codex_prefix() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("codex-mini-latest"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("codex-mini-2025-01-24"),
            ResolvedOpenAiApiFormat::Responses
        );
    }

    #[test]
    fn test_api_format_auto_detect_codex_suffix() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("gpt-5.1-codex"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("gpt-5.3-codex"),
            ResolvedOpenAiApiFormat::Responses
        );
    }

    #[test]
    fn test_api_format_auto_detect_pro_models() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(fmt.resolve("gpt-5-pro"), ResolvedOpenAiApiFormat::Responses);
        assert_eq!(
            fmt.resolve("gpt-5.2-pro"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("gpt-5.4-pro"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(fmt.resolve("o3-pro"), ResolvedOpenAiApiFormat::Responses);
    }

    #[test]
    fn test_api_format_auto_detect_deep_research_models() {
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(
            fmt.resolve("o3-deep-research"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            fmt.resolve("o4-mini-deep-research"),
            ResolvedOpenAiApiFormat::Responses
        );
    }

    #[test]
    fn test_api_format_auto_detect_non_codex() {
        // Plain chat models stay on the Completions API.
        let fmt = OpenAiApiFormat::Auto;
        assert_eq!(fmt.resolve("gpt-4o"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("gpt-4.1"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("gpt-5"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(fmt.resolve("o1"), ResolvedOpenAiApiFormat::Completions);
        assert_eq!(
            fmt.resolve("gpt-5.4-mini"),
            ResolvedOpenAiApiFormat::Completions
        );
        assert_eq!(fmt.resolve("o3-mini"), ResolvedOpenAiApiFormat::Completions);
    }

    #[test]
    fn test_api_format_explicit_override() {
        // An explicit format always beats model-name detection.
        assert_eq!(
            OpenAiApiFormat::Responses.resolve("gpt-4o"),
            ResolvedOpenAiApiFormat::Responses
        );
        assert_eq!(
            OpenAiApiFormat::Completions.resolve("codex-mini-latest"),
            ResolvedOpenAiApiFormat::Completions
        );
    }

    #[test]
    fn test_api_format_defaults_to_auto() {
        let inner = ModelSettingsInner::default();
        assert_eq!(inner.api_format, OpenAiApiFormat::Auto);
    }

    #[test]
    fn test_api_format_auto_skipped_in_serialization() {
        // Auto is the default and is omitted (see `is_default_api_format`).
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(!json.contains("api_format"));
    }

    #[test]
    fn test_api_format_responses_serialized() {
        let settings = ModelSettings {
            model: "test-model".to_string(),
            inner: ModelSettingsInner {
                api_format: OpenAiApiFormat::Responses,
                provider: ModelProvider::OpenAI {},
                ..Default::default()
            },
        };
        let json = serde_json::to_string(&settings).unwrap();
        assert!(json.contains("\"api_format\":\"responses\""));
    }

    #[test]
    fn test_api_format_deserializes_from_toml() {
        let toml_str = r#"
            model = "codex-mini-latest"
            api_format = "responses"
            [provider]
            name = "openai"
        "#;
        let settings: ModelSettings = toml::from_str(toml_str).unwrap();
        assert_eq!(settings.inner.api_format, OpenAiApiFormat::Responses);
    }

    // ---- tool delivery mode -----------------------------------------------

    #[test]
    fn test_tool_delivery_mode_defaults_to_deferred() {
        let mode: ToolDeliveryMode = Default::default();
        assert_eq!(mode, ToolDeliveryMode::Deferred);
    }

    #[test]
    fn test_tool_delivery_mode_backwards_compat_all_tools() {
        // Legacy alias "all_tools" maps to Full.
        let json = r#""all_tools""#;
        let mode: ToolDeliveryMode = serde_json::from_str(json).unwrap();
        assert_eq!(mode, ToolDeliveryMode::Full);
    }

    #[test]
    fn test_tool_delivery_mode_backwards_compat_tool_search() {
        // Legacy alias "tool_search" maps to Deferred.
        let json = r#""tool_search""#;
        let mode: ToolDeliveryMode = serde_json::from_str(json).unwrap();
        assert_eq!(mode, ToolDeliveryMode::Deferred);
    }

    #[test]
    fn test_tools_config_is_core_tool() {
        let config = ToolsConfig::default();
        assert!(config.is_core_tool("final"));
        assert!(config.is_core_tool("tool_search"));
        assert!(config.is_core_tool("execute_shell"));
        assert!(config.is_core_tool("call_coder"));
        assert!(!config.is_core_tool("browsr_scrape"));
    }

    #[test]
    fn test_tools_config_always_full_schema() {
        // Tools listed in always_full_schema are treated as core tools.
        let config = ToolsConfig {
            always_full_schema: vec!["browsr_scrape".to_string()],
            ..Default::default()
        };
        assert!(config.is_core_tool("browsr_scrape"));
        assert!(!config.is_core_tool("browsr_browser"));
    }

    #[test]
    fn test_effective_delivery_mode_full_stays_full() {
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Full,
            ..Default::default()
        };
        assert_eq!(config.effective_delivery_mode(100), ToolDeliveryMode::Full);
    }

    #[test]
    fn test_effective_delivery_mode_deferred_stays_deferred() {
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Deferred,
            deferred_threshold: Some(20),
            ..Default::default()
        };
        assert_eq!(
            config.effective_delivery_mode(10),
            ToolDeliveryMode::Deferred
        );
    }

    #[test]
    fn test_effective_delivery_mode_deferred_over_threshold() {
        // NOTE(review): documents current behavior — Deferred stays Deferred
        // even when the tool count exceeds the threshold.
        let config = ToolsConfig {
            delivery_mode: ToolDeliveryMode::Deferred,
            deferred_threshold: Some(10),
            ..Default::default()
        };
        assert_eq!(
            config.effective_delivery_mode(15),
            ToolDeliveryMode::Deferred
        );
    }

    // ---- runtime mode serde -----------------------------------------------

    #[test]
    fn test_runtime_mode_serde() {
        let mode: RuntimeMode = serde_json::from_str("\"cloud\"").unwrap();
        assert_eq!(mode, RuntimeMode::Cloud);
        let mode: RuntimeMode = serde_json::from_str("\"cli\"").unwrap();
        assert_eq!(mode, RuntimeMode::Cli);
        let mode: RuntimeMode = serde_json::from_str("\"browser\"").unwrap();
        assert_eq!(mode, RuntimeMode::Browser);
        assert_eq!(RuntimeMode::default(), RuntimeMode::Cloud);
        let json = serde_json::to_string(&RuntimeMode::Cli).unwrap();
        assert_eq!(json, "\"cli\"");
    }

    // ---- ModelSettings::merge precedence ----------------------------------
    // "base" plays the workspace-level settings; "agent" plays the override.

    #[test]
    fn merge_both_default_openai_agent_model_wins() {
        // Neither side has an explicit provider: the override model wins.
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings::new("gpt-4.1-mini");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-4.1-mini");
        assert!(matches!(result.inner.provider, ModelProvider::OpenAI {}));
    }

    #[test]
    fn merge_both_default_openai_base_model_used_when_agent_empty() {
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings::new("");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.1");
    }

    #[test]
    fn merge_agent_explicit_provider_wins() {
        // Override's explicit provider takes both provider and model.
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.com/v1".into(),
                    api_key: Some("key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "claude-sonnet-4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: None,
                    api_key: None,
                },
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "claude-sonnet-4");
        assert!(matches!(result.inner.provider, ModelProvider::Anthropic { .. }));
    }

    #[test]
    fn merge_agent_explicit_provider_no_model_uses_base() {
        let base = ModelSettings::new("gpt-5.1");
        let agent = ModelSettings {
            model: "".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: None,
                    api_key: None,
                },
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.1");
        assert!(matches!(result.inner.provider, ModelProvider::Anthropic { .. }));
    }

    #[test]
    fn merge_workspace_custom_provider_overrides_agent_model() {
        // Base with an explicit provider keeps its own model over the agent's.
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.azure.com/openai/v1".into(),
                    api_key: Some("test-key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-5.1");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
        assert!(matches!(result.inner.provider, ModelProvider::OpenAICompatible { .. }));
    }

    #[test]
    fn merge_workspace_custom_provider_agent_empty_model() {
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.azure.com/openai/v1".into(),
                    api_key: Some("test-key".into()),
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
    }

    #[test]
    fn merge_both_empty_returns_none() {
        // No resolvable model name anywhere -> None.
        let base = ModelSettings::new("");
        let agent = ModelSettings::new("");

        assert!(base.merge(&agent).is_none());
    }

    #[test]
    fn merge_workspace_empty_agent_empty_returns_none() {
        // Even an explicit provider cannot rescue an empty model name.
        let base = ModelSettings {
            model: "".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::OpenAICompatible {
                    base_url: "https://custom.com".into(),
                    api_key: None,
                    project_id: None,
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");

        assert!(base.merge(&agent).is_none());
    }

    #[test]
    fn merge_temperature_max_tokens_override() {
        // Scalar knobs: override's Some wins; override's None falls back.
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                temperature: Some(0.5),
                max_tokens: Some(1000),
                top_p: Some(0.9),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                temperature: Some(0.9),
                max_tokens: None,
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-4.1-mini");
        assert_eq!(result.inner.temperature, Some(0.9));
        assert_eq!(result.inner.max_tokens, Some(1000));
        assert_eq!(result.inner.top_p, Some(0.9));
    }

    #[test]
    fn merge_context_size_non_default_wins() {
        // 20000 is the serde default; a non-default override value wins.
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                context_size: 20000,
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                context_size: 100000,
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.inner.context_size, 100000);
    }

    #[test]
    fn merge_context_size_default_falls_back() {
        // An override at the default (20000) is treated as "unset".
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                context_size: 128000,
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                context_size: 20000,
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.inner.context_size, 128000);
    }

    #[test]
    fn merge_azure_ai_foundry_base_url_preserved() {
        // Explicit base provider keeps its variant fields intact.
        let base = ModelSettings {
            model: "gpt-5.4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::AzureAiFoundry {
                    base_url: "https://myresource.openai.azure.com".into(),
                    api_key: Some("test-key".into()),
                },
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-5.1");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "gpt-5.4");
        assert!(matches!(result.inner.provider, ModelProvider::AzureAiFoundry { .. }));
        if let ModelProvider::AzureAiFoundry { base_url, .. } = result.inner.provider {
            assert_eq!(base_url, "https://myresource.openai.azure.com");
        }
    }

    #[test]
    fn merge_anthropic_provider_preserves_base_url() {
        let base = ModelSettings {
            model: "claude-sonnet-4".into(),
            inner: ModelSettingsInner {
                provider: ModelProvider::Anthropic {
                    base_url: Some("https://custom.anthropic.com".into()),
                    api_key: Some("key".into()),
                },
                temperature: Some(0.7),
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("");

        let result = base.merge(&agent).unwrap();
        assert_eq!(result.model, "claude-sonnet-4");
        assert_eq!(result.inner.temperature, Some(0.7));
        if let ModelProvider::Anthropic { base_url, api_key } = result.inner.provider {
            assert_eq!(base_url, Some("https://custom.anthropic.com".into()));
            assert_eq!(api_key, Some("key".into()));
        }
    }

    #[test]
    fn merge_response_format_agent_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "text"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "json_object"})),
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.response_format,
            Some(serde_json::json!({"type": "json_object"}))
        );
    }

    #[test]
    fn merge_response_format_base_fallback() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                response_format: Some(serde_json::json!({"type": "text"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings::new("gpt-4.1-mini");

        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.response_format,
            Some(serde_json::json!({"type": "text"}))
        );
    }

    #[test]
    fn merge_parameters_agent_wins() {
        let base = ModelSettings {
            model: "gpt-5.1".into(),
            inner: ModelSettingsInner {
                parameters: Some(serde_json::json!({"key": "base"})),
                ..Default::default()
            },
        };
        let agent = ModelSettings {
            model: "gpt-4.1-mini".into(),
            inner: ModelSettingsInner {
                parameters: Some(serde_json::json!({"key": "agent"})),
                ..Default::default()
            },
        };

        let result = base.merge(&agent).unwrap();
        assert_eq!(
            result.inner.parameters,
            Some(serde_json::json!({"key": "agent"}))
        );
    }
}