1use anyhow::{Context, Result};
23
/// Static, compile-time metadata describing a single LLM provider:
/// its canonical name, default endpoint/model, and API-key requirements.
pub struct ProviderMeta {
    /// Canonical lowercase provider name (also used by `Display` for `ProviderType`).
    pub name: &'static str,
    /// Default API base URL for this provider.
    pub url: &'static str,
    /// Default model identifier ("auto-detect" for local servers that list their own models).
    pub model: &'static str,
    /// Environment variable consulted for the API key.
    pub env_key: &'static str,
    /// Whether an API key is required (`false` for local servers like LM Studio/Ollama/vLLM).
    pub api_key: bool,
}
37use serde::Deserialize;
38use std::path::{Path, PathBuf};
39
/// Every LLM backend Koda can talk to.
///
/// Deserializes from lowercase strings (e.g. `"openai"`, `"lmstudio"`) via
/// `#[serde(rename_all = "lowercase")]`; see `from_url_or_name` for the more
/// permissive alias handling used outside serde.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ProviderType {
    OpenAI,
    Anthropic,
    LMStudio,
    Gemini,
    Groq,
    Grok,
    Ollama,
    DeepSeek,
    Mistral,
    MiniMax,
    OpenRouter,
    Together,
    Fireworks,
    Vllm,
    /// In-memory test double; only compiled for tests / the `test-support` feature.
    #[cfg(any(test, feature = "test-support"))]
    Mock,
}
76
77impl ProviderType {
78 pub fn meta(&self) -> ProviderMeta {
80 match self {
81 Self::OpenAI => ProviderMeta {
82 name: "openai",
83 url: "https://api.openai.com/v1",
84 model: "gpt-4o",
85 env_key: "OPENAI_API_KEY",
86 api_key: true,
87 },
88 Self::Anthropic => ProviderMeta {
89 name: "anthropic",
90 url: "https://api.anthropic.com",
91 model: "claude-sonnet-4-6",
92 env_key: "ANTHROPIC_API_KEY",
93 api_key: true,
94 },
95 Self::LMStudio => ProviderMeta {
96 name: "lm-studio",
97 url: "http://localhost:1234/v1",
98 model: "auto-detect",
99 env_key: "KODA_API_KEY",
100 api_key: false,
101 },
102 Self::Gemini => ProviderMeta {
103 name: "gemini",
104 url: "https://generativelanguage.googleapis.com",
105 model: "gemini-flash-latest",
106 env_key: "GEMINI_API_KEY",
107 api_key: true,
108 },
109 Self::Groq => ProviderMeta {
110 name: "groq",
111 url: "https://api.groq.com/openai/v1",
112 model: "llama-3.3-70b-versatile",
113 env_key: "GROQ_API_KEY",
114 api_key: true,
115 },
116 Self::Grok => ProviderMeta {
117 name: "grok",
118 url: "https://api.x.ai/v1",
119 model: "grok-3",
120 env_key: "XAI_API_KEY",
121 api_key: true,
122 },
123 Self::Ollama => ProviderMeta {
124 name: "ollama",
125 url: "http://localhost:11434/v1",
126 model: "auto-detect",
127 env_key: "KODA_API_KEY",
128 api_key: false,
129 },
130 Self::DeepSeek => ProviderMeta {
131 name: "deepseek",
132 url: "https://api.deepseek.com/v1",
133 model: "deepseek-chat",
134 env_key: "DEEPSEEK_API_KEY",
135 api_key: true,
136 },
137 Self::Mistral => ProviderMeta {
138 name: "mistral",
139 url: "https://api.mistral.ai/v1",
140 model: "mistral-large-latest",
141 env_key: "MISTRAL_API_KEY",
142 api_key: true,
143 },
144 Self::MiniMax => ProviderMeta {
145 name: "minimax",
146 url: "https://api.minimax.io/v1",
147 model: "minimax-text-01",
148 env_key: "MINIMAX_API_KEY",
149 api_key: true,
150 },
151 Self::OpenRouter => ProviderMeta {
152 name: "openrouter",
153 url: "https://openrouter.ai/api/v1",
154 model: "anthropic/claude-3.5-sonnet",
155 env_key: "OPENROUTER_API_KEY",
156 api_key: true,
157 },
158 Self::Together => ProviderMeta {
159 name: "together",
160 url: "https://api.together.xyz/v1",
161 model: "meta-llama/Llama-3.3-70B-Instruct-Turbo",
162 env_key: "TOGETHER_API_KEY",
163 api_key: true,
164 },
165 Self::Fireworks => ProviderMeta {
166 name: "fireworks",
167 url: "https://api.fireworks.ai/inference/v1",
168 model: "accounts/fireworks/models/llama-v3p3-70b-instruct",
169 env_key: "FIREWORKS_API_KEY",
170 api_key: true,
171 },
172 Self::Vllm => ProviderMeta {
173 name: "vllm",
174 url: "http://localhost:8000/v1",
175 model: "auto-detect",
176 env_key: "KODA_API_KEY",
177 api_key: false,
178 },
179 #[cfg(any(test, feature = "test-support"))]
180 Self::Mock => ProviderMeta {
181 name: "mock",
182 url: "http://localhost:0",
183 model: "mock-model",
184 env_key: "KODA_API_KEY",
185 api_key: false,
186 },
187 }
188 }
189
190 pub fn requires_api_key(&self) -> bool {
192 self.meta().api_key
193 }
194 pub fn default_base_url(&self) -> &str {
196 self.meta().url
197 }
198 pub fn default_model(&self) -> &str {
200 self.meta().model
201 }
202 pub fn env_key_name(&self) -> &str {
204 self.meta().env_key
205 }
206
207 pub fn from_url_or_name(url: &str, name: Option<&str>) -> Self {
209 if let Some(n) = name {
210 return match n.to_lowercase().as_str() {
211 "anthropic" | "claude" => Self::Anthropic,
212 "gemini" | "google" => Self::Gemini,
213 "groq" => Self::Groq,
214 "grok" | "xai" => Self::Grok,
215 "lmstudio" | "lm-studio" => Self::LMStudio,
216 "ollama" => Self::Ollama,
217 "deepseek" => Self::DeepSeek,
218 "mistral" => Self::Mistral,
219 "minimax" => Self::MiniMax,
220 "openrouter" => Self::OpenRouter,
221 "together" => Self::Together,
222 "fireworks" => Self::Fireworks,
223 "vllm" => Self::Vllm,
224 #[cfg(any(test, feature = "test-support"))]
225 "mock" => Self::Mock,
226 _ => Self::OpenAI,
227 };
228 }
229 let url = url.to_lowercase();
231 if url.contains("anthropic.com") {
232 Self::Anthropic
233 } else if url.contains("localhost:11434") || url.contains("127.0.0.1:11434") {
234 Self::Ollama
235 } else if url.contains("localhost:8000") || url.contains("127.0.0.1:8000") {
236 Self::Vllm
237 } else if url.contains("localhost") || url.contains("127.0.0.1") {
238 Self::LMStudio
239 } else if url.contains("generativelanguage.googleapis.com") {
240 Self::Gemini
241 } else if url.contains("groq.com") {
242 Self::Groq
243 } else if url.contains("x.ai") {
244 Self::Grok
245 } else if url.contains("deepseek.com") {
246 Self::DeepSeek
247 } else if url.contains("mistral.ai") {
248 Self::Mistral
249 } else if url.contains("minimax.chat") || url.contains("minimaxi.com") {
250 Self::MiniMax
251 } else if url.contains("openrouter.ai") {
252 Self::OpenRouter
253 } else if url.contains("together.xyz") {
254 Self::Together
255 } else if url.contains("fireworks.ai") {
256 Self::Fireworks
257 } else {
258 Self::OpenAI
259 }
260 }
261}
262
263impl std::fmt::Display for ProviderType {
264 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
265 write!(f, "{}", self.meta().name)
266 }
267}
268
/// Per-model generation settings resolved from provider defaults plus any
/// agent-level and CLI-level overrides.
#[derive(Debug, Clone)]
pub struct ModelSettings {
    /// Model identifier sent to the provider.
    pub model: String,
    /// Max output tokens; `None` means "use the provider's default".
    pub max_tokens: Option<u32>,
    /// Sampling temperature; `None` means "use the provider's default".
    pub temperature: Option<f64>,
    /// Token budget for extended-thinking models, if configured.
    pub thinking_budget: Option<u32>,
    /// Reasoning-effort hint (e.g. "low"/"high") for providers that accept one.
    pub reasoning_effort: Option<String>,
    /// Context window size used for history trimming.
    pub max_context_tokens: usize,
}
285
286impl ModelSettings {
287 pub fn defaults_for(model: &str, provider: &ProviderType) -> Self {
289 let max_tokens = match provider {
290 ProviderType::Anthropic => Some(16384),
291 _ => None,
292 };
293 let max_context_tokens = crate::model_context::context_window_for_model(model);
294 Self {
295 model: model.to_string(),
296 max_tokens,
297 temperature: None,
298 thinking_budget: None,
299 reasoning_effort: None,
300 max_context_tokens,
301 }
302 }
303}
304
/// Raw agent definition as deserialized from an `agents/<name>.json` file or
/// a built-in agent. `Option` fields left as `None` inherit provider/parent
/// defaults when resolved into a `KodaConfig`.
#[derive(Debug, Clone, Deserialize)]
pub struct AgentConfig {
    /// Agent name (by convention matches the JSON file stem).
    pub name: String,
    /// Optional human-readable description.
    #[serde(default)]
    pub description: Option<String>,
    /// System prompt injected at the start of every conversation.
    pub system_prompt: String,
    /// Tool allow-list; empty means "no explicit restriction".
    #[serde(default)]
    pub allowed_tools: Vec<String>,
    /// Tool deny-list (write tools are appended unless `write_access` is set).
    #[serde(default)]
    pub disallowed_tools: Vec<String>,
    /// Explicit model override; `None` inherits the provider default.
    #[serde(default)]
    pub model: Option<String>,
    /// Explicit base URL override; `None` inherits the provider default.
    #[serde(default)]
    pub base_url: Option<String>,
    /// Explicit provider name/alias; `None` means detect from the URL.
    #[serde(default)]
    pub provider: Option<String>,
    /// Max output tokens override.
    #[serde(default)]
    pub max_tokens: Option<u32>,
    /// Sampling temperature override.
    #[serde(default)]
    pub temperature: Option<f64>,
    /// Extended-thinking token budget override.
    #[serde(default)]
    pub thinking_budget: Option<u32>,
    /// Reasoning-effort hint override.
    #[serde(default)]
    pub reasoning_effort: Option<String>,
    /// Context window override.
    #[serde(default)]
    pub max_context_tokens: Option<usize>,
    /// Agent-loop iteration cap override.
    #[serde(default)]
    pub max_iterations: Option<u32>,
    /// When false (the default), Write/Edit/Delete are force-denied.
    #[serde(default)]
    pub write_access: bool,
    /// When true, skip loading persistent memory for this agent.
    #[serde(default)]
    pub skip_memory: bool,
}
388
/// Fully resolved runtime configuration: an `AgentConfig` merged with
/// provider defaults, environment fallbacks, and CLI overrides.
#[derive(Debug, Clone)]
pub struct KodaConfig {
    /// Resolved agent name.
    pub agent_name: String,
    /// Resolved system prompt.
    pub system_prompt: String,
    /// Tool allow-list.
    pub allowed_tools: Vec<String>,
    /// Tool deny-list, including auto-denied write tools for read-only agents.
    pub disallowed_tools: Vec<String>,
    /// Detected/selected provider.
    pub provider_type: ProviderType,
    /// Resolved API base URL.
    pub base_url: String,
    /// Resolved model identifier.
    pub model: String,
    /// Context window used for history trimming (mirrored in `model_settings`).
    pub max_context_tokens: usize,
    /// Directory agent JSON files were loaded from.
    pub agents_dir: PathBuf,
    /// Resolved per-model generation settings.
    pub model_settings: ModelSettings,
    /// Agent-loop iteration cap.
    pub max_iterations: u32,
    /// Whether persistent memory is skipped for this agent.
    pub skip_memory: bool,
    /// Trust/sandbox mode; always starts `Safe`, raised via `with_trust`.
    pub trust: crate::trust::TrustMode,
}
420
impl KodaConfig {
    /// Loads and fully resolves the configuration for `agent_name`.
    ///
    /// Resolution order: on-disk `agents/<name>.json` (project, then user
    /// config dir), then compiled-in built-ins. Provider is detected from the
    /// agent's explicit `provider`/`base_url`, defaulting to the LM Studio
    /// localhost URL when neither is set; `KODA_LOCAL_URL` can redirect
    /// keyless (local) providers. Agent-level settings override the
    /// per-provider/per-model defaults.
    ///
    /// # Errors
    /// Fails when the agent file exists but cannot be read or parsed, or when
    /// the agent is found neither on disk nor among the built-ins.
    pub fn load(project_root: &Path, agent_name: &str) -> Result<Self> {
        // Missing agents dir is not fatal: built-ins still resolve below.
        let agents_dir =
            Self::find_agents_dir(project_root).unwrap_or_else(|_| PathBuf::from("agents"));

        let agent_file = agents_dir.join(format!("{agent_name}.json"));
        let agent: AgentConfig = if agent_file.exists() {
            let json = std::fs::read_to_string(&agent_file)
                .with_context(|| format!("Failed to read agent config: {agent_file:?}"))?;
            serde_json::from_str(&json)
                .with_context(|| format!("Failed to parse agent config: {agent_file:?}"))?
        } else if let Some(builtin) = Self::load_builtin(agent_name) {
            builtin
        } else {
            anyhow::bail!("Agent '{agent_name}' not found (checked disk and built-ins)");
        };

        // Provider detection runs on the agent's URL when present; otherwise
        // on the LM Studio default, so an agent with no provider info resolves
        // to a local server rather than a cloud API.
        let default_url = agent
            .base_url
            .clone()
            .unwrap_or_else(|| "http://localhost:1234/v1".to_string());
        let provider_type = ProviderType::from_url_or_name(&default_url, agent.provider.as_deref());

        // KODA_LOCAL_URL only applies to keyless (local) providers, and only
        // when the agent didn't pin a URL itself.
        let mut base_url = agent.base_url;
        if base_url.is_none()
            && !provider_type.requires_api_key()
            && let Some(env_url) = crate::runtime_env::get("KODA_LOCAL_URL")
        {
            base_url = Some(env_url);
        }

        let base_url = base_url.unwrap_or_else(|| provider_type.default_base_url().to_string());
        let model = agent
            .model
            .unwrap_or_else(|| provider_type.default_model().to_string());

        // Layer agent overrides on top of per-provider defaults. Note the
        // context-window snapshot is taken right after the ctx override so the
        // struct field and model_settings stay in sync.
        let mut settings = ModelSettings::defaults_for(&model, &provider_type);
        if let Some(ctx) = agent.max_context_tokens {
            settings.max_context_tokens = ctx;
        }
        let max_context_tokens = settings.max_context_tokens;
        if let Some(mt) = agent.max_tokens {
            settings.max_tokens = Some(mt);
        }
        if let Some(t) = agent.temperature {
            settings.temperature = Some(t);
        }
        if let Some(tb) = agent.thinking_budget {
            settings.thinking_budget = Some(tb);
        }
        if let Some(ref re) = agent.reasoning_effort {
            settings.reasoning_effort = Some(re.clone());
        }

        // NOTE(review): hard-coded 200 here while default_for_testing uses
        // crate::loop_guard::MAX_ITERATIONS_DEFAULT — confirm these agree and
        // consider sharing the constant.
        let max_iterations = agent.max_iterations.unwrap_or(200);

        Ok(Self {
            agent_name: agent.name,
            system_prompt: agent.system_prompt,
            allowed_tools: agent.allowed_tools,
            disallowed_tools: Self::apply_default_deny(agent.disallowed_tools, agent.write_access),
            provider_type,
            base_url,
            model: model.clone(),
            max_context_tokens,
            agents_dir,
            model_settings: settings,
            max_iterations,
            skip_memory: agent.skip_memory,
            // Trust always starts Safe; callers escalate via with_trust().
            trust: crate::trust::TrustMode::Safe,
        })
    }

    // Tools force-denied for agents without write_access.
    const WRITE_TOOLS: &'static [&'static str] = &["Write", "Edit", "Delete"];

    /// Appends the write tools to `disallowed` (without duplicating entries)
    /// unless the agent explicitly opted into write access.
    fn apply_default_deny(mut disallowed: Vec<String>, write_access: bool) -> Vec<String> {
        if !write_access {
            for tool in Self::WRITE_TOOLS {
                let name = tool.to_string();
                if !disallowed.contains(&name) {
                    disallowed.push(name);
                }
            }
        }
        disallowed
    }

    /// Applies CLI-style base-URL/model/provider overrides.
    ///
    /// An explicit provider name wins; otherwise a changed base URL triggers
    /// provider re-detection from the URL. A model change recomputes
    /// model-derived values (context window, iteration cap).
    pub fn with_overrides(
        mut self,
        base_url: Option<String>,
        model: Option<String>,
        provider: Option<String>,
    ) -> Self {
        if let Some(ref url) = base_url {
            self.base_url = url.clone();
        }
        if let Some(ref p) = provider {
            self.provider_type = ProviderType::from_url_or_name(&self.base_url, Some(p));
        }
        // URL changed but no explicit provider: re-detect from the new URL.
        if base_url.is_some() && provider.is_none() {
            self.provider_type = ProviderType::from_url_or_name(&self.base_url, None);
        }
        if let Some(m) = model {
            self.model = m.clone();
            self.model_settings.model = m.clone();
            self.recalculate_model_derived();
        }
        self
    }

    /// Applies per-setting generation overrides; `None` leaves the existing
    /// value untouched.
    pub fn with_model_overrides(
        mut self,
        max_tokens: Option<u32>,
        temperature: Option<f64>,
        thinking_budget: Option<u32>,
        reasoning_effort: Option<String>,
    ) -> Self {
        if let Some(mt) = max_tokens {
            self.model_settings.max_tokens = Some(mt);
        }
        if let Some(t) = temperature {
            self.model_settings.temperature = Some(t);
        }
        if let Some(tb) = thinking_budget {
            self.model_settings.thinking_budget = Some(tb);
        }
        if let Some(re) = reasoning_effort {
            self.model_settings.reasoning_effort = Some(re);
        }
        self
    }

    /// Sets the trust/sandbox mode.
    pub fn with_trust(mut self, mode: crate::trust::TrustMode) -> Self {
        self.trust = mode;
        self
    }

    /// Recomputes values derived from the current `self.model`: context
    /// window (from the static lookup table) and the iteration cap.
    ///
    /// NOTE(review): this unconditionally resets max_iterations to 200, which
    /// discards any agent-configured max_iterations whenever the model is
    /// overridden via with_overrides — confirm that is intended (the existing
    /// test test_recalculate_updates_context_window asserts the reset).
    pub fn recalculate_model_derived(&mut self) {
        let new_ctx = crate::model_context::context_window_for_model(&self.model);
        self.max_context_tokens = new_ctx;
        self.model_settings.max_context_tokens = new_ctx;

        self.max_iterations = 200;
    }

    /// Folds provider-reported capabilities into the config. The API-reported
    /// context window always wins; the API max-output only fills in
    /// `max_tokens` when nothing set it explicitly.
    pub fn apply_provider_capabilities(&mut self, caps: &crate::providers::ModelCapabilities) {
        if let Some(ctx) = caps.context_window {
            self.max_context_tokens = ctx;
            self.model_settings.max_context_tokens = ctx;
            tracing::info!("Context window from API: {} tokens for {}", ctx, self.model);
        }
        if let Some(max_out) = caps.max_output_tokens {
            if self.model_settings.max_tokens.is_none() {
                self.model_settings.max_tokens = Some(max_out as u32);
                tracing::info!("Max output tokens from API: {} for {}", max_out, self.model);
            }
        }
    }

    /// Queries the provider for model capabilities and applies them.
    /// Best-effort: failures and empty responses are logged at debug level and
    /// the lookup-table values stay in effect.
    pub async fn query_and_apply_capabilities(
        &mut self,
        provider: &dyn crate::providers::LlmProvider,
    ) {
        match provider.model_capabilities(&self.model).await {
            Ok(caps) if caps.context_window.is_some() || caps.max_output_tokens.is_some() => {
                self.apply_provider_capabilities(&caps);
            }
            Ok(_) => {
                tracing::debug!(
                    "Provider did not report capabilities for {}; using lookup table ({}k tokens)",
                    self.model,
                    self.max_context_tokens / 1000
                );
            }
            Err(e) => {
                tracing::debug!("Could not query model capabilities: {e:#}");
            }
        }
    }

    // Agent definitions compiled into the binary; disk files with the same
    // name shadow these.
    const BUILTIN_AGENTS: &[(&str, &str)] = &[
        ("default", include_str!("../agents/default.json")),
        ("task", include_str!("../agents/task.json")),
        ("explore", include_str!("../agents/explore.json")),
        ("plan", include_str!("../agents/plan.json")),
        ("verify", include_str!("../agents/verify.json")),
    ];

    /// Loads the *raw* `AgentConfig` (disk first, then built-ins) without any
    /// default resolution — `Option` fields stay `None` so callers can tell
    /// what the agent explicitly configured.
    ///
    /// # Errors
    /// Fails on unreadable/unparsable files or an unknown agent name.
    pub fn load_agent_json(project_root: &Path, agent_name: &str) -> Result<AgentConfig> {
        let agents_dir =
            Self::find_agents_dir(project_root).unwrap_or_else(|_| PathBuf::from("agents"));
        let agent_file = agents_dir.join(format!("{agent_name}.json"));
        if agent_file.exists() {
            let json = std::fs::read_to_string(&agent_file)
                .with_context(|| format!("Failed to read agent config: {agent_file:?}"))?;
            serde_json::from_str(&json)
                .with_context(|| format!("Failed to parse agent config: {agent_file:?}"))
        } else {
            Self::load_builtin(agent_name)
                .ok_or_else(|| anyhow::anyhow!("Agent '{agent_name}' not found"))
        }
    }

    /// Parses a built-in agent by name; `None` if unknown or its embedded
    /// JSON fails to parse.
    pub fn load_builtin(name: &str) -> Option<AgentConfig> {
        Self::BUILTIN_AGENTS
            .iter()
            .find(|(n, _)| *n == name)
            .and_then(|(_, json)| serde_json::from_str(json).ok())
    }

    /// Returns all built-in agents that parse successfully, as (name, config)
    /// pairs.
    pub fn builtin_agents() -> Vec<(String, AgentConfig)> {
        Self::BUILTIN_AGENTS
            .iter()
            .filter_map(|(name, json)| {
                let config: AgentConfig = serde_json::from_str(json).ok()?;
                Some((name.to_string(), config))
            })
            .collect()
    }

    /// Minimal config for tests: provider defaults, no tools, Safe trust.
    pub fn default_for_testing(provider_type: ProviderType) -> Self {
        let model = provider_type.default_model().to_string();
        let model_settings = ModelSettings::defaults_for(&model, &provider_type);
        let max_context_tokens = model_settings.max_context_tokens;

        Self {
            agent_name: "test".to_string(),
            system_prompt: "You are a test agent.".to_string(),
            allowed_tools: Vec::new(),
            disallowed_tools: Vec::new(),
            base_url: provider_type.default_base_url().to_string(),
            model,
            provider_type,
            max_context_tokens,
            agents_dir: PathBuf::from("agents"),
            model_settings,
            max_iterations: crate::loop_guard::MAX_ITERATIONS_DEFAULT,
            skip_memory: false,
            trust: crate::trust::TrustMode::Safe,
        }
    }

    /// Finds an agents directory: `<project_root>/agents` first, then the
    /// per-user `~/.config/koda/agents`. Errors when neither exists (callers
    /// treat that as "built-ins only").
    fn find_agents_dir(project_root: &Path) -> Result<PathBuf> {
        let local = project_root.join("agents");
        if local.is_dir() {
            return Ok(local);
        }

        let config_agents = Self::user_agents_dir()?;
        if config_agents.is_dir() {
            return Ok(config_agents);
        }

        anyhow::bail!("No agents directory on disk (built-in agents are still available)")
    }

    /// Per-user agents directory, derived from HOME (or USERPROFILE on
    /// Windows), falling back to the current directory when neither is set.
    fn user_agents_dir() -> Result<PathBuf> {
        let home = std::env::var("HOME")
            .or_else(|_| std::env::var("USERPROFILE"))
            .map(PathBuf::from)
            .unwrap_or_else(|_| PathBuf::from("."));
        Ok(home.join(".config").join("koda").join("agents"))
    }
}
741
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    // --- Provider detection from URLs and names ---------------------------

    #[test]
    fn test_provider_from_url_anthropic() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.anthropic.com/v1", None),
            ProviderType::Anthropic
        );
    }

    #[test]
    fn test_provider_from_url_localhost_defaults_to_lmstudio() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:1234/v1", None),
            ProviderType::LMStudio
        );
    }

    #[test]
    fn test_provider_from_explicit_name_overrides_url() {
        assert_eq!(
            ProviderType::from_url_or_name("https://my-proxy.corp.com/v1", Some("anthropic")),
            ProviderType::Anthropic
        );
    }

    #[test]
    fn test_unknown_url_defaults_to_openai() {
        assert_eq!(
            ProviderType::from_url_or_name("https://random.example.com/v1", None),
            ProviderType::OpenAI
        );
    }

    #[test]
    fn test_provider_name_aliases() {
        assert_eq!(
            ProviderType::from_url_or_name("", Some("claude")),
            ProviderType::Anthropic
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("google")),
            ProviderType::Gemini
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("xai")),
            ProviderType::Grok
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("lm-studio")),
            ProviderType::LMStudio
        );
    }

    #[test]
    fn test_provider_display() {
        assert_eq!(format!("{}", ProviderType::OpenAI), "openai");
        assert_eq!(format!("{}", ProviderType::Anthropic), "anthropic");
        assert_eq!(format!("{}", ProviderType::LMStudio), "lm-studio");
    }

    #[test]
    fn test_each_provider_has_default_url_and_model() {
        let providers = [
            ProviderType::OpenAI,
            ProviderType::Anthropic,
            ProviderType::LMStudio,
            ProviderType::Gemini,
            ProviderType::Groq,
            ProviderType::Grok,
            ProviderType::Mock,
        ];
        for p in providers {
            assert!(!p.default_base_url().is_empty());
            assert!(!p.default_model().is_empty());
            assert!(!p.env_key_name().is_empty());
        }
    }

    // --- Loading agent configs from disk ----------------------------------

    #[test]
    fn test_load_valid_agent_config() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("test.json"),
            r#"{
                "name": "test",
                "system_prompt": "You are a test.",
                "allowed_tools": ["Read", "Write"],
                "write_access": true
            }"#,
        )
        .unwrap();
        let config = KodaConfig::load(tmp.path(), "test").unwrap();
        assert_eq!(config.agent_name, "test");
        assert_eq!(config.allowed_tools, vec!["Read", "Write"]);
        assert!(config.disallowed_tools.is_empty());
    }

    #[test]
    fn test_load_missing_agent_returns_error() {
        let tmp = TempDir::new().unwrap();
        std::fs::create_dir_all(tmp.path().join("agents")).unwrap();
        assert!(KodaConfig::load(tmp.path(), "nonexistent").is_err());
    }

    #[test]
    fn test_load_malformed_json_returns_error() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(agents_dir.join("bad.json"), "NOT JSON").unwrap();
        assert!(KodaConfig::load(tmp.path(), "bad").is_err());
    }

    // --- Default-deny behavior for write tools ----------------------------

    #[test]
    fn test_default_deny_blocks_write_tools() {
        let result = KodaConfig::apply_default_deny(vec![], false);
        assert!(result.contains(&"Write".to_string()));
        assert!(result.contains(&"Edit".to_string()));
        assert!(result.contains(&"Delete".to_string()));
    }

    #[test]
    fn test_write_access_true_allows_write_tools() {
        let result = KodaConfig::apply_default_deny(vec![], true);
        assert!(result.is_empty());
    }

    #[test]
    fn test_default_deny_deduplicates() {
        let result =
            KodaConfig::apply_default_deny(vec!["Write".to_string(), "Bash".to_string()], false);
        assert_eq!(result.iter().filter(|t| *t == "Write").count(), 1);
        assert!(result.contains(&"Edit".to_string()));
        assert!(result.contains(&"Delete".to_string()));
        assert!(result.contains(&"Bash".to_string()));
    }

    #[test]
    fn test_custom_agent_without_write_access_is_readonly() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("custom.json"),
            r#"{
                "name": "custom",
                "system_prompt": "I am custom."
            }"#,
        )
        .unwrap();
        let config = KodaConfig::load(tmp.path(), "custom").unwrap();
        assert!(config.disallowed_tools.contains(&"Write".to_string()));
        assert!(config.disallowed_tools.contains(&"Edit".to_string()));
        assert!(config.disallowed_tools.contains(&"Delete".to_string()));
    }

    #[test]
    fn test_builtin_task_has_write_access() {
        let agent = KodaConfig::load_builtin("task").unwrap();
        assert!(agent.write_access, "task agent should have write_access");
    }

    #[test]
    fn test_builtin_explore_no_write_access() {
        let agent = KodaConfig::load_builtin("explore").unwrap();
        assert!(!agent.write_access, "explore should be read-only");
    }

    // --- CLI-style overrides ----------------------------------------------

    #[test]
    fn test_with_overrides_model() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            None,
            Some("gpt-4-turbo".into()),
            None,
        );
        assert_eq!(config.model, "gpt-4-turbo");
    }

    #[test]
    fn test_with_overrides_base_url_re_detects_provider() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            Some("https://api.anthropic.com".into()),
            None,
            None,
        );
        assert_eq!(config.provider_type, ProviderType::Anthropic);
    }

    #[test]
    fn test_with_overrides_explicit_provider_wins() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            Some("https://my-proxy.com".into()),
            None,
            Some("anthropic".into()),
        );
        assert_eq!(config.provider_type, ProviderType::Anthropic);
    }

    #[test]
    fn test_with_overrides_no_changes() {
        let config =
            KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(None, None, None);
        assert_eq!(config.provider_type, ProviderType::Gemini);
        assert_eq!(config.model, "gemini-flash-latest");
    }

    // --- Model-derived recalculation --------------------------------------

    #[test]
    fn test_recalculate_updates_context_window() {
        let mut config = KodaConfig::default_for_testing(ProviderType::LMStudio);
        assert_eq!(config.max_context_tokens, 4_096);

        config.model = "claude-sonnet-4-6".to_string();
        config.model_settings.model = config.model.clone();
        config.provider_type = ProviderType::Anthropic;
        config.recalculate_model_derived();

        assert_eq!(config.max_context_tokens, 200_000);
        assert_eq!(config.model_settings.max_context_tokens, 200_000);
        assert_eq!(config.max_iterations, 200);
    }

    #[test]
    fn test_with_overrides_model_recalculates() {
        let config = KodaConfig::default_for_testing(ProviderType::LMStudio);
        assert_eq!(config.max_context_tokens, 4_096);

        let config = config.with_overrides(None, Some("gpt-4o".into()), Some("openai".into()));
        assert_eq!(config.model, "gpt-4o");
        assert_eq!(config.max_context_tokens, 128_000);
    }

    // --- URL detection for the remaining providers ------------------------

    #[test]
    fn test_provider_from_url_ollama() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:11434/api", None),
            ProviderType::Ollama
        );
    }

    #[test]
    fn test_provider_from_url_vllm() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:8000/v1", None),
            ProviderType::Vllm
        );
    }

    #[test]
    fn test_provider_from_url_gemini() {
        assert_eq!(
            ProviderType::from_url_or_name(
                "https://generativelanguage.googleapis.com/v1beta",
                None
            ),
            ProviderType::Gemini
        );
    }

    #[test]
    fn test_provider_from_url_groq() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.groq.com/openai/v1", None),
            ProviderType::Groq
        );
    }

    #[test]
    fn test_provider_from_url_grok() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.x.ai/v1", None),
            ProviderType::Grok
        );
    }

    #[test]
    fn test_provider_from_url_deepseek() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.deepseek.com/v1", None),
            ProviderType::DeepSeek
        );
    }

    #[test]
    fn test_provider_from_url_mistral() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.mistral.ai/v1", None),
            ProviderType::Mistral
        );
    }

    #[test]
    fn test_provider_from_url_openrouter() {
        assert_eq!(
            ProviderType::from_url_or_name("https://openrouter.ai/api/v1", None),
            ProviderType::OpenRouter
        );
    }

    #[test]
    fn test_provider_from_url_together() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.together.xyz/v1", None),
            ProviderType::Together
        );
    }

    #[test]
    fn test_provider_from_url_fireworks() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.fireworks.ai/inference/v1", None),
            ProviderType::Fireworks
        );
    }

    #[test]
    fn test_provider_name_aliases_extended() {
        let cases = [
            ("ollama", ProviderType::Ollama),
            ("deepseek", ProviderType::DeepSeek),
            ("mistral", ProviderType::Mistral),
            ("minimax", ProviderType::MiniMax),
            ("openrouter", ProviderType::OpenRouter),
            ("together", ProviderType::Together),
            ("fireworks", ProviderType::Fireworks),
            ("vllm", ProviderType::Vllm),
            ("groq", ProviderType::Groq),
            ("mock", ProviderType::Mock),
        ];
        for (name, expected) in cases {
            assert_eq!(
                ProviderType::from_url_or_name("", Some(name)),
                expected,
                "alias '{name}' failed"
            );
        }
    }

    // --- API key requirements ---------------------------------------------

    #[test]
    fn test_requires_api_key_local_providers() {
        assert!(!ProviderType::LMStudio.requires_api_key());
        assert!(!ProviderType::Ollama.requires_api_key());
        assert!(!ProviderType::Mock.requires_api_key());
        assert!(!ProviderType::Vllm.requires_api_key());
    }

    #[test]
    fn test_requires_api_key_cloud_providers() {
        assert!(ProviderType::Anthropic.requires_api_key());
        assert!(ProviderType::OpenAI.requires_api_key());
        assert!(ProviderType::Gemini.requires_api_key());
        assert!(ProviderType::Groq.requires_api_key());
        assert!(ProviderType::Grok.requires_api_key());
    }

    // --- ModelSettings defaults -------------------------------------------

    #[test]
    fn test_model_settings_defaults_anthropic_has_max_tokens() {
        let s = ModelSettings::defaults_for("claude-opus-4-5", &ProviderType::Anthropic);
        assert_eq!(s.max_tokens, Some(16384));
        assert_eq!(s.model, "claude-opus-4-5");
        assert!(s.temperature.is_none());
    }

    #[test]
    fn test_model_settings_defaults_openai_no_max_tokens() {
        let s = ModelSettings::defaults_for("gpt-4o", &ProviderType::OpenAI);
        assert!(s.max_tokens.is_none(), "OpenAI should use provider default");
        assert_eq!(s.model, "gpt-4o");
    }

    // --- Per-setting model overrides --------------------------------------

    #[test]
    fn test_with_model_overrides_all_fields() {
        let config = KodaConfig::default_for_testing(ProviderType::Anthropic).with_model_overrides(
            Some(8192),
            Some(0.7),
            Some(2000),
            Some("low".into()),
        );
        assert_eq!(config.model_settings.max_tokens, Some(8192));
        assert_eq!(config.model_settings.temperature, Some(0.7));
        assert_eq!(config.model_settings.thinking_budget, Some(2000));
        assert_eq!(
            config.model_settings.reasoning_effort,
            Some("low".to_string())
        );
    }

    #[test]
    fn test_with_model_overrides_none_changes_nothing() {
        let original = KodaConfig::default_for_testing(ProviderType::OpenAI);
        let original_tokens = original.model_settings.max_tokens;
        let config = original.with_model_overrides(None, None, None, None);
        assert_eq!(config.model_settings.max_tokens, original_tokens);
        assert!(config.model_settings.temperature.is_none());
    }

    // --- Built-in agents ---------------------------------------------------

    #[test]
    fn test_builtin_agents_is_not_empty() {
        let agents = KodaConfig::builtin_agents();
        assert!(!agents.is_empty(), "builtin_agents should not be empty");
    }

    #[test]
    fn test_builtin_agents_contains_core_agents() {
        let agents = KodaConfig::builtin_agents();
        let names: Vec<&str> = agents.iter().map(|(name, _)| name.as_str()).collect();
        assert!(names.contains(&"task"), "should have 'task' agent");
        assert!(names.contains(&"explore"), "should have 'explore' agent");
    }

    // --- Raw agent JSON loading -------------------------------------------

    #[test]
    fn test_load_agent_json_returns_raw_options() {
        let tmp = tempfile::TempDir::new().unwrap();
        for name in ["explore", "plan", "verify", "task"] {
            let raw = KodaConfig::load_agent_json(tmp.path(), name)
                .unwrap_or_else(|e| panic!("load_agent_json({name}) failed: {e}"));
            assert!(
                raw.model.is_none(),
                "built-in agent '{name}' must not hardcode a model — \
                 set it in the agent JSON if you need a provider-specific default"
            );
            assert!(
                raw.provider.is_none(),
                "built-in agent '{name}' must not hardcode a provider"
            );
        }
    }

    #[test]
    fn test_load_agent_json_project_override_preserves_option() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("myscout.json"),
            r#"{"name":"myscout","system_prompt":"scout","model":"claude-3-haiku"}"#,
        )
        .unwrap();
        let raw = KodaConfig::load_agent_json(tmp.path(), "myscout").unwrap();
        assert_eq!(raw.model.as_deref(), Some("claude-3-haiku"));
    }

    // --- Sub-agent provider/model inheritance -----------------------------

    #[test]
    fn test_sub_agent_inherits_parent_provider_and_model() {
        let tmp = tempfile::TempDir::new().unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "explore").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "explore").unwrap();

        // Mirror the orchestrator's inheritance rule: only agents without
        // their own provider/base_url pick up the parent's settings.
        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(
            cfg.provider_type,
            ProviderType::Gemini,
            "provider must be inherited"
        );
        assert_eq!(
            cfg.model, "gemini-2.0-flash",
            "model must be inherited from parent"
        );
    }

    #[test]
    fn test_sub_agent_own_provider_is_not_overridden() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("local-scout.json"),
            r#"{"name":"local-scout","system_prompt":"s","provider":"lmstudio","base_url":"http://localhost:1234/v1"}"#,
        )
        .unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "local-scout").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "local-scout").unwrap();

        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(cfg.provider_type, ProviderType::LMStudio);
        assert_ne!(
            cfg.provider_type,
            ProviderType::Gemini,
            "parent provider must not bleed into agent with explicit provider"
        );
    }

    #[test]
    fn test_sub_agent_explicit_model_is_not_overridden() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("specialist.json"),
            r#"{"name":"specialist","system_prompt":"s","model":"gemini-2.5-flash"}"#,
        )
        .unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash-lite".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "specialist").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "specialist").unwrap();

        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(cfg.provider_type, ProviderType::Gemini);
        assert_eq!(
            cfg.model, "gemini-2.5-flash",
            "agent's explicit model must not be overridden by parent"
        );
    }
}