1use anyhow::{Context, Result};
23
24pub use crate::provider_catalog::{ProviderMeta, ProviderType};
31
32use serde::Deserialize;
33use std::path::{Path, PathBuf};
34
/// Per-request generation settings for one model, built from provider
/// defaults and then layered with agent-level overrides.
#[derive(Debug, Clone)]
pub struct ModelSettings {
    /// Model identifier sent to the provider API.
    pub model: String,
    /// Hard cap on output tokens; `None` defers to the provider's default.
    pub max_tokens: Option<u32>,
    /// Sampling temperature; `None` defers to the provider's default.
    pub temperature: Option<f64>,
    /// Token budget for extended thinking, where the provider supports it.
    pub thinking_budget: Option<u32>,
    /// Reasoning-effort hint (e.g. "low"), where the provider supports it.
    pub reasoning_effort: Option<String>,
    /// Context-window size in tokens for `model`.
    pub max_context_tokens: usize,
}
51
52impl ModelSettings {
53 pub fn defaults_for(model: &str, provider: &ProviderType) -> Self {
55 let max_tokens = match provider {
56 ProviderType::Anthropic => Some(16384),
57 _ => None,
58 };
59 let max_context_tokens = crate::model_context::context_window_for_model(model);
60 Self {
61 model: model.to_string(),
62 max_tokens,
63 temperature: None,
64 thinking_budget: None,
65 reasoning_effort: None,
66 max_context_tokens,
67 }
68 }
69}
70
/// Raw agent definition as written in `agents/<name>.json` (on disk or
/// compiled in). Only `name` and `system_prompt` are required; every other
/// field is optional and resolved to runtime defaults in `KodaConfig::load`.
#[derive(Debug, Clone, Deserialize)]
pub struct AgentConfig {
    /// Agent identifier; also the JSON file stem on disk.
    pub name: String,
    /// Optional human-readable description of the agent.
    #[serde(default)]
    pub description: Option<String>,
    /// System prompt for the agent's conversations.
    pub system_prompt: String,
    /// Explicit tool allow-list.
    #[serde(default)]
    pub allowed_tools: Vec<String>,
    /// Explicit tool deny-list; write tools are appended at load time unless
    /// `write_access` is set (see `KodaConfig::apply_default_deny`).
    #[serde(default)]
    pub disallowed_tools: Vec<String>,
    /// Pinned model; `None` falls back to the provider's default model.
    #[serde(default)]
    pub model: Option<String>,
    /// Pinned base URL; `None` falls back to the provider's default URL.
    #[serde(default)]
    pub base_url: Option<String>,
    /// Explicit provider name; `None` lets the URL heuristics decide.
    #[serde(default)]
    pub provider: Option<String>,
    /// Override for `ModelSettings::max_tokens`.
    #[serde(default)]
    pub max_tokens: Option<u32>,
    /// Override for `ModelSettings::temperature`.
    #[serde(default)]
    pub temperature: Option<f64>,
    /// Override for `ModelSettings::thinking_budget`.
    #[serde(default)]
    pub thinking_budget: Option<u32>,
    /// Override for `ModelSettings::reasoning_effort`.
    #[serde(default)]
    pub reasoning_effort: Option<String>,
    /// Override for the model's context-window size, in tokens.
    #[serde(default)]
    pub max_context_tokens: Option<usize>,
    /// Override for the agent-loop iteration cap.
    #[serde(default)]
    pub max_iterations: Option<u32>,
    /// When false (the default), Write/Edit/Delete are added to
    /// `disallowed_tools` at load time, making the agent read-only.
    #[serde(default)]
    pub write_access: bool,
    /// When true, the memory subsystem is skipped for this agent
    /// (consumed via `KodaConfig::skip_memory`).
    #[serde(default)]
    pub skip_memory: bool,
}
154
/// Fully resolved runtime configuration for one agent: the agent definition
/// merged with provider detection, URL/model defaults, and trust settings.
#[derive(Debug, Clone)]
pub struct KodaConfig {
    /// Name of the loaded agent.
    pub agent_name: String,
    /// System prompt taken from the agent definition.
    pub system_prompt: String,
    /// Tool allow-list from the agent definition.
    pub allowed_tools: Vec<String>,
    /// Tool deny-list, including default-denied write tools when the agent
    /// lacks write access.
    pub disallowed_tools: Vec<String>,
    /// Provider detected from the base URL and/or explicit provider name.
    pub provider_type: ProviderType,
    /// Resolved API base URL.
    pub base_url: String,
    /// Resolved model identifier.
    pub model: String,
    /// Context-window size in tokens (mirrored in `model_settings`).
    pub max_context_tokens: usize,
    /// Directory agent JSON files were resolved against.
    pub agents_dir: PathBuf,
    /// Per-request generation settings for the resolved model.
    pub model_settings: ModelSettings,
    /// Agent-loop iteration cap.
    pub max_iterations: u32,
    /// When true, the memory subsystem is skipped for this agent.
    pub skip_memory: bool,
    /// Trust mode; always `Safe` after `load`, escalated via `with_trust`.
    pub trust: crate::trust::TrustMode,
}
186
187impl KodaConfig {
188 pub fn load(project_root: &Path, agent_name: &str) -> Result<Self> {
191 let agents_dir =
192 Self::find_agents_dir(project_root).unwrap_or_else(|_| PathBuf::from("agents"));
193
194 let agent_file = agents_dir.join(format!("{agent_name}.json"));
196 let agent: AgentConfig = if agent_file.exists() {
197 let json = std::fs::read_to_string(&agent_file)
198 .with_context(|| format!("Failed to read agent config: {agent_file:?}"))?;
199 serde_json::from_str(&json)
200 .with_context(|| format!("Failed to parse agent config: {agent_file:?}"))?
201 } else if let Some(builtin) = Self::load_builtin(agent_name) {
202 builtin
204 } else {
205 anyhow::bail!("Agent '{agent_name}' not found (checked disk and built-ins)");
206 };
207
208 let default_url = agent
209 .base_url
210 .clone()
211 .unwrap_or_else(|| "http://localhost:1234/v1".to_string());
212 let provider_type = ProviderType::from_url_or_name(&default_url, agent.provider.as_deref());
213
214 let mut base_url = agent.base_url;
216 if base_url.is_none()
217 && !provider_type.requires_api_key()
218 && let Some(env_url) = crate::runtime_env::get("KODA_LOCAL_URL")
219 {
220 base_url = Some(env_url);
221 }
222
223 let base_url = base_url.unwrap_or_else(|| provider_type.default_base_url().to_string());
224 let model = agent
225 .model
226 .unwrap_or_else(|| provider_type.default_model().to_string());
227
228 let mut settings = ModelSettings::defaults_for(&model, &provider_type);
229 if let Some(ctx) = agent.max_context_tokens {
231 settings.max_context_tokens = ctx;
232 }
233 let max_context_tokens = settings.max_context_tokens;
234 if let Some(mt) = agent.max_tokens {
235 settings.max_tokens = Some(mt);
236 }
237 if let Some(t) = agent.temperature {
238 settings.temperature = Some(t);
239 }
240 if let Some(tb) = agent.thinking_budget {
241 settings.thinking_budget = Some(tb);
242 }
243 if let Some(ref re) = agent.reasoning_effort {
244 settings.reasoning_effort = Some(re.clone());
245 }
246
247 let max_iterations = agent.max_iterations.unwrap_or(200);
248
249 Ok(Self {
250 agent_name: agent.name,
251 system_prompt: agent.system_prompt,
252 allowed_tools: agent.allowed_tools,
253 disallowed_tools: Self::apply_default_deny(agent.disallowed_tools, agent.write_access),
254 provider_type,
255 base_url,
256 model: model.clone(),
257 max_context_tokens,
258 agents_dir,
259 model_settings: settings,
260 max_iterations,
261 skip_memory: agent.skip_memory,
262 trust: crate::trust::TrustMode::Safe,
263 })
264 }
265
266 const WRITE_TOOLS: &'static [&'static str] = &["Write", "Edit", "Delete"];
269
270 fn apply_default_deny(mut disallowed: Vec<String>, write_access: bool) -> Vec<String> {
273 if !write_access {
274 for tool in Self::WRITE_TOOLS {
275 let name = tool.to_string();
276 if !disallowed.contains(&name) {
277 disallowed.push(name);
278 }
279 }
280 }
281 disallowed
282 }
283
284 pub fn with_overrides(
286 mut self,
287 base_url: Option<String>,
288 model: Option<String>,
289 provider: Option<String>,
290 ) -> Self {
291 if let Some(ref url) = base_url {
292 self.base_url = url.clone();
293 }
294 if let Some(ref p) = provider {
295 self.provider_type = ProviderType::from_url_or_name(&self.base_url, Some(p));
296 }
297 if base_url.is_some() && provider.is_none() {
298 self.provider_type = ProviderType::from_url_or_name(&self.base_url, None);
300 }
301 if let Some(m) = model {
302 self.model = m.clone();
303 self.model_settings.model = m.clone();
304 self.recalculate_model_derived();
306 }
307 self
308 }
309
310 pub fn with_model_overrides(
312 mut self,
313 max_tokens: Option<u32>,
314 temperature: Option<f64>,
315 thinking_budget: Option<u32>,
316 reasoning_effort: Option<String>,
317 ) -> Self {
318 if let Some(mt) = max_tokens {
319 self.model_settings.max_tokens = Some(mt);
320 }
321 if let Some(t) = temperature {
322 self.model_settings.temperature = Some(t);
323 }
324 if let Some(tb) = thinking_budget {
325 self.model_settings.thinking_budget = Some(tb);
326 }
327 if let Some(re) = reasoning_effort {
328 self.model_settings.reasoning_effort = Some(re);
329 }
330 self
331 }
332
    /// Sets the trust mode (builder-style).
    pub fn with_trust(mut self, mode: crate::trust::TrustMode) -> Self {
        self.trust = mode;
        self
    }
338
    /// Re-derives model-dependent settings after `self.model` has changed:
    /// looks the context window up in the static per-model table and mirrors
    /// it into both `max_context_tokens` fields.
    pub fn recalculate_model_derived(&mut self) {
        let new_ctx = crate::model_context::context_window_for_model(&self.model);
        self.max_context_tokens = new_ctx;
        self.model_settings.max_context_tokens = new_ctx;

        // NOTE(review): this unconditionally resets the iteration cap to the
        // default, discarding any agent-configured `max_iterations` whenever
        // the model is overridden (with_overrides calls this). Confirm the
        // clobbering is intended.
        self.max_iterations = 200;
    }
352
353 pub fn apply_provider_capabilities(&mut self, caps: &crate::providers::ModelCapabilities) {
359 if let Some(ctx) = caps.context_window {
360 self.max_context_tokens = ctx;
361 self.model_settings.max_context_tokens = ctx;
362 tracing::info!("Context window from API: {} tokens for {}", ctx, self.model);
363 }
364 if let Some(max_out) = caps.max_output_tokens {
365 if self.model_settings.max_tokens.is_none() {
367 self.model_settings.max_tokens = Some(max_out as u32);
368 tracing::info!("Max output tokens from API: {} for {}", max_out, self.model);
369 }
370 }
371 }
372
373 pub async fn query_and_apply_capabilities(
379 &mut self,
380 provider: &dyn crate::providers::LlmProvider,
381 ) {
382 match provider.model_capabilities(&self.model).await {
383 Ok(caps) if caps.context_window.is_some() || caps.max_output_tokens.is_some() => {
384 self.apply_provider_capabilities(&caps);
385 }
386 Ok(_) => {
387 tracing::debug!(
388 "Provider did not report capabilities for {}; using lookup table ({}k tokens)",
389 self.model,
390 self.max_context_tokens / 1000
391 );
392 }
393 Err(e) => {
394 tracing::debug!("Could not query model capabilities: {e:#}");
395 }
396 }
397 }
398
    /// Agent definitions compiled into the binary from `agents/*.json`,
    /// available even when no agents directory exists on disk.
    const BUILTIN_AGENTS: &[(&str, &str)] = &[
        ("default", include_str!("../agents/default.json")),
        ("task", include_str!("../agents/task.json")),
        ("explore", include_str!("../agents/explore.json")),
        ("plan", include_str!("../agents/plan.json")),
        ("verify", include_str!("../agents/verify.json")),
    ];
408
409 pub fn load_agent_json(project_root: &Path, agent_name: &str) -> Result<AgentConfig> {
416 let agents_dir =
417 Self::find_agents_dir(project_root).unwrap_or_else(|_| PathBuf::from("agents"));
418 let agent_file = agents_dir.join(format!("{agent_name}.json"));
419 if agent_file.exists() {
420 let json = std::fs::read_to_string(&agent_file)
421 .with_context(|| format!("Failed to read agent config: {agent_file:?}"))?;
422 serde_json::from_str(&json)
423 .with_context(|| format!("Failed to parse agent config: {agent_file:?}"))
424 } else {
425 Self::load_builtin(agent_name)
426 .ok_or_else(|| anyhow::anyhow!("Agent '{agent_name}' not found"))
427 }
428 }
429
430 pub fn load_builtin(name: &str) -> Option<AgentConfig> {
432 Self::BUILTIN_AGENTS
433 .iter()
434 .find(|(n, _)| *n == name)
435 .and_then(|(_, json)| serde_json::from_str(json).ok())
436 }
437
438 pub fn builtin_agents() -> Vec<(String, AgentConfig)> {
440 Self::BUILTIN_AGENTS
441 .iter()
442 .filter_map(|(name, json)| {
443 let config: AgentConfig = serde_json::from_str(json).ok()?;
444 Some((name.to_string(), config))
445 })
446 .collect()
447 }
448
449 pub fn default_for_testing(provider_type: ProviderType) -> Self {
452 let model = provider_type.default_model().to_string();
453 let model_settings = ModelSettings::defaults_for(&model, &provider_type);
454 let max_context_tokens = model_settings.max_context_tokens;
455
456 Self {
457 agent_name: "test".to_string(),
458 system_prompt: "You are a test agent.".to_string(),
459 allowed_tools: Vec::new(),
460 disallowed_tools: Vec::new(),
461 base_url: provider_type.default_base_url().to_string(),
462 model,
463 provider_type,
464 max_context_tokens,
465 agents_dir: PathBuf::from("agents"),
466 model_settings,
467 max_iterations: crate::loop_guard::MAX_ITERATIONS_DEFAULT,
468 skip_memory: false,
469 trust: crate::trust::TrustMode::Safe,
470 }
471 }
472
473 fn find_agents_dir(project_root: &Path) -> Result<PathBuf> {
482 let local = project_root.join("agents");
484 if local.is_dir() {
485 return Ok(local);
486 }
487
488 let config_agents = Self::user_agents_dir()?;
490 if config_agents.is_dir() {
491 return Ok(config_agents);
492 }
493
494 anyhow::bail!("No agents directory on disk (built-in agents are still available)")
496 }
497
498 fn user_agents_dir() -> Result<PathBuf> {
500 let home = std::env::var("HOME")
501 .or_else(|_| std::env::var("USERPROFILE"))
502 .map(PathBuf::from)
503 .unwrap_or_else(|_| PathBuf::from("."));
504 Ok(home.join(".config").join("koda").join("agents"))
505 }
506}
507
#[cfg(test)]
mod tests {
    //! Unit tests: provider detection, agent loading, the default-deny tool
    //! policy, override plumbing, and built-in agent invariants.
    use super::*;
    use tempfile::TempDir;

    // --- Provider detection from URL / explicit name ---

    #[test]
    fn test_provider_from_url_anthropic() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.anthropic.com/v1", None),
            ProviderType::Anthropic
        );
    }

    #[test]
    fn test_provider_from_url_localhost_defaults_to_lmstudio() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:1234/v1", None),
            ProviderType::LMStudio
        );
    }

    #[test]
    fn test_provider_from_explicit_name_overrides_url() {
        assert_eq!(
            ProviderType::from_url_or_name("https://my-proxy.corp.com/v1", Some("anthropic")),
            ProviderType::Anthropic
        );
    }

    #[test]
    fn test_unknown_url_defaults_to_openai() {
        assert_eq!(
            ProviderType::from_url_or_name("https://random.example.com/v1", None),
            ProviderType::OpenAI
        );
    }

    #[test]
    fn test_provider_name_aliases() {
        assert_eq!(
            ProviderType::from_url_or_name("", Some("claude")),
            ProviderType::Anthropic
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("google")),
            ProviderType::Gemini
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("xai")),
            ProviderType::Grok
        );
        assert_eq!(
            ProviderType::from_url_or_name("", Some("lm-studio")),
            ProviderType::LMStudio
        );
    }

    #[test]
    fn test_provider_display() {
        assert_eq!(format!("{}", ProviderType::OpenAI), "openai");
        assert_eq!(format!("{}", ProviderType::Anthropic), "anthropic");
        assert_eq!(format!("{}", ProviderType::LMStudio), "lm-studio");
    }

    #[test]
    fn test_each_provider_has_default_url_and_model() {
        let providers = [
            ProviderType::OpenAI,
            ProviderType::Anthropic,
            ProviderType::LMStudio,
            ProviderType::Gemini,
            ProviderType::Groq,
            ProviderType::Grok,
            ProviderType::Mock,
        ];
        for p in providers {
            assert!(!p.default_base_url().is_empty());
            assert!(!p.default_model().is_empty());
            assert!(!p.env_key_name().is_empty());
        }
    }

    // --- Agent loading from disk ---

    #[test]
    fn test_load_valid_agent_config() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("test.json"),
            r#"{
                "name": "test",
                "system_prompt": "You are a test.",
                "allowed_tools": ["Read", "Write"],
                "write_access": true
            }"#,
        )
        .unwrap();
        let config = KodaConfig::load(tmp.path(), "test").unwrap();
        assert_eq!(config.agent_name, "test");
        assert_eq!(config.allowed_tools, vec!["Read", "Write"]);
        assert!(config.disallowed_tools.is_empty());
    }

    #[test]
    fn test_load_missing_agent_returns_error() {
        let tmp = TempDir::new().unwrap();
        std::fs::create_dir_all(tmp.path().join("agents")).unwrap();
        assert!(KodaConfig::load(tmp.path(), "nonexistent").is_err());
    }

    #[test]
    fn test_load_malformed_json_returns_error() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(agents_dir.join("bad.json"), "NOT JSON").unwrap();
        assert!(KodaConfig::load(tmp.path(), "bad").is_err());
    }

    // --- Default-deny policy for write tools ---

    #[test]
    fn test_default_deny_blocks_write_tools() {
        let result = KodaConfig::apply_default_deny(vec![], false);
        assert!(result.contains(&"Write".to_string()));
        assert!(result.contains(&"Edit".to_string()));
        assert!(result.contains(&"Delete".to_string()));
    }

    #[test]
    fn test_write_access_true_allows_write_tools() {
        let result = KodaConfig::apply_default_deny(vec![], true);
        assert!(result.is_empty());
    }

    #[test]
    fn test_default_deny_deduplicates() {
        let result =
            KodaConfig::apply_default_deny(vec!["Write".to_string(), "Bash".to_string()], false);
        assert_eq!(result.iter().filter(|t| *t == "Write").count(), 1);
        assert!(result.contains(&"Edit".to_string()));
        assert!(result.contains(&"Delete".to_string()));
        assert!(result.contains(&"Bash".to_string()));
    }

    #[test]
    fn test_custom_agent_without_write_access_is_readonly() {
        let tmp = TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("custom.json"),
            r#"{
                "name": "custom",
                "system_prompt": "I am custom."
            }"#,
        )
        .unwrap();
        let config = KodaConfig::load(tmp.path(), "custom").unwrap();
        assert!(config.disallowed_tools.contains(&"Write".to_string()));
        assert!(config.disallowed_tools.contains(&"Edit".to_string()));
        assert!(config.disallowed_tools.contains(&"Delete".to_string()));
    }

    #[test]
    fn test_builtin_task_has_write_access() {
        let agent = KodaConfig::load_builtin("task").unwrap();
        assert!(agent.write_access, "task agent should have write_access");
    }

    #[test]
    fn test_builtin_explore_no_write_access() {
        let agent = KodaConfig::load_builtin("explore").unwrap();
        assert!(!agent.write_access, "explore should be read-only");
    }

    // --- with_overrides: URL / model / provider plumbing ---

    #[test]
    fn test_with_overrides_model() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            None,
            Some("gpt-4-turbo".into()),
            None,
        );
        assert_eq!(config.model, "gpt-4-turbo");
    }

    #[test]
    fn test_with_overrides_base_url_re_detects_provider() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            Some("https://api.anthropic.com".into()),
            None,
            None,
        );
        assert_eq!(config.provider_type, ProviderType::Anthropic);
    }

    #[test]
    fn test_with_overrides_explicit_provider_wins() {
        let config = KodaConfig::default_for_testing(ProviderType::OpenAI).with_overrides(
            Some("https://my-proxy.com".into()),
            None,
            Some("anthropic".into()),
        );
        assert_eq!(config.provider_type, ProviderType::Anthropic);
    }

    #[test]
    fn test_with_overrides_no_changes() {
        let config =
            KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(None, None, None);
        assert_eq!(config.provider_type, ProviderType::Gemini);
        assert_eq!(config.model, "gemini-flash-latest");
    }

    // --- Model-derived recalculation ---

    #[test]
    fn test_recalculate_updates_context_window() {
        let mut config = KodaConfig::default_for_testing(ProviderType::LMStudio);
        // LM Studio's default model resolves to a 4k window in the table.
        assert_eq!(config.max_context_tokens, 4_096);
        config.model = "claude-sonnet-4-6".to_string();
        config.model_settings.model = config.model.clone();
        config.provider_type = ProviderType::Anthropic;
        config.recalculate_model_derived();

        assert_eq!(config.max_context_tokens, 200_000);
        assert_eq!(config.model_settings.max_context_tokens, 200_000);
        assert_eq!(config.max_iterations, 200);
    }

    #[test]
    fn test_with_overrides_model_recalculates() {
        let config = KodaConfig::default_for_testing(ProviderType::LMStudio);
        assert_eq!(config.max_context_tokens, 4_096);

        let config = config.with_overrides(None, Some("gpt-4o".into()), Some("openai".into()));
        assert_eq!(config.model, "gpt-4o");
        assert_eq!(config.max_context_tokens, 128_000);
    }

    // --- URL detection for the remaining providers ---

    #[test]
    fn test_provider_from_url_ollama() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:11434/api", None),
            ProviderType::Ollama
        );
    }

    #[test]
    fn test_provider_from_url_vllm() {
        assert_eq!(
            ProviderType::from_url_or_name("http://localhost:8000/v1", None),
            ProviderType::Vllm
        );
    }

    #[test]
    fn test_provider_from_url_gemini() {
        assert_eq!(
            ProviderType::from_url_or_name(
                "https://generativelanguage.googleapis.com/v1beta",
                None
            ),
            ProviderType::Gemini
        );
    }

    #[test]
    fn test_provider_from_url_groq() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.groq.com/openai/v1", None),
            ProviderType::Groq
        );
    }

    #[test]
    fn test_provider_from_url_grok() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.x.ai/v1", None),
            ProviderType::Grok
        );
    }

    #[test]
    fn test_provider_from_url_deepseek() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.deepseek.com/v1", None),
            ProviderType::DeepSeek
        );
    }

    #[test]
    fn test_provider_from_url_mistral() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.mistral.ai/v1", None),
            ProviderType::Mistral
        );
    }

    #[test]
    fn test_provider_from_url_openrouter() {
        assert_eq!(
            ProviderType::from_url_or_name("https://openrouter.ai/api/v1", None),
            ProviderType::OpenRouter
        );
    }

    #[test]
    fn test_provider_from_url_together() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.together.xyz/v1", None),
            ProviderType::Together
        );
    }

    #[test]
    fn test_provider_from_url_fireworks() {
        assert_eq!(
            ProviderType::from_url_or_name("https://api.fireworks.ai/inference/v1", None),
            ProviderType::Fireworks
        );
    }

    #[test]
    fn test_provider_name_aliases_extended() {
        let cases = [
            ("ollama", ProviderType::Ollama),
            ("deepseek", ProviderType::DeepSeek),
            ("mistral", ProviderType::Mistral),
            ("minimax", ProviderType::MiniMax),
            ("openrouter", ProviderType::OpenRouter),
            ("together", ProviderType::Together),
            ("fireworks", ProviderType::Fireworks),
            ("vllm", ProviderType::Vllm),
            ("groq", ProviderType::Groq),
            ("mock", ProviderType::Mock),
        ];
        for (name, expected) in cases {
            assert_eq!(
                ProviderType::from_url_or_name("", Some(name)),
                expected,
                "alias '{name}' failed"
            );
        }
    }

    // --- API-key requirements ---

    #[test]
    fn test_requires_api_key_local_providers() {
        assert!(!ProviderType::LMStudio.requires_api_key());
        assert!(!ProviderType::Ollama.requires_api_key());
        assert!(!ProviderType::Mock.requires_api_key());
        assert!(!ProviderType::Vllm.requires_api_key());
    }

    #[test]
    fn test_requires_api_key_cloud_providers() {
        assert!(ProviderType::Anthropic.requires_api_key());
        assert!(ProviderType::OpenAI.requires_api_key());
        assert!(ProviderType::Gemini.requires_api_key());
        assert!(ProviderType::Groq.requires_api_key());
        assert!(ProviderType::Grok.requires_api_key());
    }

    // --- ModelSettings defaults ---

    #[test]
    fn test_model_settings_defaults_anthropic_has_max_tokens() {
        let s = ModelSettings::defaults_for("claude-opus-4-5", &ProviderType::Anthropic);
        assert_eq!(s.max_tokens, Some(16384));
        assert_eq!(s.model, "claude-opus-4-5");
        assert!(s.temperature.is_none());
    }

    #[test]
    fn test_model_settings_defaults_openai_no_max_tokens() {
        let s = ModelSettings::defaults_for("gpt-4o", &ProviderType::OpenAI);
        assert!(s.max_tokens.is_none(), "OpenAI should use provider default");
        assert_eq!(s.model, "gpt-4o");
    }

    // --- with_model_overrides ---

    #[test]
    fn test_with_model_overrides_all_fields() {
        let config = KodaConfig::default_for_testing(ProviderType::Anthropic).with_model_overrides(
            Some(8192),
            Some(0.7),
            Some(2000),
            Some("low".into()),
        );
        assert_eq!(config.model_settings.max_tokens, Some(8192));
        assert_eq!(config.model_settings.temperature, Some(0.7));
        assert_eq!(config.model_settings.thinking_budget, Some(2000));
        assert_eq!(
            config.model_settings.reasoning_effort,
            Some("low".to_string())
        );
    }

    #[test]
    fn test_with_model_overrides_none_changes_nothing() {
        let original = KodaConfig::default_for_testing(ProviderType::OpenAI);
        let original_tokens = original.model_settings.max_tokens;
        let config = original.with_model_overrides(None, None, None, None);
        assert_eq!(config.model_settings.max_tokens, original_tokens);
        assert!(config.model_settings.temperature.is_none());
    }

    // --- Built-in agent catalog ---

    #[test]
    fn test_builtin_agents_is_not_empty() {
        let agents = KodaConfig::builtin_agents();
        assert!(!agents.is_empty(), "builtin_agents should not be empty");
    }

    #[test]
    fn test_builtin_agents_contains_core_agents() {
        let agents = KodaConfig::builtin_agents();
        let names: Vec<&str> = agents.iter().map(|(name, _)| name.as_str()).collect();
        assert!(names.contains(&"task"), "should have 'task' agent");
        assert!(names.contains(&"explore"), "should have 'explore' agent");
    }

    // --- Raw agent JSON invariants ---

    #[test]
    fn test_load_agent_json_returns_raw_options() {
        let tmp = tempfile::TempDir::new().unwrap();
        for name in ["explore", "plan", "verify", "task"] {
            let raw = KodaConfig::load_agent_json(tmp.path(), name)
                .unwrap_or_else(|e| panic!("load_agent_json({name}) failed: {e}"));
            assert!(
                raw.model.is_none(),
                "built-in agent '{name}' must not hardcode a model — \
                 set it in the agent JSON if you need a provider-specific default"
            );
            assert!(
                raw.provider.is_none(),
                "built-in agent '{name}' must not hardcode a provider"
            );
        }
    }

    #[test]
    fn test_load_agent_json_project_override_preserves_option() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("myscout.json"),
            r#"{"name":"myscout","system_prompt":"scout","model":"claude-3-haiku"}"#,
        )
        .unwrap();
        let raw = KodaConfig::load_agent_json(tmp.path(), "myscout").unwrap();
        assert_eq!(raw.model.as_deref(), Some("claude-3-haiku"));
    }

    // --- Sub-agent inheritance of parent provider/model ---

    #[test]
    fn test_sub_agent_inherits_parent_provider_and_model() {
        let tmp = tempfile::TempDir::new().unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "explore").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "explore").unwrap();

        // Mirrors the spawn-time inheritance logic: only agents without
        // their own provider/base_url inherit from the parent.
        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(
            cfg.provider_type,
            ProviderType::Gemini,
            "provider must be inherited"
        );
        assert_eq!(
            cfg.model, "gemini-2.0-flash",
            "model must be inherited from parent"
        );
    }

    #[test]
    fn test_sub_agent_own_provider_is_not_overridden() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("local-scout.json"),
            r#"{"name":"local-scout","system_prompt":"s","provider":"lmstudio","base_url":"http://localhost:1234/v1"}"#,
        )
        .unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "local-scout").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "local-scout").unwrap();

        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(cfg.provider_type, ProviderType::LMStudio);
        assert_ne!(
            cfg.provider_type,
            ProviderType::Gemini,
            "parent provider must not bleed into agent with explicit provider"
        );
    }

    #[test]
    fn test_sub_agent_explicit_model_is_not_overridden() {
        let tmp = tempfile::TempDir::new().unwrap();
        let agents_dir = tmp.path().join("agents");
        std::fs::create_dir_all(&agents_dir).unwrap();
        std::fs::write(
            agents_dir.join("specialist.json"),
            r#"{"name":"specialist","system_prompt":"s","model":"gemini-2.5-flash"}"#,
        )
        .unwrap();

        let parent = KodaConfig::default_for_testing(ProviderType::Gemini).with_overrides(
            None,
            Some("gemini-2.0-flash-lite".to_string()),
            None,
        );

        let raw = KodaConfig::load_agent_json(tmp.path(), "specialist").unwrap();
        let mut cfg = KodaConfig::load(tmp.path(), "specialist").unwrap();

        let agent_has_own_provider = raw.provider.is_some() || raw.base_url.is_some();
        if !agent_has_own_provider {
            let model_override = raw.model.is_none().then(|| parent.model.clone());
            cfg = cfg.with_overrides(
                Some(parent.base_url.clone()),
                model_override,
                Some(parent.provider_type.to_string()),
            );
        }

        assert_eq!(cfg.provider_type, ProviderType::Gemini);
        assert_eq!(
            cfg.model, "gemini-2.5-flash",
            "agent's explicit model must not be overridden by parent"
        );
    }
}