// llm_stack_ollama/config.rs
use std::time::Duration;
/// Configuration for talking to an Ollama server.
///
/// Constructed via [`Default`] for a local instance, or with struct-update
/// syntax to override individual fields.
#[derive(Debug, Clone)]
pub struct OllamaConfig {
    /// Name of the model to request (e.g. "llama3.2").
    pub model: String,
    /// Base URL of the Ollama HTTP API (e.g. "http://localhost:11434").
    pub base_url: String,
    /// Optional request timeout; `None` presumably falls back to the HTTP
    /// client's default — TODO confirm against the code that builds requests.
    pub timeout: Option<Duration>,
    /// Optional preconfigured `reqwest::Client`; `None` presumably means one
    /// is constructed elsewhere — TODO confirm against the consumer.
    pub client: Option<reqwest::Client>,
}
29
30impl Default for OllamaConfig {
31 fn default() -> Self {
32 Self {
33 model: "llama3.2".into(),
34 base_url: "http://localhost:11434".into(),
35 timeout: None,
36 client: None,
37 }
38 }
39}
40
#[cfg(test)]
mod tests {
    use super::*;

    /// Defaults should point at a local Ollama with no timeout or client set.
    #[test]
    fn test_default_config() {
        let cfg = OllamaConfig::default();
        assert!(cfg.timeout.is_none());
        assert!(cfg.client.is_none());
        assert_eq!(cfg.base_url, "http://localhost:11434");
        assert_eq!(cfg.model, "llama3.2");
    }

    /// The derived Debug representation should expose the key settings.
    #[test]
    fn test_debug_output() {
        let rendered = format!("{:?}", OllamaConfig::default());
        assert!(rendered.contains("llama3.2"));
        assert!(rendered.contains("localhost:11434"));
    }

    /// Struct-update syntax should override only the named fields.
    #[test]
    fn test_config_override() {
        let cfg = OllamaConfig {
            base_url: "http://remote:11434".into(),
            model: "mistral".into(),
            ..OllamaConfig::default()
        };
        assert_eq!(cfg.model, "mistral");
        assert_eq!(cfg.base_url, "http://remote:11434");
    }
}