1use std::collections::HashMap;
4
5use serde::Deserialize;
6
/// Top-level application configuration, deserialized from a config file
/// (TOML in the tests below). Only the `runtime` section is required;
/// every other section falls back to its `Default` when absent.
#[derive(Debug, Clone, Deserialize)]
pub struct AliceConfig {
    /// Required `[runtime]` section (model selection and limits).
    pub runtime: RuntimeConfig,
    /// `[agent]` backend selection; defaults to the `Bob` backend.
    #[serde(default)]
    pub agent: AgentBackendConfig,
    /// `[memory]` settings; see `MemoryConfig::default` for fallbacks.
    #[serde(default)]
    pub memory: MemoryConfig,
    /// `[skills]` settings; enabled with built-in limits by default.
    #[serde(default)]
    pub skills: SkillsConfig,
    /// `[channels.*]` toggles; all providers disabled by default.
    #[serde(default)]
    pub channels: ChannelsConfig,
    /// `[[mcp.servers]]` definitions; empty list by default.
    #[serde(default)]
    pub mcp: McpConfig,
}
28
/// Required `[runtime]` section: which model to use plus optional limits.
#[derive(Debug, Clone, Deserialize)]
pub struct RuntimeConfig {
    /// Model identifier, e.g. "openai:gpt-4o-mini" (required).
    pub default_model: String,
    /// Optional cap on steps per turn; presumably unlimited when `None`
    /// — confirm against the runtime loop.
    pub max_steps: Option<u32>,
    /// Optional per-turn timeout in milliseconds; no explicit timeout
    /// configured when `None`.
    pub turn_timeout_ms: Option<u64>,
    /// Optional tool-dispatch strategy; the runtime chooses when `None`.
    pub dispatch_mode: Option<DispatchMode>,
}
41
/// Tool-dispatch strategy selector. Deserialized in snake_case, so the
/// config values are "prompt_guided" and "native_preferred".
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum DispatchMode {
    /// Config value "prompt_guided".
    PromptGuided,
    /// Config value "native_preferred".
    NativePreferred,
}
51
/// Which agent backend to run. Deserialized in snake_case, so the config
/// values are "bob" and "acp"; `Bob` is used when unspecified.
#[derive(Debug, Clone, Copy, Default, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AgentBackendType {
    /// Default backend (config value "bob").
    #[default]
    Bob,
    /// External ACP backend (config value "acp"); configured via the
    /// `acp_*` fields of `AgentBackendConfig`.
    Acp,
}
62
/// `[agent]` section. The `acp_*` fields describe how to spawn an external
/// ACP process — presumably only consulted when `backend` is `Acp`; confirm
/// in the backend launcher.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct AgentBackendConfig {
    /// Backend selector; defaults to `AgentBackendType::Bob`.
    #[serde(default)]
    pub backend: AgentBackendType,
    /// Command used to start the ACP agent process.
    pub acp_command: Option<String>,
    /// Arguments passed to `acp_command`; empty by default.
    #[serde(default)]
    pub acp_args: Vec<String>,
    /// Working directory for the ACP process.
    pub acp_working_dir: Option<String>,
}
77
/// `[memory]` section controlling the recall store and hybrid search.
///
/// NOTE: unlike the other sections, no field here carries `#[serde(default)]`,
/// so if `[memory]` is present in the file EVERY field must be supplied; the
/// `Default` impl below applies only when the whole section is omitted.
#[derive(Debug, Clone, Deserialize)]
pub struct MemoryConfig {
    /// Path to the memory database file (default ".alice/memory.db").
    pub db_path: String,
    /// Maximum number of memories recalled per query (default 6).
    pub recall_limit: usize,
    /// Weight of the BM25 (lexical) score in hybrid ranking (default 0.3).
    pub bm25_weight: f32,
    /// Weight of the vector (semantic) score in hybrid ranking (default 0.7).
    pub vector_weight: f32,
    /// Embedding dimensionality (default 384).
    pub vector_dimensions: usize,
    /// Master switch for vector search (default true).
    pub enable_vector: bool,
}
94
95impl Default for MemoryConfig {
96 fn default() -> Self {
97 Self {
98 db_path: ".alice/memory.db".to_string(),
99 recall_limit: 6,
100 bm25_weight: 0.3,
101 vector_weight: 0.7,
102 vector_dimensions: 384,
103 enable_vector: true,
104 }
105 }
106}
107
108const fn default_true() -> bool {
109 true
110}
111
112const fn default_max_selected() -> usize {
113 3
114}
115
116const fn default_token_budget() -> usize {
117 1800
118}
119
120#[derive(Debug, Clone, Deserialize)]
122pub struct SkillsConfig {
123 #[serde(default = "default_true")]
125 pub enabled: bool,
126 #[serde(default = "default_max_selected")]
128 pub max_selected: usize,
129 #[serde(default = "default_token_budget")]
131 pub token_budget: usize,
132 #[serde(default)]
134 pub sources: Vec<SkillSourceEntry>,
135}
136
137impl Default for SkillsConfig {
138 fn default() -> Self {
139 Self { enabled: true, max_selected: 3, token_budget: 1800, sources: Vec::new() }
140 }
141}
142
/// One `[[skills.sources]]` entry: a location to scan for skills.
#[derive(Debug, Clone, Deserialize)]
pub struct SkillSourceEntry {
    /// Filesystem path of the source (required).
    pub path: String,
    /// Whether to scan recursively; defaults to false when omitted.
    #[serde(default)]
    pub recursive: bool,
}
152
/// `[channels]` section: per-provider enablement flags.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct ChannelsConfig {
    /// `[channels.discord]`; disabled by default.
    #[serde(default)]
    pub discord: ChannelProviderConfig,
    /// `[channels.telegram]`; disabled by default.
    #[serde(default)]
    pub telegram: ChannelProviderConfig,
}
163
/// Settings shared by every channel provider.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct ChannelProviderConfig {
    /// Whether this channel is active; defaults to false when omitted.
    #[serde(default)]
    pub enabled: bool,
}
171
/// `[mcp]` section: the list of configured MCP servers.
#[derive(Debug, Clone, Default, Deserialize)]
pub struct McpConfig {
    /// `[[mcp.servers]]` entries; empty list when omitted.
    #[serde(default)]
    pub servers: Vec<McpServerConfig>,
}
179
/// One `[[mcp.servers]]` entry describing how to launch an MCP server.
#[derive(Debug, Clone, Deserialize)]
pub struct McpServerConfig {
    /// Identifier for this server (e.g. "filesystem"); required.
    pub id: String,
    /// Executable used to launch the server (e.g. "npx"); required.
    pub command: String,
    /// Arguments passed to `command`; empty by default.
    #[serde(default)]
    pub args: Vec<String>,
    /// Extra environment variables for the server process, if any.
    pub env: Option<HashMap<String, String>>,
    /// Optional tool-call timeout in milliseconds.
    pub tool_timeout_ms: Option<u64>,
}
195
196pub fn load_config(path: &str) -> eyre::Result<AliceConfig> {
202 let settings = config::Config::builder()
203 .add_source(config::File::with_name(path).required(true))
204 .build()?;
205 let config: AliceConfig = settings.try_deserialize()?;
206 Ok(config)
207}
208
#[cfg(test)]
mod tests {
    use super::*;

    /// Parses an inline TOML snippet into an `AliceConfig`, failing the
    /// calling test with a descriptive panic on any parse/deserialize
    /// error. Replaces the repeated `assert!(is_ok)` + `let Ok(..) else
    /// { return }` boilerplate of the original tests.
    fn parse(input: &str) -> AliceConfig {
        let settings = config::Config::builder()
            .add_source(config::File::from_str(input, config::FileFormat::Toml))
            .build()
            .expect("config should parse");
        settings
            .try_deserialize()
            .expect("config should deserialize")
    }

    #[test]
    fn parse_minimal_config() {
        let parsed = parse(
            r#"
[runtime]
default_model = "openai:gpt-4o-mini"
"#,
        );

        assert_eq!(parsed.runtime.default_model, "openai:gpt-4o-mini");
        // Every omitted section must fall back to its documented default.
        assert_eq!(parsed.memory.recall_limit, 6);
        assert!(parsed.mcp.servers.is_empty());
        assert!(parsed.skills.enabled);
        assert_eq!(parsed.skills.max_selected, 3);
        assert_eq!(parsed.skills.token_budget, 1800);
        assert!(parsed.skills.sources.is_empty());
        assert!(!parsed.channels.discord.enabled);
        assert!(!parsed.channels.telegram.enabled);
    }

    #[test]
    fn parse_full_config() {
        let parsed = parse(
            r#"
[runtime]
default_model = "openai:gpt-4o-mini"
max_steps = 9
turn_timeout_ms = 55000
dispatch_mode = "prompt_guided"

[memory]
db_path = "./tmp/alice.db"
recall_limit = 4
bm25_weight = 0.4
vector_weight = 0.6
vector_dimensions = 256
enable_vector = false

[skills]
enabled = false
max_selected = 5
token_budget = 2000

[[skills.sources]]
path = ".alice/skills"
recursive = true

[channels.discord]
enabled = true

[channels.telegram]
enabled = true

[[mcp.servers]]
id = "filesystem"
command = "npx"
args = ["-y", "@modelcontextprotocol/server-filesystem", "."]
tool_timeout_ms = 15000
"#,
        );

        assert_eq!(parsed.runtime.max_steps, Some(9));
        assert_eq!(parsed.runtime.dispatch_mode, Some(DispatchMode::PromptGuided));
        assert_eq!(parsed.memory.vector_dimensions, 256);
        assert!(!parsed.memory.enable_vector);
        assert_eq!(parsed.mcp.servers.len(), 1);
        assert_eq!(parsed.mcp.servers[0].id, "filesystem");
        assert!(!parsed.skills.enabled);
        assert_eq!(parsed.skills.max_selected, 5);
        assert_eq!(parsed.skills.token_budget, 2000);
        assert_eq!(parsed.skills.sources.len(), 1);
        assert_eq!(parsed.skills.sources[0].path, ".alice/skills");
        assert!(parsed.skills.sources[0].recursive);
        assert!(parsed.channels.discord.enabled);
        assert!(parsed.channels.telegram.enabled);
    }

    #[test]
    fn parse_config_native_preferred_dispatch() {
        let parsed = parse(
            r#"
[runtime]
default_model = "openai:gpt-4o-mini"
dispatch_mode = "native_preferred"
"#,
        );

        assert_eq!(parsed.runtime.dispatch_mode, Some(DispatchMode::NativePreferred));
        assert!(parsed.runtime.max_steps.is_none());
        assert!(parsed.runtime.turn_timeout_ms.is_none());
    }

    #[test]
    fn parse_config_multiple_skill_sources() {
        let parsed = parse(
            r#"
[runtime]
default_model = "openai:gpt-4o-mini"

[skills]
enabled = true
max_selected = 2
token_budget = 1200

[[skills.sources]]
path = ".alice/skills"
recursive = true

[[skills.sources]]
path = "/opt/shared-skills"
recursive = false

[[skills.sources]]
path = "~/custom-skills"
"#,
        );

        assert_eq!(parsed.skills.sources.len(), 3);
        assert!(parsed.skills.sources[0].recursive);
        assert!(!parsed.skills.sources[1].recursive);
        // Third source omits `recursive`; the serde default must kick in.
        assert!(!parsed.skills.sources[2].recursive);
        assert_eq!(parsed.skills.sources[1].path, "/opt/shared-skills");
        assert_eq!(parsed.skills.max_selected, 2);
        assert_eq!(parsed.skills.token_budget, 1200);
    }

    #[test]
    fn parse_config_multiple_mcp_servers_with_env() {
        let parsed = parse(
            r#"
[runtime]
default_model = "openai:gpt-4o-mini"

[[mcp.servers]]
id = "filesystem"
command = "npx"
args = ["-y", "@modelcontextprotocol/server-filesystem", "."]

[[mcp.servers]]
id = "github"
command = "npx"
args = ["-y", "@modelcontextprotocol/server-github"]
tool_timeout_ms = 30000

[mcp.servers.env]
GITHUB_TOKEN = "test-token"
"#,
        );

        assert_eq!(parsed.mcp.servers.len(), 2);
        assert_eq!(parsed.mcp.servers[0].id, "filesystem");
        assert!(parsed.mcp.servers[0].env.is_none());
        assert!(parsed.mcp.servers[0].tool_timeout_ms.is_none());
        assert_eq!(parsed.mcp.servers[1].id, "github");
        assert_eq!(parsed.mcp.servers[1].tool_timeout_ms, Some(30_000));
        // BUG FIX: the original used `let Some(ref env) = ... else { return }`
        // with no preceding assertion, so a missing env silently PASSED the
        // test. `expect` makes a missing env a hard failure.
        let env = parsed.mcp.servers[1]
            .env
            .as_ref()
            .expect("github server env should deserialize");
        assert_eq!(env.get("GITHUB_TOKEN").map(String::as_str), Some("test-token"));
    }
}