// alice_runtime/bootstrap.rs
1//! Runtime/bootstrap wiring for Alice.
2
3use std::sync::Arc;
4
5use alice_adapters::memory::sqlite_store::SqliteMemoryStore;
6use alice_core::memory::{domain::HybridWeights, service::MemoryService};
7use bob_adapters::{
8    llm_liter::LiterLlmAdapter, mcp_rmcp::McpToolAdapter, observe::TracingEventSink,
9    store_memory::InMemorySessionStore, tape_memory::InMemoryTapeStore,
10};
11use bob_core::ports::{EventSink, LlmPort, SessionStore, TapeStorePort, ToolPort};
12use bob_runtime::{
13    Agent, AgentBootstrap, DispatchMode, NoOpToolPort, RuntimeBuilder, TimeoutToolLayer, ToolLayer,
14    agent_loop::AgentLoop, composite::CompositeToolPort,
15};
16
17use crate::{
18    agent_backend::AgentBackend,
19    config::{AgentBackendType, AliceConfig, DispatchMode as ConfigDispatchMode, McpServerConfig},
20    context::AliceRuntimeContext,
21};
22
/// Default cap on agent-loop steps per turn when `runtime.max_steps` is unset.
const DEFAULT_MAX_STEPS: u32 = 12;
/// Default whole-turn timeout in milliseconds when `runtime.turn_timeout_ms` is unset.
const DEFAULT_TURN_TIMEOUT_MS: u64 = 90_000;
/// Default per-tool-call timeout in milliseconds; used both in the turn policy
/// and as the fallback for a server's `tool_timeout_ms` in MCP wiring.
const DEFAULT_TOOL_TIMEOUT_MS: u64 = 15_000;
26
/// Convert a model identifier from the `provider:model` format (used by the
/// bob runtime config) to the `provider/model` format expected by `liter_llm`.
///
/// Identifiers without a `:` separator are returned unchanged; only the first
/// `:` is rewritten, so any later colons stay part of the model name.
fn normalize_model_name(model: &str) -> String {
    match model.split_once(':') {
        Some((provider, name)) => format!("{provider}/{name}"),
        None => model.to_string(),
    }
}
36
/// Build runtime context from configuration.
///
/// Wires the liter-llm client, MCP tool ports, in-memory session/tape stores,
/// the bob agent + agent loop, the configured agent backend, the SQLite-backed
/// hybrid memory service, and the skill composer into one `AliceRuntimeContext`.
///
/// # Errors
///
/// Returns an error if any adapter fails to initialize.
pub async fn build_runtime(cfg: &AliceConfig) -> eyre::Result<AliceRuntimeContext> {
    // Config uses `provider:model`; liter_llm expects `provider/model`.
    let default_model = normalize_model_name(&cfg.runtime.default_model);

    // NOTE(review): an empty string is passed to ClientConfig::new — presumably
    // credentials/base URL are resolved elsewhere (e.g. from the environment);
    // confirm against liter_llm's ClientConfig documentation.
    let config = liter_llm::ClientConfig::new("");
    let client = liter_llm::DefaultClient::new(config, None)
        .map_err(|e| eyre::eyre!("failed to create liter-llm client: {e}"))?;
    let llm: Arc<dyn LlmPort> = Arc::new(LiterLlmAdapter::new(Arc::new(client)));
    let tools = build_tool_port(cfg).await?;
    // Second handle to the tool port: `tools` moves into the RuntimeBuilder,
    // `tools_ref` is reused for the Agent and AgentLoop below.
    let tools_ref = tools.clone();
    let store: Arc<dyn SessionStore> = Arc::new(InMemorySessionStore::new());
    let events: Arc<dyn EventSink> = Arc::new(TracingEventSink::new());
    let tape: Arc<dyn TapeStorePort> = Arc::new(InMemoryTapeStore::new());

    // Per-turn limits; unset config fields fall back to module defaults.
    // The policy-level tool timeout is always the default here — per-server
    // `tool_timeout_ms` is applied separately via TimeoutToolLayer in
    // `build_single_tool_port`.
    let policy = bob_core::types::TurnPolicy {
        max_steps: cfg.runtime.max_steps.unwrap_or(DEFAULT_MAX_STEPS),
        turn_timeout_ms: cfg.runtime.turn_timeout_ms.unwrap_or(DEFAULT_TURN_TIMEOUT_MS),
        tool_timeout_ms: DEFAULT_TOOL_TIMEOUT_MS,
        ..bob_core::types::TurnPolicy::default()
    };

    let runtime = RuntimeBuilder::new()
        .with_llm(llm)
        .with_tools(tools)
        .with_store(store.clone())
        .with_events(events.clone())
        .with_default_model(default_model.clone())
        .with_policy(policy)
        .with_dispatch_mode(resolve_dispatch_mode(cfg.runtime.dispatch_mode))
        .build()?;

    // Build Agent + Session API (bob 0.2.2)
    let agent = Agent::from_runtime(runtime.clone(), tools_ref.clone())
        .with_store(store)
        .with_tape(tape.clone())
        .build();

    // The agent loop shares the same tape and event sink as the agent.
    let agent_loop = AgentLoop::new(runtime, tools_ref.clone()).with_tape(tape).with_events(events);

    // Build agent backend based on configuration
    let backend: Arc<dyn AgentBackend> = build_agent_backend(cfg, &agent)?;

    // Hybrid (BM25 + vector) memory backed by SQLite.
    let memory_store = SqliteMemoryStore::open(
        &cfg.memory.db_path,
        cfg.memory.vector_dimensions,
        cfg.memory.enable_vector,
    )?;
    let weights = HybridWeights::new(cfg.memory.bm25_weight, cfg.memory.vector_weight)?;
    let memory_service = Arc::new(MemoryService::new(
        Arc::new(memory_store),
        cfg.memory.recall_limit,
        weights,
        cfg.memory.vector_dimensions,
        cfg.memory.enable_vector,
    )?);

    let skill_composer = crate::skill_wiring::build_skill_composer(&cfg.skills)?;

    Ok(AliceRuntimeContext::new(
        agent_loop,
        agent,
        backend,
        memory_service,
        skill_composer,
        cfg.skills.token_budget,
        default_model,
    ))
}
109
/// Build the appropriate agent backend from configuration.
///
/// # Errors
///
/// Returns an error when the `acp` backend is selected but either
/// `agent.acp_command` is missing or the crate was built without the
/// `acp-agent` feature.
fn build_agent_backend(cfg: &AliceConfig, agent: &Agent) -> eyre::Result<Arc<dyn AgentBackend>> {
    match cfg.agent.backend {
        AgentBackendType::Bob => {
            // In-process bob backend; shares the already-built Agent.
            let backend = crate::agent_backend::bob_backend::BobAgentBackend::new(agent.clone());
            Ok(Arc::new(backend))
        }
        AgentBackendType::Acp => {
            #[cfg(feature = "acp-agent")]
            {
                // External ACP agent: the command is mandatory, args and
                // working directory are optional.
                let command =
                    cfg.agent.acp_command.clone().ok_or_else(|| {
                        eyre::eyre!("agent.acp_command is required for acp backend")
                    })?;
                let config = crate::agent_backend::acp_backend::AcpConfig {
                    command,
                    args: cfg.agent.acp_args.clone(),
                    working_dir: cfg.agent.acp_working_dir.clone(),
                };
                let backend = crate::agent_backend::acp_backend::AcpAgentBackend::new(config);
                Ok(Arc::new(backend))
            }
            #[cfg(not(feature = "acp-agent"))]
            {
                // Silence the unused-parameter warning in this configuration.
                let _ = agent;
                Err(eyre::eyre!(
                    "acp backend requires the 'acp-agent' feature; \
                     rebuild with --features acp-agent"
                ))
            }
        }
    }
}
143
144const fn resolve_dispatch_mode(mode: Option<ConfigDispatchMode>) -> DispatchMode {
145    match mode {
146        Some(ConfigDispatchMode::PromptGuided) => DispatchMode::PromptGuided,
147        Some(ConfigDispatchMode::NativePreferred) | None => DispatchMode::NativePreferred,
148    }
149}
150
151async fn build_tool_port(cfg: &AliceConfig) -> eyre::Result<Arc<dyn ToolPort>> {
152    if cfg.mcp.servers.is_empty() {
153        return Ok(Arc::new(NoOpToolPort));
154    }
155
156    if cfg.mcp.servers.len() == 1 {
157        return build_single_tool_port(&cfg.mcp.servers[0]).await;
158    }
159
160    let mut ports = Vec::with_capacity(cfg.mcp.servers.len());
161    for server in &cfg.mcp.servers {
162        let port = build_single_tool_port(server).await?;
163        ports.push((server.id.clone(), port));
164    }
165
166    Ok(Arc::new(CompositeToolPort::new(ports)))
167}
168
169async fn build_single_tool_port(server: &McpServerConfig) -> eyre::Result<Arc<dyn ToolPort>> {
170    let env = server
171        .env
172        .as_ref()
173        .map_or_else(Vec::new, |vars| vars.iter().map(|(k, v)| (k.clone(), v.clone())).collect());
174
175    let adapter = McpToolAdapter::connect_stdio(&server.id, &server.command, &server.args, &env)
176        .await
177        .map_err(|error| eyre::eyre!("failed to connect MCP server '{}': {error}", server.id))?;
178
179    let timeout = server.tool_timeout_ms.unwrap_or(DEFAULT_TOOL_TIMEOUT_MS);
180    let layer = TimeoutToolLayer::new(timeout);
181
182    Ok(layer.wrap(Arc::new(adapter)))
183}
184
#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::{
        AgentBackendConfig, AliceConfig, ChannelsConfig, McpConfig, MemoryConfig, RuntimeConfig,
        SkillsConfig,
    };

    /// Minimal test config: no MCP servers, default (bob) backend, and a
    /// process-unique SQLite path so concurrently running tests never share
    /// a database file.
    fn base_config() -> AliceConfig {
        use std::sync::atomic::{AtomicU64, Ordering};
        // Per-process counter keeps each generated db_path distinct.
        static COUNTER: AtomicU64 = AtomicU64::new(1);
        let n = COUNTER.fetch_add(1, Ordering::Relaxed);
        AliceConfig {
            runtime: RuntimeConfig {
                default_model: "openai:gpt-4o-mini".to_string(),
                max_steps: Some(3),
                turn_timeout_ms: Some(10_000),
                dispatch_mode: Some(ConfigDispatchMode::PromptGuided),
            },
            agent: AgentBackendConfig::default(),
            memory: MemoryConfig {
                // NOTE(review): these temp-dir db files are never deleted
                // after the test run; consider a tempdir guard for cleanup.
                db_path: format!(
                    "{}/alice-bootstrap-test-{}-{}.db",
                    std::env::temp_dir().display(),
                    std::process::id(),
                    n
                ),
                ..MemoryConfig::default()
            },
            skills: SkillsConfig::default(),
            channels: ChannelsConfig::default(),
            mcp: McpConfig::default(),
        }
    }

    /// Happy path: with no MCP servers the runtime builds and the default
    /// model is normalized to the `provider/model` form.
    #[tokio::test]
    async fn build_runtime_without_mcp() {
        let cfg = base_config();
        let built = build_runtime(&cfg).await;
        assert!(built.is_ok(), "runtime should build without mcp");
        let Ok(built) = built else { return };
        assert_eq!(built.default_model(), "openai/gpt-4o-mini");
    }

    /// The default (bob) agent backend wires up without error.
    #[tokio::test]
    async fn build_runtime_with_bob_backend() {
        let cfg = base_config();
        let built = build_runtime(&cfg).await;
        assert!(built.is_ok(), "runtime should build with bob backend");
    }

    /// With the `acp-agent` feature enabled, an ACP backend config builds.
    /// NOTE(review): presumably the subprocess is not spawned at build time
    /// (the command "mock-agent" does not exist) — confirm in AcpAgentBackend.
    #[cfg(feature = "acp-agent")]
    #[tokio::test]
    async fn build_runtime_with_acp_backend() {
        let mut cfg = base_config();
        cfg.agent = AgentBackendConfig {
            backend: AgentBackendType::Acp,
            acp_command: Some("mock-agent".to_string()),
            acp_args: vec![],
            acp_working_dir: None,
        };
        let built = build_runtime(&cfg).await;
        assert!(built.is_ok(), "runtime should build with acp backend");
    }

    /// Config dispatch modes map one-to-one; `None` defaults to NativePreferred.
    #[test]
    fn dispatch_mode_mapping() {
        assert_eq!(
            resolve_dispatch_mode(Some(ConfigDispatchMode::PromptGuided)),
            DispatchMode::PromptGuided
        );
        assert_eq!(
            resolve_dispatch_mode(Some(ConfigDispatchMode::NativePreferred)),
            DispatchMode::NativePreferred
        );
        assert_eq!(resolve_dispatch_mode(None), DispatchMode::NativePreferred);
    }

    /// `provider:model` is rewritten to `provider/model` for liter_llm.
    #[test]
    fn normalize_model_name_converts_colon_to_slash() {
        assert_eq!(normalize_model_name("openai:gpt-4o-mini"), "openai/gpt-4o-mini");
        assert_eq!(
            normalize_model_name("anthropic:claude-sonnet-4-20250514"),
            "anthropic/claude-sonnet-4-20250514"
        );
        assert_eq!(normalize_model_name("groq:llama3-70b"), "groq/llama3-70b");
    }

    /// Already-normalized names pass through untouched.
    #[test]
    fn normalize_model_name_preserves_slash_format() {
        assert_eq!(normalize_model_name("openai/gpt-4o-mini"), "openai/gpt-4o-mini");
    }

    /// Bare model names (no provider prefix) pass through untouched.
    #[test]
    fn normalize_model_name_preserves_bare_model() {
        assert_eq!(normalize_model_name("gpt-4o-mini"), "gpt-4o-mini");
    }
}