use crate::config::{self, ActonAIConfig, SandboxFileConfig};
use crate::conversation::ConversationBuilder;
use crate::error::{ActonAIError, ActonAIErrorKind};
use crate::kernel::{Kernel, KernelConfig};
use crate::llm::{LLMProvider, ProviderConfig};
use crate::messages::Message;
use crate::prompt::PromptBuilder;
use crate::tools::builtins::BuiltinTools;
use crate::tools::sandbox::hyperlight::PoolConfig;
use crate::tools::sandbox::{HyperlightSandboxFactory, SandboxConfig, SandboxFactory, SandboxPool};
use acton_reactive::prelude::*;
use std::collections::HashMap;
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
/// Name under which the single-provider convenience methods (`ollama`,
/// `anthropic`, `openai`, `provider`) register their provider, and the
/// fallback default name when multiple providers are configured.
pub const DEFAULT_PROVIDER_NAME: &str = "default";
/// Shared, reference-counted state behind every [`ActonAI`] handle.
///
/// `ActonAI::clone` only bumps the `Arc` around this struct, so all clones
/// observe the same runtime, providers, and shutdown flag.
pub(crate) struct ActonAIInner {
    // Actor runtime hosting the kernel, provider actors, and sandbox pool.
    pub(crate) runtime: ActorRuntime,
    // Provider actors keyed by the name they were registered under.
    pub(crate) providers: HashMap<String, ActorHandle>,
    // Key into `providers` used when no explicit provider is requested;
    // validated to exist at launch time.
    pub(crate) default_provider: String,
    // Builtin tool set, if one was configured at build time.
    pub(crate) builtins: Option<BuiltinTools>,
    // When true, prompts automatically attach the builtin tools.
    pub(crate) auto_builtins: bool,
    // Set once `shutdown` has been initiated; accessed with `SeqCst`.
    pub(crate) is_shutdown: AtomicBool,
}
/// Cheaply cloneable handle to a launched ActonAI application.
///
/// All clones share the same [`ActonAIInner`] through an `Arc`.
pub struct ActonAI {
    pub(crate) inner: Arc<ActonAIInner>,
}
impl Clone for ActonAI {
    /// Cloning is cheap: only the reference count on the shared inner state
    /// is incremented — no runtime or provider state is copied.
    fn clone(&self) -> Self {
        let inner = Arc::clone(&self.inner);
        Self { inner }
    }
}
impl std::fmt::Debug for ActonAI {
    /// Summarizes the shared state without dumping the runtime or actor
    /// handles; `finish_non_exhaustive` signals the omission to readers.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let inner = &self.inner;
        f.debug_struct("ActonAI")
            .field("is_shutdown", &inner.is_shutdown.load(Ordering::SeqCst))
            .field("has_builtins", &inner.builtins.is_some())
            .field("auto_builtins", &inner.auto_builtins)
            .field("provider_count", &inner.providers.len())
            .field("default_provider", &inner.default_provider)
            .finish_non_exhaustive()
    }
}
impl ActonAI {
    /// Entry point: returns a fresh builder with no providers configured.
    #[must_use]
    pub fn builder() -> ActonAIBuilder {
        ActonAIBuilder::default()
    }

    /// Start a prompt with the given user content.
    ///
    /// If builtins were configured with auto-attachment, they are wired into
    /// the returned builder automatically.
    #[must_use]
    pub fn prompt(&self, content: impl Into<String>) -> PromptBuilder {
        let mut builder = PromptBuilder::new(self.clone(), content.into());
        if self.inner.auto_builtins && self.inner.builtins.is_some() {
            builder = builder.use_builtins();
        }
        builder
    }

    /// Borrow the underlying actor runtime.
    #[must_use]
    pub fn runtime(&self) -> &ActorRuntime {
        &self.inner.runtime
    }

    /// Mutably borrow the underlying actor runtime.
    ///
    /// # Panics
    ///
    /// Panics if this `ActonAI` has live clones (the inner `Arc` is shared),
    /// because exclusive access is then impossible.
    pub fn runtime_mut(&mut self) -> &mut ActorRuntime {
        &mut Arc::get_mut(&mut self.inner)
            .expect("cannot get mutable runtime: ActonAI is shared")
            .runtime
    }

    /// Handle to the default provider actor.
    ///
    /// # Panics
    ///
    /// Panics if the default provider is missing from the provider map; this
    /// would indicate a bug, since `launch` validates that it exists.
    #[must_use]
    pub fn provider_handle(&self) -> ActorHandle {
        self.inner
            .providers
            .get(&self.inner.default_provider)
            .cloned()
            .expect("default provider must exist")
    }

    /// Handle to the provider registered under `name`, if any.
    #[must_use]
    pub fn provider_handle_named(&self, name: &str) -> Option<ActorHandle> {
        self.inner.providers.get(name).cloned()
    }

    /// Name of the provider used when none is specified explicitly.
    #[must_use]
    pub fn default_provider_name(&self) -> &str {
        &self.inner.default_provider
    }

    /// Iterator over all registered provider names (unordered — backed by a
    /// `HashMap`).
    pub fn provider_names(&self) -> impl Iterator<Item = &str> {
        self.inner.providers.keys().map(String::as_str)
    }

    /// Number of registered providers.
    #[must_use]
    pub fn provider_count(&self) -> usize {
        self.inner.providers.len()
    }

    /// Whether a provider is registered under `name`.
    #[must_use]
    pub fn has_provider(&self, name: &str) -> bool {
        self.inner.providers.contains_key(name)
    }

    /// Whether `shutdown` has been initiated on any clone of this handle.
    #[must_use]
    pub fn is_shutdown(&self) -> bool {
        self.inner.is_shutdown.load(Ordering::SeqCst)
    }

    /// The configured builtin tools, if any.
    #[must_use]
    pub fn builtins(&self) -> Option<&BuiltinTools> {
        self.inner.builtins.as_ref()
    }

    /// Whether builtin tools were configured at build time.
    #[must_use]
    pub fn has_builtins(&self) -> bool {
        self.inner.builtins.is_some()
    }

    /// Whether builtins are attached to prompts automatically.
    #[must_use]
    pub fn is_auto_builtins(&self) -> bool {
        self.inner.auto_builtins
    }

    /// Continue from prior conversation `messages`: starts a prompt seeded
    /// with that history and empty new content. Auto-attaches builtins the
    /// same way `prompt` does.
    #[must_use]
    pub fn continue_with(&self, messages: impl IntoIterator<Item = Message>) -> PromptBuilder {
        let mut builder = PromptBuilder::new(self.clone(), String::new());
        builder = builder.messages(messages);
        if self.inner.auto_builtins && self.inner.builtins.is_some() {
            builder = builder.use_builtins();
        }
        builder
    }

    /// Start a multi-turn conversation builder.
    #[must_use]
    pub fn conversation(&self) -> ConversationBuilder {
        ConversationBuilder::new(self.clone())
    }

    /// Shut down the actor runtime and every actor it hosts.
    ///
    /// The shutdown flag is set *before* awaiting, so other clones observe
    /// `is_shutdown() == true` even if `shutdown_all` subsequently fails.
    ///
    /// # Errors
    ///
    /// Returns an error if the runtime fails to shut down cleanly.
    pub async fn shutdown(self) -> Result<(), ActonAIError> {
        self.inner.is_shutdown.store(true, Ordering::SeqCst);
        // Clone the runtime handle to obtain mutability while the original
        // stays behind the shared Arc.
        let mut runtime = self.inner.runtime.clone();
        runtime
            .shutdown_all()
            .await
            .map_err(|e| ActonAIError::launch_failed(e.to_string()))
    }
}
/// Builder-side selection of which builtin tools to construct at launch.
#[derive(Default, Clone)]
enum BuiltinToolsConfig {
    /// No builtin tools.
    #[default]
    None,
    /// Every available builtin tool.
    All,
    /// Only the named tools; names are validated at launch.
    Select(Vec<String>),
}
/// Builder-side selection of the sandbox strategy for tool execution.
#[derive(Default, Clone)]
enum SandboxMode {
    /// No sandbox configured.
    #[default]
    None,
    /// A single Hyperlight sandbox factory, no pooling.
    Hyperlight(SandboxConfig),
    /// Pooled sandboxes warmed to an explicitly requested size.
    Pool {
        pool_size: usize,
        sandbox_config: SandboxConfig,
        pool_config: PoolConfig,
    },
    /// Pooled sandboxes whose warm-up count comes from `pool_config`
    /// (used when the sandbox settings originate from a config file).
    PoolFromConfig {
        sandbox_config: SandboxConfig,
        pool_config: PoolConfig,
    },
}
/// Fluent builder for [`ActonAI`], finalized by the async `launch` method.
#[derive(Default)]
pub struct ActonAIBuilder {
    // Application name; defaults to "acton-ai" at launch.
    app_name: Option<String>,
    // Providers to spawn, keyed by their registration name.
    providers: HashMap<String, ProviderConfig>,
    // Explicitly requested default provider name, if any.
    default_provider_name: Option<String>,
    // Which builtin tools to construct at launch.
    builtins: BuiltinToolsConfig,
    // Whether prompts auto-attach the builtins.
    auto_builtins: bool,
    // Sandbox strategy for tool execution.
    sandbox_mode: SandboxMode,
}
impl ActonAIBuilder {
    /// Set the application name used by the actor runtime
    /// (default: `"acton-ai"`).
    #[must_use]
    pub fn app_name(mut self, name: impl Into<String>) -> Self {
        self.app_name = Some(name.into());
        self
    }

    /// Register a provider under an explicit name, replacing any provider
    /// previously registered under the same name.
    #[must_use]
    pub fn provider_named(mut self, name: impl Into<String>, config: ProviderConfig) -> Self {
        self.providers.insert(name.into(), config);
        self
    }

    /// Choose which registered provider is the default; validated at launch.
    #[must_use]
    pub fn default_provider(mut self, name: impl Into<String>) -> Self {
        self.default_provider_name = Some(name.into());
        self
    }

    /// Merge settings from the default configuration sources.
    ///
    /// # Errors
    ///
    /// Returns an error if configuration loading fails.
    pub fn from_config(self) -> Result<Self, ActonAIError> {
        let config = config::load()?;
        self.apply_config(config)
    }

    /// Merge settings from the configuration file at `path`.
    ///
    /// # Errors
    ///
    /// Returns an error if the file cannot be read or parsed.
    pub fn from_config_file(self, path: impl AsRef<Path>) -> Result<Self, ActonAIError> {
        let config = config::from_path(path.as_ref())?;
        self.apply_config(config)
    }

    /// Alias for [`Self::from_config`].
    ///
    /// # Errors
    ///
    /// See [`Self::from_config`].
    pub fn try_from_config(self) -> Result<Self, ActonAIError> {
        self.from_config()
    }

    /// Fold a loaded file config into the builder. File-provided providers
    /// overwrite same-named builder providers; an explicit builder default
    /// provider and an explicit sandbox mode take precedence over the file's.
    fn apply_config(mut self, config: ActonAIConfig) -> Result<Self, ActonAIError> {
        for (name, provider_config) in config.providers {
            let runtime_config = provider_config.to_provider_config();
            self.providers.insert(name, runtime_config);
        }
        if self.default_provider_name.is_none() {
            self.default_provider_name = config.default_provider;
        }
        if let Some(sandbox_config) = config.sandbox {
            self = self.apply_sandbox_file_config(&sandbox_config);
        }
        Ok(self)
    }

    /// Apply a file-sourced sandbox config, but only when no sandbox mode was
    /// chosen programmatically — explicit builder calls win.
    fn apply_sandbox_file_config(mut self, config: &SandboxFileConfig) -> Self {
        if matches!(self.sandbox_mode, SandboxMode::None) {
            let sandbox_config = config.to_sandbox_config();
            let pool_config = config.to_pool_config();
            self.sandbox_mode = SandboxMode::PoolFromConfig {
                sandbox_config,
                pool_config,
            };
        }
        self
    }

    /// Register an Ollama provider under [`DEFAULT_PROVIDER_NAME`].
    #[must_use]
    pub fn ollama(self, model: impl Into<String>) -> Self {
        self.provider_named(DEFAULT_PROVIDER_NAME, ProviderConfig::ollama(model))
    }

    /// Register an Ollama provider at a custom base URL, using the
    /// OpenAI-compatible endpoint Ollama exposes.
    #[must_use]
    pub fn ollama_at(self, base_url: impl Into<String>, model: impl Into<String>) -> Self {
        self.provider_named(
            DEFAULT_PROVIDER_NAME,
            ProviderConfig::openai_compatible(base_url, model),
        )
    }

    /// Register an Anthropic provider (default model) under
    /// [`DEFAULT_PROVIDER_NAME`].
    #[must_use]
    pub fn anthropic(self, api_key: impl Into<String>) -> Self {
        self.provider_named(DEFAULT_PROVIDER_NAME, ProviderConfig::anthropic(api_key))
    }

    /// Register an Anthropic provider with an explicit model.
    #[must_use]
    pub fn anthropic_model(self, api_key: impl Into<String>, model: impl Into<String>) -> Self {
        self.provider_named(
            DEFAULT_PROVIDER_NAME,
            ProviderConfig::anthropic(api_key).with_model(model),
        )
    }

    /// Register an OpenAI provider (default model) under
    /// [`DEFAULT_PROVIDER_NAME`].
    #[must_use]
    pub fn openai(self, api_key: impl Into<String>) -> Self {
        self.provider_named(DEFAULT_PROVIDER_NAME, ProviderConfig::openai(api_key))
    }

    /// Register an OpenAI provider with an explicit model.
    #[must_use]
    pub fn openai_model(self, api_key: impl Into<String>, model: impl Into<String>) -> Self {
        self.provider_named(
            DEFAULT_PROVIDER_NAME,
            ProviderConfig::openai(api_key).with_model(model),
        )
    }

    /// Register an arbitrary provider config under [`DEFAULT_PROVIDER_NAME`].
    #[must_use]
    pub fn provider(self, config: ProviderConfig) -> Self {
        self.provider_named(DEFAULT_PROVIDER_NAME, config)
    }

    /// Enable all builtin tools and attach them to prompts automatically.
    #[must_use]
    pub fn with_builtins(mut self) -> Self {
        self.builtins = BuiltinToolsConfig::All;
        self.auto_builtins = true;
        self
    }

    /// Keep configured builtins available but stop auto-attaching them.
    #[must_use]
    pub fn manual_builtins(mut self) -> Self {
        self.auto_builtins = false;
        self
    }

    /// Enable only the named builtin tools (validated at launch) and attach
    /// them to prompts automatically.
    #[must_use]
    pub fn with_builtin_tools(mut self, tools: &[&str]) -> Self {
        self.builtins =
            BuiltinToolsConfig::Select(tools.iter().map(|s| (*s).to_string()).collect());
        self.auto_builtins = true;
        self
    }

    /// Use a single Hyperlight sandbox with default settings.
    #[must_use]
    pub fn with_hyperlight_sandbox(mut self) -> Self {
        self.sandbox_mode = SandboxMode::Hyperlight(SandboxConfig::default());
        self
    }

    /// Use a single Hyperlight sandbox with explicit settings.
    #[must_use]
    pub fn with_hyperlight_sandbox_config(mut self, config: SandboxConfig) -> Self {
        self.sandbox_mode = SandboxMode::Hyperlight(config);
        self
    }

    /// Use a sandbox pool warmed to `pool_size`, with default configs.
    #[must_use]
    pub fn with_sandbox_pool(mut self, pool_size: usize) -> Self {
        self.sandbox_mode = SandboxMode::Pool {
            pool_size,
            sandbox_config: SandboxConfig::default(),
            pool_config: PoolConfig::default(),
        };
        self
    }

    /// Use a sandbox pool warmed to `pool_size` with an explicit sandbox
    /// config and a default pool config.
    #[must_use]
    pub fn with_sandbox_pool_config(
        mut self,
        pool_size: usize,
        sandbox_config: SandboxConfig,
    ) -> Self {
        self.sandbox_mode = SandboxMode::Pool {
            pool_size,
            sandbox_config,
            pool_config: PoolConfig::default(),
        };
        self
    }

    /// Validate the configuration, start the actor runtime, spawn the kernel,
    /// providers, and (optionally) sandbox infrastructure, then construct the
    /// builtin tool set and the [`ActonAI`] handle.
    ///
    /// # Errors
    ///
    /// Returns a configuration error when no provider is registered, when the
    /// default provider cannot be resolved, or when a selected builtin tool
    /// name is unknown.
    pub async fn launch(self) -> Result<ActonAI, ActonAIError> {
        if self.providers.is_empty() {
            return Err(ActonAIError::new(ActonAIErrorKind::Configuration {
                field: "provider".to_string(),
                reason: "no LLM provider configured; use ollama(), anthropic(), openai(), provider(), provider_named(), or from_config()".to_string(),
            }));
        }
        // Resolve the default before consuming `self.providers` below.
        let default_provider_name = self.resolve_default_provider_name()?;
        let app_name = self.app_name.unwrap_or_else(|| "acton-ai".to_string());
        let mut runtime = ActonApp::launch_async().await;
        // The kernel supervises the application; its handle is not needed
        // after spawning.
        let kernel_config = KernelConfig::default().with_app_name(&app_name);
        let _kernel = Kernel::spawn_with_config(&mut runtime, kernel_config).await;
        let mut providers = HashMap::new();
        for (name, config) in self.providers {
            let handle = LLMProvider::spawn(&mut runtime, config).await;
            providers.insert(name, handle);
        }
        {
            use crate::tools::sandbox::hyperlight::WarmPool;
            match self.sandbox_mode {
                SandboxMode::None => {}
                SandboxMode::Hyperlight(config) => {
                    let factory = HyperlightSandboxFactory::with_config_fallback(config);
                    if !factory.is_available() {
                        tracing::warn!(
                            "Hyperlight sandbox configured but hypervisor not available; \
                             sandboxed tools will fail"
                        );
                    }
                    // NOTE(review): the factory is dropped immediately —
                    // nothing retains it, so this branch currently only
                    // performs the availability check. Confirm whether the
                    // factory should be stored for later tool execution.
                    let _factory = Arc::new(factory);
                    tracing::info!("Hyperlight sandbox factory initialized");
                }
                SandboxMode::Pool {
                    pool_size,
                    sandbox_config,
                    pool_config,
                } => {
                    let pool_handle =
                        SandboxPool::spawn(&mut runtime, sandbox_config, pool_config).await;
                    pool_handle
                        .send(WarmPool {
                            count: pool_size,
                            guest_type: None,
                        })
                        .await;
                    tracing::info!(pool_size, "Hyperlight sandbox pool initialized and warmed");
                }
                SandboxMode::PoolFromConfig {
                    sandbox_config,
                    pool_config,
                } => {
                    // Copy the warm-up count out before `pool_config` is
                    // moved into the pool actor.
                    let warmup_count = pool_config.warmup_count;
                    let pool_handle =
                        SandboxPool::spawn(&mut runtime, sandbox_config, pool_config).await;
                    pool_handle
                        .send(WarmPool {
                            count: warmup_count,
                            guest_type: None,
                        })
                        .await;
                    tracing::info!(
                        warmup_count,
                        "Hyperlight sandbox pool initialized from config and warmed"
                    );
                }
            }
        }
        let builtins = match self.builtins {
            BuiltinToolsConfig::None => None,
            BuiltinToolsConfig::All => Some(BuiltinTools::all()),
            BuiltinToolsConfig::Select(ref tools) => {
                let tool_refs: Vec<&str> = tools.iter().map(String::as_str).collect();
                Some(BuiltinTools::select(&tool_refs).map_err(|e| {
                    ActonAIError::new(ActonAIErrorKind::Configuration {
                        field: "builtins".to_string(),
                        reason: e.to_string(),
                    })
                })?)
            }
        };
        Ok(ActonAI {
            inner: Arc::new(ActonAIInner {
                runtime,
                providers,
                default_provider: default_provider_name,
                builtins,
                auto_builtins: self.auto_builtins,
                is_shutdown: AtomicBool::new(false),
            }),
        })
    }

    /// Determine the default provider name using, in order:
    /// 1. an explicitly requested default (which must exist),
    /// 2. the sole provider when exactly one is registered,
    /// 3. a provider registered under [`DEFAULT_PROVIDER_NAME`].
    ///
    /// # Errors
    ///
    /// Returns a configuration error when the explicit default is unknown, or
    /// when multiple providers exist and no default can be resolved.
    fn resolve_default_provider_name(&self) -> Result<String, ActonAIError> {
        if let Some(ref name) = self.default_provider_name {
            if self.providers.contains_key(name) {
                return Ok(name.clone());
            }
            return Err(ActonAIError::new(ActonAIErrorKind::Configuration {
                field: "default_provider".to_string(),
                reason: format!(
                    "default provider '{}' not found; available providers: {}",
                    name,
                    self.provider_name_list()
                ),
            }));
        }
        if self.providers.len() == 1 {
            return Ok(self.providers.keys().next().unwrap().clone());
        }
        if self.providers.contains_key(DEFAULT_PROVIDER_NAME) {
            return Ok(DEFAULT_PROVIDER_NAME.to_string());
        }
        Err(ActonAIError::new(ActonAIErrorKind::Configuration {
            field: "default_provider".to_string(),
            reason: format!(
                "multiple providers configured but no default specified; use default_provider() to set one; available: {}",
                self.provider_name_list()
            ),
        }))
    }

    /// Sorted, comma-separated provider names for error messages.
    ///
    /// Sorting makes the message deterministic despite `HashMap` iteration
    /// order, and borrowing the keys avoids cloning each `String`.
    fn provider_name_list(&self) -> String {
        let mut names: Vec<&str> = self.providers.keys().map(String::as_str).collect();
        names.sort_unstable();
        names.join(", ")
    }

    /// Whether builtins will auto-attach to prompts after launch.
    #[must_use]
    pub fn is_auto_builtins(&self) -> bool {
        self.auto_builtins
    }
}
#[cfg(test)]
mod tests {
    //! Unit tests for the builder's pure configuration logic. No network or
    //! runtime launch except `launch_fails_without_provider`, which exercises
    //! the early validation path before any provider is contacted.
    use super::*;

    #[test]
    fn builder_default_has_no_provider() {
        let builder = ActonAIBuilder::default();
        assert!(builder.providers.is_empty());
        assert!(builder.app_name.is_none());
    }

    #[test]
    fn builder_app_name_sets_name() {
        let builder = ActonAI::builder().app_name("test-app");
        assert_eq!(builder.app_name, Some("test-app".to_string()));
    }

    #[test]
    fn builder_ollama_sets_provider() {
        let builder = ActonAI::builder().ollama("llama3.2");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.model, "llama3.2");
        // Ollama needs no API key.
        assert!(config.api_key.is_empty());
    }

    #[test]
    fn builder_ollama_at_sets_custom_url() {
        let builder = ActonAI::builder().ollama_at("http://custom:11434/v1", "llama3.2");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.model, "llama3.2");
        assert_eq!(config.base_url, "http://custom:11434/v1");
    }

    #[test]
    fn builder_anthropic_sets_provider() {
        let builder = ActonAI::builder().anthropic("sk-ant-test");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.api_key, "sk-ant-test");
        // Default Anthropic model should be some claude variant.
        assert!(config.model.contains("claude"));
    }

    #[test]
    fn builder_anthropic_model_sets_custom_model() {
        let builder = ActonAI::builder().anthropic_model("sk-ant-test", "claude-3-haiku");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.api_key, "sk-ant-test");
        assert_eq!(config.model, "claude-3-haiku");
    }

    #[test]
    fn builder_openai_sets_provider() {
        let builder = ActonAI::builder().openai("sk-test");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.api_key, "sk-test");
        // Default OpenAI model should be some gpt variant.
        assert!(config.model.contains("gpt"));
    }

    #[test]
    fn builder_openai_model_sets_custom_model() {
        let builder = ActonAI::builder().openai_model("sk-test", "gpt-4-turbo");
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.api_key, "sk-test");
        assert_eq!(config.model, "gpt-4-turbo");
    }

    #[test]
    fn builder_provider_sets_custom_config() {
        let custom_config =
            ProviderConfig::openai_compatible("http://custom:8080/v1", "custom-model");
        let builder = ActonAI::builder().provider(custom_config);
        assert!(!builder.providers.is_empty());
        let config = builder.providers.get(DEFAULT_PROVIDER_NAME).unwrap();
        assert_eq!(config.model, "custom-model");
        assert_eq!(config.base_url, "http://custom:8080/v1");
    }

    // Launch must fail fast on the validation path; no runtime is started
    // before the provider check.
    #[tokio::test]
    async fn launch_fails_without_provider() {
        let result = ActonAI::builder().app_name("test").launch().await;
        assert!(result.is_err());
        let err = result.unwrap_err();
        assert!(err.is_configuration());
        assert!(err.to_string().contains("provider"));
    }

    #[test]
    fn with_builtins_enables_auto_builtins() {
        let builder = ActonAI::builder().with_builtins();
        assert!(builder.is_auto_builtins());
    }

    #[test]
    fn with_builtin_tools_enables_auto_builtins() {
        let builder = ActonAI::builder().with_builtin_tools(&["bash", "read_file"]);
        assert!(builder.is_auto_builtins());
    }

    // `manual_builtins` after `with_builtins` keeps the tools but disables
    // auto-attachment.
    #[test]
    fn manual_builtins_disables_auto_builtins() {
        let builder = ActonAI::builder().with_builtins().manual_builtins();
        assert!(!builder.is_auto_builtins());
    }

    #[test]
    fn default_builder_has_no_auto_builtins() {
        let builder = ActonAI::builder();
        assert!(!builder.is_auto_builtins());
    }

    #[test]
    fn builder_provider_named_adds_named_provider() {
        let builder = ActonAI::builder()
            .provider_named("claude", ProviderConfig::anthropic("sk-test"))
            .provider_named("local", ProviderConfig::ollama("qwen2.5:7b"));
        assert_eq!(builder.providers.len(), 2);
        assert!(builder.providers.contains_key("claude"));
        assert!(builder.providers.contains_key("local"));
    }

    #[test]
    fn builder_default_provider_sets_name() {
        let builder = ActonAI::builder()
            .provider_named("claude", ProviderConfig::anthropic("sk-test"))
            .default_provider("claude");
        assert_eq!(builder.default_provider_name, Some("claude".to_string()));
    }

    // Rule 2: a sole registered provider becomes the default implicitly.
    #[test]
    fn resolve_default_single_provider() {
        let builder = ActonAI::builder().provider_named("only-one", ProviderConfig::ollama("test"));
        let name = builder.resolve_default_provider_name().unwrap();
        assert_eq!(name, "only-one");
    }

    // Rule 1: an explicit default wins over everything else.
    #[test]
    fn resolve_default_explicit() {
        let builder = ActonAI::builder()
            .provider_named("a", ProviderConfig::ollama("test-a"))
            .provider_named("b", ProviderConfig::ollama("test-b"))
            .default_provider("b");
        let name = builder.resolve_default_provider_name().unwrap();
        assert_eq!(name, "b");
    }

    // Rule 3: with multiple providers, fall back to DEFAULT_PROVIDER_NAME.
    #[test]
    fn resolve_default_uses_default_name() {
        let builder = ActonAI::builder()
            .ollama("test")
            .provider_named("other", ProviderConfig::anthropic("sk-test"));
        let name = builder.resolve_default_provider_name().unwrap();
        assert_eq!(name, DEFAULT_PROVIDER_NAME);
    }

    #[test]
    fn resolve_default_fails_multiple_no_explicit() {
        let builder = ActonAI::builder()
            .provider_named("a", ProviderConfig::ollama("test-a"))
            .provider_named("b", ProviderConfig::ollama("test-b"));
        let result = builder.resolve_default_provider_name();
        assert!(result.is_err());
    }

    // The error message must name the unknown provider.
    #[test]
    fn resolve_default_fails_invalid_name() {
        let builder = ActonAI::builder()
            .provider_named("actual", ProviderConfig::ollama("test"))
            .default_provider("nonexistent");
        let result = builder.resolve_default_provider_name();
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("nonexistent"));
    }
}