use crate::llm::{ProviderConfig, ProviderType, RateLimitConfig, SamplingParams};
use crate::tools::sandbox::hyperlight::{PoolConfig, SandboxConfig};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::time::Duration;
/// Top-level Acton AI configuration: a map of named providers, an optional
/// explicit default provider, and an optional sandbox tuning section.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ActonAIConfig {
    /// Provider configurations keyed by a user-chosen name.
    #[serde(default)]
    pub providers: HashMap<String, NamedProviderConfig>,
    /// Name of the provider to use by default; when unset, a sole registered
    /// provider is used implicitly (see [`ActonAIConfig::effective_default`]).
    pub default_provider: Option<String>,
    /// Optional sandbox pool/limits section.
    #[serde(default)]
    pub sandbox: Option<SandboxFileConfig>,
}

impl ActonAIConfig {
    /// Creates an empty configuration with no providers registered.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Registers `config` under `name`, replacing any previous entry.
    #[must_use]
    pub fn with_provider(mut self, name: impl Into<String>, config: NamedProviderConfig) -> Self {
        self.providers.insert(name.into(), config);
        self
    }

    /// Sets the explicit default provider name.
    #[must_use]
    pub fn with_default_provider(mut self, name: impl Into<String>) -> Self {
        self.default_provider = Some(name.into());
        self
    }

    /// Resolves the provider to use by default: the explicit
    /// `default_provider` when set, otherwise the single registered provider,
    /// otherwise `None` (ambiguous or empty).
    #[must_use]
    pub fn effective_default(&self) -> Option<&str> {
        match &self.default_provider {
            Some(name) => Some(name.as_str()),
            None => match self.providers.len() {
                1 => self.providers.keys().next().map(String::as_str),
                _ => None,
            },
        }
    }

    /// Returns `true` when no providers are registered.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.providers.is_empty()
    }

    /// Number of registered providers.
    #[must_use]
    pub fn provider_count(&self) -> usize {
        self.providers.len()
    }
}
/// One provider entry from the configuration file.
///
/// The `type` field selects the backend (`"anthropic"`, `"openai"`,
/// `"ollama"`; any other value is treated as an OpenAI-compatible endpoint).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NamedProviderConfig {
    /// Backend kind; written as `type` in the file.
    #[serde(rename = "type")]
    pub provider_type: String,
    /// Model identifier passed through to the provider.
    pub model: String,
    /// API key stored inline in the file. This is the *lowest*-precedence
    /// key source — environment variables win (see `resolve_api_key`).
    #[serde(default)]
    pub api_key: Option<String>,
    /// Environment variable to read the API key from (highest precedence).
    #[serde(default)]
    pub api_key_env: Option<String>,
    /// Custom endpoint URL; when set, requests are routed through the
    /// OpenAI-compatible transport (see `to_provider_config`).
    #[serde(default)]
    pub base_url: Option<String>,
    /// Request timeout in seconds.
    #[serde(default)]
    pub timeout_secs: Option<u64>,
    /// Maximum tokens to generate per request.
    #[serde(default)]
    pub max_tokens: Option<u32>,
    /// Optional client-side rate limiting.
    #[serde(default)]
    pub rate_limit: Option<RateLimitFileConfig>,
    // Sampling overrides below are omitted from serialized output when unset.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub frequency_penalty: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub presence_penalty: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub seed: Option<u64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_sequences: Option<Vec<String>>,
}

impl NamedProviderConfig {
    /// Shared constructor: the given type and model, every optional field unset.
    fn base(provider_type: &str, model: impl Into<String>) -> Self {
        Self {
            provider_type: provider_type.to_string(),
            model: model.into(),
            api_key: None,
            api_key_env: None,
            base_url: None,
            timeout_secs: None,
            max_tokens: None,
            rate_limit: None,
            temperature: None,
            top_k: None,
            top_p: None,
            frequency_penalty: None,
            presence_penalty: None,
            seed: None,
            stop_sequences: None,
        }
    }

    /// Anthropic provider reading its key from `ANTHROPIC_API_KEY`.
    #[must_use]
    pub fn anthropic(model: impl Into<String>) -> Self {
        Self {
            api_key_env: Some("ANTHROPIC_API_KEY".to_string()),
            ..Self::base("anthropic", model)
        }
    }

    /// OpenAI provider reading its key from `OPENAI_API_KEY`.
    #[must_use]
    pub fn openai(model: impl Into<String>) -> Self {
        Self {
            api_key_env: Some("OPENAI_API_KEY".to_string()),
            ..Self::base("openai", model)
        }
    }

    /// Local Ollama provider with a generous timeout and rate limits
    /// (local inference is effectively unmetered), no API key needed.
    #[must_use]
    pub fn ollama(model: impl Into<String>) -> Self {
        Self {
            base_url: Some("http://localhost:11434/v1".to_string()),
            timeout_secs: Some(300),
            rate_limit: Some(RateLimitFileConfig {
                requests_per_minute: 1000,
                tokens_per_minute: 1_000_000,
            }),
            ..Self::base("ollama", model)
        }
    }

    /// Sets the environment variable to read the API key from.
    #[must_use]
    pub fn with_api_key_env(mut self, env_var: impl Into<String>) -> Self {
        self.api_key_env = Some(env_var.into());
        self
    }

    /// Sets an inline API key (lowest-precedence key source).
    #[must_use]
    pub fn with_api_key(mut self, key: impl Into<String>) -> Self {
        self.api_key = Some(key.into());
        self
    }

    /// Sets a custom endpoint URL.
    #[must_use]
    pub fn with_base_url(mut self, url: impl Into<String>) -> Self {
        self.base_url = Some(url.into());
        self
    }

    /// Sets the request timeout in seconds.
    #[must_use]
    pub fn with_timeout_secs(mut self, secs: u64) -> Self {
        self.timeout_secs = Some(secs);
        self
    }

    /// Sets the maximum tokens generated per request.
    #[must_use]
    pub fn with_max_tokens(mut self, tokens: u32) -> Self {
        self.max_tokens = Some(tokens);
        self
    }

    /// Sets the client-side rate limit.
    #[must_use]
    pub fn with_rate_limit(mut self, rate_limit: RateLimitFileConfig) -> Self {
        self.rate_limit = Some(rate_limit);
        self
    }

    /// Sets the sampling temperature.
    #[must_use]
    pub fn with_temperature(mut self, temperature: f64) -> Self {
        self.temperature = Some(temperature);
        self
    }

    /// Sets top-k sampling.
    #[must_use]
    pub fn with_top_k(mut self, top_k: u32) -> Self {
        self.top_k = Some(top_k);
        self
    }

    /// Sets nucleus (top-p) sampling.
    #[must_use]
    pub fn with_top_p(mut self, top_p: f64) -> Self {
        self.top_p = Some(top_p);
        self
    }

    /// Sets the frequency penalty.
    /// (Added for parity with the `frequency_penalty` field; the other
    /// sampling fields already had builders.)
    #[must_use]
    pub fn with_frequency_penalty(mut self, penalty: f64) -> Self {
        self.frequency_penalty = Some(penalty);
        self
    }

    /// Sets the presence penalty.
    #[must_use]
    pub fn with_presence_penalty(mut self, penalty: f64) -> Self {
        self.presence_penalty = Some(penalty);
        self
    }

    /// Sets the sampling seed.
    #[must_use]
    pub fn with_seed(mut self, seed: u64) -> Self {
        self.seed = Some(seed);
        self
    }

    /// Sets the stop sequences.
    #[must_use]
    pub fn with_stop_sequences(mut self, sequences: Vec<String>) -> Self {
        self.stop_sequences = Some(sequences);
        self
    }

    /// Resolves the API key, returning an empty string if none is found.
    ///
    /// Precedence (first non-empty wins):
    /// 1. the configured `api_key_env` environment variable,
    /// 2. the standard env var for known provider types
    ///    (`ANTHROPIC_API_KEY` / `OPENAI_API_KEY`),
    /// 3. the inline `api_key` from the file.
    ///
    /// NOTE(review): inline keys deliberately rank below environment
    /// variables so deployments can override file contents — confirm this
    /// matches documented behavior.
    #[must_use]
    pub fn resolve_api_key(&self) -> String {
        if let Some(ref env_var) = self.api_key_env {
            if let Ok(key) = std::env::var(env_var) {
                if !key.is_empty() {
                    return key;
                }
            }
        }
        let standard_env = match self.provider_type.to_lowercase().as_str() {
            "anthropic" => Some("ANTHROPIC_API_KEY"),
            "openai" => Some("OPENAI_API_KEY"),
            _ => None,
        };
        if let Some(env_var) = standard_env {
            if let Ok(key) = std::env::var(env_var) {
                if !key.is_empty() {
                    return key;
                }
            }
        }
        if let Some(ref key) = self.api_key {
            return key.clone();
        }
        String::new()
    }

    /// Converts this file-level entry into a runtime [`ProviderConfig`].
    #[must_use]
    pub fn to_provider_config(&self) -> ProviderConfig {
        let api_key = self.resolve_api_key();
        // Pick the backend-specific base configuration; unknown types are
        // treated as a generic OpenAI-compatible endpoint.
        let base_config = match self.provider_type.to_lowercase().as_str() {
            "anthropic" => ProviderConfig::anthropic(&api_key).with_model(&self.model),
            "openai" => ProviderConfig::openai(&api_key).with_model(&self.model),
            "ollama" => ProviderConfig::ollama(&self.model),
            _ => {
                let base_url = self
                    .base_url
                    .clone()
                    .unwrap_or_else(|| "http://localhost:8080/v1".to_string());
                ProviderConfig::openai_compatible(&base_url, &self.model).with_api_key(&api_key)
            }
        };
        let mut config = base_config;
        // An explicit base_url always routes through the OpenAI-compatible
        // transport, regardless of the declared provider type.
        if let Some(ref url) = self.base_url {
            config = config
                .with_base_url(url)
                .with_provider_type(ProviderType::openai_compatible(url));
        }
        if let Some(secs) = self.timeout_secs {
            config = config.with_timeout(Duration::from_secs(secs));
        }
        if let Some(tokens) = self.max_tokens {
            config = config.with_max_tokens(tokens);
        }
        if let Some(ref rate_limit) = self.rate_limit {
            config = config.with_rate_limit(rate_limit.to_rate_limit_config());
        }
        // Gather sampling overrides; only attach them when at least one is set.
        let mut sampling = SamplingParams::default();
        if let Some(temp) = self.temperature {
            sampling.temperature = Some(temp);
        }
        if let Some(k) = self.top_k {
            sampling.top_k = Some(k);
        }
        if let Some(p) = self.top_p {
            sampling.top_p = Some(p);
        }
        if let Some(fp) = self.frequency_penalty {
            sampling.frequency_penalty = Some(fp);
        }
        if let Some(pp) = self.presence_penalty {
            sampling.presence_penalty = Some(pp);
        }
        if let Some(s) = self.seed {
            sampling.seed = Some(s);
        }
        if let Some(ref seqs) = self.stop_sequences {
            sampling.stop_sequences = Some(seqs.clone());
        }
        if !sampling.is_empty() {
            config = config.with_sampling(sampling);
        }
        config
    }
}
/// Sandbox section of the configuration file. Every field is optional;
/// unset fields fall back to the sandbox defaults at conversion time.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SandboxFileConfig {
    /// How many sandboxes to pre-warm per type.
    #[serde(default)]
    pub pool_warmup: Option<usize>,
    /// Cap on pooled sandboxes per type.
    #[serde(default)]
    pub pool_max_per_type: Option<usize>,
    /// Executions allowed before a sandbox is recycled.
    #[serde(default)]
    pub max_executions_before_recycle: Option<usize>,
    /// Per-execution resource limits.
    #[serde(default)]
    pub limits: Option<SandboxLimitsConfig>,
}

impl SandboxFileConfig {
    /// Creates a section with every field unset.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the warmup count.
    #[must_use]
    pub fn with_pool_warmup(self, count: usize) -> Self {
        Self {
            pool_warmup: Some(count),
            ..self
        }
    }

    /// Sets the per-type pool cap.
    #[must_use]
    pub fn with_pool_max_per_type(self, max: usize) -> Self {
        Self {
            pool_max_per_type: Some(max),
            ..self
        }
    }

    /// Sets the execution count before recycling.
    #[must_use]
    pub fn with_max_executions_before_recycle(self, max: usize) -> Self {
        Self {
            max_executions_before_recycle: Some(max),
            ..self
        }
    }

    /// Sets the per-execution limits.
    #[must_use]
    pub fn with_limits(self, limits: SandboxLimitsConfig) -> Self {
        Self {
            limits: Some(limits),
            ..self
        }
    }

    /// Builds a [`PoolConfig`], overriding only the fields set here.
    #[must_use]
    pub fn to_pool_config(&self) -> PoolConfig {
        let pool = PoolConfig::default();
        let pool = match self.pool_warmup {
            Some(count) => pool.with_warmup_count(count),
            None => pool,
        };
        let pool = match self.pool_max_per_type {
            Some(max) => pool.with_max_per_type(max),
            None => pool,
        };
        match self.max_executions_before_recycle {
            Some(max) => pool.with_max_executions_before_recycle(max),
            None => pool,
        }
    }

    /// Builds a [`SandboxConfig`], applying timeout/memory limits (memory is
    /// given in MiB in the file and converted to bytes here) and the warmup
    /// count as the pool size.
    #[must_use]
    pub fn to_sandbox_config(&self) -> SandboxConfig {
        let mut sandbox = SandboxConfig::default();
        if let Some(limits) = &self.limits {
            if let Some(timeout_ms) = limits.max_execution_ms {
                sandbox = sandbox.with_timeout(Duration::from_millis(timeout_ms));
            }
            if let Some(memory_mb) = limits.max_memory_mb {
                // File value is MiB; runtime expects bytes.
                sandbox = sandbox.with_memory_limit(memory_mb * 1024 * 1024);
            }
        }
        match self.pool_warmup {
            Some(warmup) => sandbox.with_pool_size(Some(warmup)),
            None => sandbox,
        }
    }
}
/// Resource limits applied to each sandboxed execution.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SandboxLimitsConfig {
    /// Wall-clock execution budget in milliseconds.
    #[serde(default)]
    pub max_execution_ms: Option<u64>,
    /// Memory ceiling in MiB.
    #[serde(default)]
    pub max_memory_mb: Option<usize>,
}

impl SandboxLimitsConfig {
    /// Creates limits with nothing set.
    #[must_use]
    pub fn new() -> Self {
        Self::default()
    }

    /// Sets the execution-time budget in milliseconds.
    #[must_use]
    pub fn with_max_execution_ms(self, ms: u64) -> Self {
        Self {
            max_execution_ms: Some(ms),
            ..self
        }
    }

    /// Sets the memory ceiling in MiB.
    #[must_use]
    pub fn with_max_memory_mb(self, mb: usize) -> Self {
        Self {
            max_memory_mb: Some(mb),
            ..self
        }
    }
}
/// Rate-limit settings as written in the configuration file.
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct RateLimitFileConfig {
    /// Allowed requests per minute.
    pub requests_per_minute: u32,
    /// Allowed tokens per minute.
    pub tokens_per_minute: u32,
}

impl RateLimitFileConfig {
    /// Creates a rate limit from explicit request and token budgets.
    #[must_use]
    pub fn new(requests_per_minute: u32, tokens_per_minute: u32) -> Self {
        Self {
            requests_per_minute,
            tokens_per_minute,
        }
    }

    /// Converts into the runtime [`RateLimitConfig`].
    #[must_use]
    pub fn to_rate_limit_config(&self) -> RateLimitConfig {
        RateLimitConfig::new(self.requests_per_minute, self.tokens_per_minute)
    }
}

impl Default for RateLimitFileConfig {
    /// Conservative defaults suitable for hosted APIs.
    fn default() -> Self {
        Self::new(50, 40_000)
    }
}
#[cfg(test)]
mod tests {
// Unit tests for the config file types: provider map resolution,
// file-to-runtime conversion, and TOML (de)serialization round-trips.
use super::*;
use crate::tools::sandbox::hyperlight::{
DEFAULT_MAX_EXECUTIONS_BEFORE_RECYCLE, DEFAULT_MAX_PER_TYPE, DEFAULT_MEMORY_LIMIT,
DEFAULT_TIMEOUT, DEFAULT_WARMUP_COUNT,
};
// --- ActonAIConfig: construction and default-provider resolution ---
#[test]
fn acton_ai_config_default_is_empty() {
let config = ActonAIConfig::default();
assert!(config.is_empty());
assert_eq!(config.provider_count(), 0);
assert!(config.default_provider.is_none());
}
#[test]
fn acton_ai_config_with_provider() {
let config =
ActonAIConfig::new().with_provider("ollama", NamedProviderConfig::ollama("qwen2.5:7b"));
assert_eq!(config.provider_count(), 1);
assert!(config.providers.contains_key("ollama"));
}
#[test]
fn acton_ai_config_effective_default_single_provider() {
// A sole provider is the implicit default even without default_provider.
let config =
ActonAIConfig::new().with_provider("ollama", NamedProviderConfig::ollama("qwen2.5:7b"));
assert_eq!(config.effective_default(), Some("ollama"));
}
#[test]
fn acton_ai_config_effective_default_explicit() {
let config = ActonAIConfig::new()
.with_provider("ollama", NamedProviderConfig::ollama("qwen2.5:7b"))
.with_provider(
"claude",
NamedProviderConfig::anthropic("claude-sonnet-4-20250514"),
)
.with_default_provider("claude");
assert_eq!(config.effective_default(), Some("claude"));
}
#[test]
fn acton_ai_config_effective_default_multiple_no_explicit() {
// Multiple providers with no explicit default is ambiguous -> None.
let config = ActonAIConfig::new()
.with_provider("ollama", NamedProviderConfig::ollama("qwen2.5:7b"))
.with_provider(
"claude",
NamedProviderConfig::anthropic("claude-sonnet-4-20250514"),
);
assert!(config.effective_default().is_none());
}
// --- NamedProviderConfig: constructors and runtime conversion ---
#[test]
fn named_provider_config_anthropic() {
let config = NamedProviderConfig::anthropic("claude-sonnet-4-20250514");
assert_eq!(config.provider_type, "anthropic");
assert_eq!(config.model, "claude-sonnet-4-20250514");
assert_eq!(config.api_key_env, Some("ANTHROPIC_API_KEY".to_string()));
}
#[test]
fn named_provider_config_openai() {
let config = NamedProviderConfig::openai("gpt-4o");
assert_eq!(config.provider_type, "openai");
assert_eq!(config.model, "gpt-4o");
assert_eq!(config.api_key_env, Some("OPENAI_API_KEY".to_string()));
}
#[test]
fn named_provider_config_ollama() {
let config = NamedProviderConfig::ollama("qwen2.5:7b");
assert_eq!(config.provider_type, "ollama");
assert_eq!(config.model, "qwen2.5:7b");
assert_eq!(
config.base_url,
Some("http://localhost:11434/v1".to_string())
);
assert_eq!(config.timeout_secs, Some(300));
}
#[test]
fn named_provider_config_to_provider_config_anthropic() {
let config = NamedProviderConfig::anthropic("claude-3-haiku-20240307")
.with_max_tokens(1024)
.with_timeout_secs(60);
let provider = config.to_provider_config();
assert_eq!(provider.model, "claude-3-haiku-20240307");
assert_eq!(provider.max_tokens, 1024);
assert_eq!(provider.timeout, Duration::from_secs(60));
}
#[test]
fn named_provider_config_to_provider_config_ollama() {
let config = NamedProviderConfig::ollama("llama3.2");
let provider = config.to_provider_config();
assert_eq!(provider.model, "llama3.2");
assert_eq!(provider.base_url, "http://localhost:11434/v1");
}
#[test]
fn named_provider_config_resolve_api_key_direct() {
// NOTE: the assertion is deliberately loose because the process
// environment may provide ANTHROPIC_API_KEY, which takes precedence
// over the inline key.
let config = NamedProviderConfig::anthropic("test").with_api_key("direct-key");
let key = config.resolve_api_key();
assert!(!key.is_empty() || config.api_key.is_some());
}
#[test]
fn rate_limit_file_config_to_runtime() {
let file_config = RateLimitFileConfig::new(100, 50_000);
let runtime = file_config.to_rate_limit_config();
assert_eq!(runtime.requests_per_minute, 100);
assert_eq!(runtime.tokens_per_minute, 50_000);
}
// --- TOML serialization / deserialization ---
#[test]
fn config_serialization_roundtrip() {
let config = ActonAIConfig::new()
.with_provider("ollama", NamedProviderConfig::ollama("qwen2.5:7b"))
.with_default_provider("ollama");
let toml_str = toml::to_string(&config).unwrap();
let deserialized: ActonAIConfig = toml::from_str(&toml_str).unwrap();
assert_eq!(deserialized.default_provider, Some("ollama".to_string()));
assert!(deserialized.providers.contains_key("ollama"));
}
#[test]
fn config_from_toml_string() {
let toml_str = r#"
default_provider = "ollama"
[providers.claude]
type = "anthropic"
model = "claude-sonnet-4-20250514"
api_key_env = "ANTHROPIC_API_KEY"
[providers.ollama]
type = "ollama"
model = "qwen2.5:7b"
base_url = "http://localhost:11434/v1"
timeout_secs = 300
[providers.ollama.rate_limit]
requests_per_minute = 1000
tokens_per_minute = 1000000
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
assert_eq!(config.provider_count(), 2);
assert!(config.providers.contains_key("claude"));
assert!(config.providers.contains_key("ollama"));
assert_eq!(config.default_provider, Some("ollama".to_string()));
let ollama = config.providers.get("ollama").unwrap();
assert_eq!(ollama.timeout_secs, Some(300));
assert!(ollama.rate_limit.is_some());
}
// --- Sandbox config: defaults, builders, and runtime conversion ---
#[test]
fn sandbox_file_config_default_is_none() {
let config = SandboxFileConfig::default();
assert!(config.pool_warmup.is_none());
assert!(config.pool_max_per_type.is_none());
assert!(config.max_executions_before_recycle.is_none());
assert!(config.limits.is_none());
}
#[test]
fn sandbox_limits_config_default_is_none() {
let config = SandboxLimitsConfig::default();
assert!(config.max_execution_ms.is_none());
assert!(config.max_memory_mb.is_none());
}
#[test]
fn sandbox_file_config_builder() {
let config = SandboxFileConfig::new()
.with_pool_warmup(8)
.with_pool_max_per_type(64)
.with_max_executions_before_recycle(500);
assert_eq!(config.pool_warmup, Some(8));
assert_eq!(config.pool_max_per_type, Some(64));
assert_eq!(config.max_executions_before_recycle, Some(500));
}
#[test]
fn sandbox_limits_config_builder() {
let config = SandboxLimitsConfig::new()
.with_max_execution_ms(60000)
.with_max_memory_mb(128);
assert_eq!(config.max_execution_ms, Some(60000));
assert_eq!(config.max_memory_mb, Some(128));
}
#[test]
fn sandbox_file_config_to_pool_config_with_values() {
let config = SandboxFileConfig {
pool_warmup: Some(8),
pool_max_per_type: Some(64),
max_executions_before_recycle: Some(500),
limits: None,
};
let pool_config = config.to_pool_config();
assert_eq!(pool_config.warmup_count, 8);
assert_eq!(pool_config.max_per_type, 64);
assert_eq!(pool_config.max_executions_before_recycle, 500);
}
#[test]
fn sandbox_file_config_to_pool_config_with_defaults() {
// Unset fields fall back to the hyperlight defaults.
let config = SandboxFileConfig::default();
let pool_config = config.to_pool_config();
assert_eq!(pool_config.warmup_count, DEFAULT_WARMUP_COUNT);
assert_eq!(pool_config.max_per_type, DEFAULT_MAX_PER_TYPE);
assert_eq!(
pool_config.max_executions_before_recycle,
DEFAULT_MAX_EXECUTIONS_BEFORE_RECYCLE
);
}
#[test]
fn sandbox_file_config_to_pool_config_partial() {
// Mixed case: set fields override, unset fields keep defaults.
let config = SandboxFileConfig {
pool_warmup: Some(16),
pool_max_per_type: None,
max_executions_before_recycle: None,
limits: None,
};
let pool_config = config.to_pool_config();
assert_eq!(pool_config.warmup_count, 16);
assert_eq!(pool_config.max_per_type, DEFAULT_MAX_PER_TYPE);
assert_eq!(
pool_config.max_executions_before_recycle,
DEFAULT_MAX_EXECUTIONS_BEFORE_RECYCLE
);
}
#[test]
fn sandbox_file_config_to_sandbox_config_with_limits() {
let config = SandboxFileConfig {
pool_warmup: Some(4),
pool_max_per_type: None,
max_executions_before_recycle: None,
limits: Some(SandboxLimitsConfig {
max_execution_ms: Some(60000),
max_memory_mb: Some(128),
}),
};
let sandbox_config = config.to_sandbox_config();
assert_eq!(sandbox_config.timeout, Duration::from_millis(60000));
// max_memory_mb is MiB in the file, bytes at runtime.
assert_eq!(sandbox_config.memory_limit, 128 * 1024 * 1024);
assert_eq!(sandbox_config.pool_size, Some(4));
}
#[test]
fn sandbox_file_config_to_sandbox_config_with_defaults() {
let config = SandboxFileConfig::default();
let sandbox_config = config.to_sandbox_config();
assert_eq!(sandbox_config.timeout, DEFAULT_TIMEOUT);
assert_eq!(sandbox_config.memory_limit, DEFAULT_MEMORY_LIMIT);
}
#[test]
fn config_from_toml_with_sandbox_section() {
let toml_str = r#"
default_provider = "ollama"
[providers.ollama]
type = "ollama"
model = "qwen2.5:7b"
[sandbox]
pool_warmup = 8
pool_max_per_type = 64
max_executions_before_recycle = 500
[sandbox.limits]
max_execution_ms = 60000
max_memory_mb = 128
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
assert!(config.sandbox.is_some());
let sandbox = config.sandbox.unwrap();
assert_eq!(sandbox.pool_warmup, Some(8));
assert_eq!(sandbox.pool_max_per_type, Some(64));
assert_eq!(sandbox.max_executions_before_recycle, Some(500));
let limits = sandbox.limits.unwrap();
assert_eq!(limits.max_execution_ms, Some(60000));
assert_eq!(limits.max_memory_mb, Some(128));
}
#[test]
fn config_from_toml_without_sandbox_section() {
let toml_str = r#"
default_provider = "ollama"
[providers.ollama]
type = "ollama"
model = "qwen2.5:7b"
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
assert!(config.sandbox.is_none());
}
#[test]
fn config_from_toml_with_partial_sandbox() {
let toml_str = r#"
[sandbox]
pool_warmup = 8
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
assert!(config.sandbox.is_some());
let sandbox = config.sandbox.unwrap();
assert_eq!(sandbox.pool_warmup, Some(8));
assert!(sandbox.pool_max_per_type.is_none());
assert!(sandbox.limits.is_none());
}
// --- Sampling parameters ---
#[test]
fn named_provider_config_sampling_fields_default_to_none() {
let config = NamedProviderConfig::anthropic("claude-sonnet-4-20250514");
assert!(config.temperature.is_none());
assert!(config.top_k.is_none());
assert!(config.top_p.is_none());
assert!(config.frequency_penalty.is_none());
assert!(config.presence_penalty.is_none());
assert!(config.seed.is_none());
assert!(config.stop_sequences.is_none());
}
#[test]
fn named_provider_config_to_provider_config_with_sampling() {
let config = NamedProviderConfig::anthropic("claude-sonnet-4-20250514")
.with_temperature(0.7)
.with_top_p(0.9);
let provider = config.to_provider_config();
assert_eq!(provider.sampling.temperature, Some(0.7));
assert_eq!(provider.sampling.top_p, Some(0.9));
}
#[test]
fn config_from_toml_with_sampling_params() {
let toml_str = r#"
default_provider = "claude"
[providers.claude]
type = "anthropic"
model = "claude-sonnet-4-20250514"
api_key_env = "ANTHROPIC_API_KEY"
temperature = 0.7
top_p = 0.9
stop_sequences = ["END", "STOP"]
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
let claude = config.providers.get("claude").unwrap();
assert_eq!(claude.temperature, Some(0.7));
assert_eq!(claude.top_p, Some(0.9));
assert_eq!(
claude.stop_sequences,
Some(vec!["END".to_string(), "STOP".to_string()])
);
}
#[test]
fn config_from_toml_with_only_limits() {
let toml_str = r#"
[sandbox.limits]
max_execution_ms = 60000
"#;
let config: ActonAIConfig = toml::from_str(toml_str).unwrap();
assert!(config.sandbox.is_some());
let sandbox = config.sandbox.unwrap();
assert!(sandbox.pool_warmup.is_none());
let limits = sandbox.limits.unwrap();
assert_eq!(limits.max_execution_ms, Some(60000));
assert!(limits.max_memory_mb.is_none());
}
#[test]
fn config_serialization_roundtrip_with_sandbox() {
let config = ActonAIConfig {
providers: HashMap::new(),
default_provider: None,
sandbox: Some(SandboxFileConfig {
pool_warmup: Some(8),
pool_max_per_type: Some(64),
max_executions_before_recycle: Some(500),
limits: Some(SandboxLimitsConfig {
max_execution_ms: Some(60000),
max_memory_mb: Some(128),
}),
}),
};
let toml_str = toml::to_string(&config).unwrap();
let deserialized: ActonAIConfig = toml::from_str(&toml_str).unwrap();
assert!(deserialized.sandbox.is_some());
let sandbox = deserialized.sandbox.unwrap();
assert_eq!(sandbox.pool_warmup, Some(8));
assert_eq!(sandbox.pool_max_per_type, Some(64));
assert_eq!(sandbox.max_executions_before_recycle, Some(500));
let limits = sandbox.limits.unwrap();
assert_eq!(limits.max_execution_ms, Some(60000));
assert_eq!(limits.max_memory_mb, Some(128));
}
#[test]
fn acton_ai_config_default_has_no_sandbox() {
let config = ActonAIConfig::default();
assert!(config.sandbox.is_none());
}
#[test]
fn sandbox_file_config_is_clone() {
let config = SandboxFileConfig::new().with_pool_warmup(8);
let cloned = config.clone();
assert_eq!(config.pool_warmup, cloned.pool_warmup);
}
#[test]
fn sandbox_limits_config_is_clone() {
let config = SandboxLimitsConfig::new().with_max_execution_ms(60000);
let cloned = config.clone();
assert_eq!(config.max_execution_ms, cloned.max_execution_ms);
}
}