use std::collections::{HashMap, HashSet};
use std::sync::Arc;
#[cfg(feature = "postgres")]
use deadpool_postgres::{Config as PoolConfig, Runtime};
use secrecy::{ExposeSecret, SecretString};
#[cfg(feature = "postgres")]
use tokio_postgres::NoTls;
use crate::channels::wasm::{
ChannelCapabilitiesFile, available_channel_names, install_bundled_channel,
};
use crate::llm::{SessionConfig, SessionManager};
use crate::secrets::{SecretsCrypto, SecretsStore};
use crate::settings::{KeySource, Settings};
use crate::setup::channels::{
SecretsContext, setup_http, setup_telegram, setup_tunnel, setup_wasm_channel,
};
use crate::setup::prompts::{
confirm, input, optional_input, print_error, print_header, print_info, print_step,
print_success, secret_input, select_many, select_one,
};
/// Errors that can occur while running the interactive setup wizard.
#[derive(Debug, thiserror::Error)]
pub enum SetupError {
    /// Terminal or filesystem I/O failure (prompt reads, file writes).
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
    /// Provider authentication failed (e.g. NEAR AI session login).
    #[error("Authentication error: {0}")]
    Auth(String),
    /// Database connection, pooling, or migration failure.
    #[error("Database error: {0}")]
    Database(String),
    /// Invalid, missing, or unsupported configuration value.
    #[error("Configuration error: {0}")]
    Config(String),
    /// A channel-specific setup step failed (see `ChannelSetupError`).
    #[error("Channel setup error: {0}")]
    Channel(String),
    /// The user aborted the wizard.
    #[error("User cancelled")]
    Cancelled,
}
impl From<crate::setup::channels::ChannelSetupError> for SetupError {
    /// Surface channel setup failures as `SetupError::Channel`,
    /// keeping only the rendered error message.
    fn from(err: crate::setup::channels::ChannelSetupError) -> Self {
        Self::Channel(err.to_string())
    }
}
/// Options controlling which parts of the wizard run.
#[derive(Debug, Clone, Default)]
pub struct SetupConfig {
    /// Skip the inference-provider step and keep the existing LLM config.
    pub skip_auth: bool,
    /// Run only the channel-configuration step (single-step mode).
    pub channels_only: bool,
}
/// Interactive first-run wizard that walks the user through database,
/// security, LLM provider, model, embeddings, channel, and heartbeat setup,
/// accumulating results in `settings`.
pub struct SetupWizard {
    /// Which wizard sections to run.
    config: SetupConfig,
    /// Settings being built up across steps; persisted at the end.
    settings: Settings,
    /// Optional pre-existing NEAR AI session (reused when provided).
    session_manager: Option<Arc<SessionManager>>,
    /// Postgres pool created by the connection test; reused for migrations
    /// and the secrets store.
    #[cfg(feature = "postgres")]
    db_pool: Option<deadpool_postgres::Pool>,
    /// libSQL backend created by the connection test; reused for migrations
    /// and the secrets store.
    #[cfg(feature = "libsql")]
    db_backend: Option<crate::db::libsql_backend::LibSqlBackend>,
    /// Lazily initialized crypto for the encrypted secrets store.
    secrets_crypto: Option<Arc<SecretsCrypto>>,
    /// API key captured during provider setup, cached for model listing.
    llm_api_key: Option<SecretString>,
}
impl SetupWizard {
pub fn new() -> Self {
Self {
config: SetupConfig::default(),
settings: Settings::default(),
session_manager: None,
#[cfg(feature = "postgres")]
db_pool: None,
#[cfg(feature = "libsql")]
db_backend: None,
secrets_crypto: None,
llm_api_key: None,
}
}
/// Create a wizard with the given options and otherwise empty state.
pub fn with_config(config: SetupConfig) -> Self {
    Self {
        config,
        settings: Settings::default(),
        session_manager: None,
        // Database handles are populated by the connection-test steps.
        #[cfg(feature = "postgres")]
        db_pool: None,
        #[cfg(feature = "libsql")]
        db_backend: None,
        secrets_crypto: None,
        llm_api_key: None,
    }
}
pub fn with_session(mut self, session: Arc<SessionManager>) -> Self {
self.session_manager = Some(session);
self
}
/// Drive the wizard end-to-end: either only the channel step
/// (`channels_only`) or all seven steps in order, then persist the
/// accumulated settings and print a summary.
pub async fn run(&mut self) -> Result<(), SetupError> {
    print_header("IronClaw Setup Wizard");
    if self.config.channels_only {
        // Single-step mode: no database/security/provider prompts.
        print_step(1, 1, "Channel Configuration");
        self.step_channels().await?;
    } else {
        let total_steps = 7;
        print_step(1, total_steps, "Database Connection");
        self.step_database().await?;
        print_step(2, total_steps, "Security");
        self.step_security().await?;
        if !self.config.skip_auth {
            print_step(3, total_steps, "Inference Provider");
            self.step_inference_provider().await?;
        } else {
            print_info("Skipping inference provider setup (using existing config)");
        }
        print_step(4, total_steps, "Model Selection");
        self.step_model_selection().await?;
        print_step(5, total_steps, "Embeddings (Semantic Search)");
        self.step_embeddings()?;
        print_step(6, total_steps, "Channel Configuration");
        self.step_channels().await?;
        print_step(7, total_steps, "Background Tasks");
        self.step_heartbeat()?;
    }
    // Persist whatever was configured, in both modes.
    self.save_and_summarize().await?;
    Ok(())
}
/// Step 1: pick and configure a database backend.
///
/// When both backends are compiled in, `DATABASE_BACKEND` (env) wins; the
/// user is otherwise prompted. With only one backend compiled in, it is
/// used directly. Unknown env values fall back to PostgreSQL with a note.
async fn step_database(&mut self) -> Result<(), SetupError> {
    #[cfg(all(feature = "postgres", feature = "libsql"))]
    {
        // Env override: explicit backend choice skips the prompt entirely.
        let env_backend = std::env::var("DATABASE_BACKEND").ok();
        if let Some(ref backend) = env_backend {
            if backend == "libsql" || backend == "turso" || backend == "sqlite" {
                return self.step_database_libsql().await;
            }
            if backend != "postgres" && backend != "postgresql" {
                print_info(&format!(
                    "Unknown DATABASE_BACKEND '{}', defaulting to PostgreSQL",
                    backend
                ));
            }
            return self.step_database_postgres().await;
        }
        // Remember any previously saved choice so we can detect a switch.
        let pre_selected = self.settings.database_backend.as_deref().map(|b| match b {
            "libsql" | "turso" | "sqlite" => 1,
            _ => 0,
        });
        print_info("Which database backend would you like to use?");
        println!();
        let options = &[
            "PostgreSQL - production-grade, requires a running server",
            "libSQL - embedded SQLite, zero dependencies, optional Turso cloud sync",
        ];
        let choice =
            select_one("Select a database backend:", options).map_err(SetupError::Io)?;
        // Switching backends invalidates the old connection settings.
        if let Some(prev) = pre_selected
            && prev != choice
        {
            self.settings.database_url = None;
            self.settings.libsql_path = None;
            self.settings.libsql_url = None;
        }
        match choice {
            1 => return self.step_database_libsql().await,
            _ => return self.step_database_postgres().await,
        }
    }
    // Single-backend builds: no choice to make.
    #[cfg(all(feature = "postgres", not(feature = "libsql")))]
    {
        return self.step_database_postgres().await;
    }
    #[cfg(all(feature = "libsql", not(feature = "postgres")))]
    {
        return self.step_database_libsql().await;
    }
}
#[cfg(feature = "postgres")]
async fn step_database_postgres(&mut self) -> Result<(), SetupError> {
self.settings.database_backend = Some("postgres".to_string());
let existing_url = std::env::var("DATABASE_URL")
.ok()
.or_else(|| self.settings.database_url.clone());
if let Some(ref url) = existing_url {
let display_url = mask_password_in_url(url);
print_info(&format!("Existing database URL: {}", display_url));
if confirm("Use this database?", true).map_err(SetupError::Io)? {
if let Err(e) = self.test_database_connection_postgres(url).await {
print_error(&format!("Connection failed: {}", e));
print_info("Let's configure a new database URL.");
} else {
print_success("Database connection successful");
self.settings.database_url = Some(url.clone());
return Ok(());
}
}
}
println!();
print_info("Enter your PostgreSQL connection URL.");
print_info("Format: postgres://user:password@host:port/database");
println!();
loop {
let url = input("Database URL").map_err(SetupError::Io)?;
if url.is_empty() {
print_error("Database URL is required.");
continue;
}
print_info("Testing connection...");
match self.test_database_connection_postgres(&url).await {
Ok(()) => {
print_success("Database connection successful");
if confirm("Run database migrations?", true).map_err(SetupError::Io)? {
self.run_migrations_postgres().await?;
}
self.settings.database_url = Some(url);
return Ok(());
}
Err(e) => {
print_error(&format!("Connection failed: {}", e));
if !confirm("Try again?", true).map_err(SetupError::Io)? {
return Err(SetupError::Database(
"Database connection failed".to_string(),
));
}
}
}
}
}
#[cfg(feature = "libsql")]
async fn step_database_libsql(&mut self) -> Result<(), SetupError> {
self.settings.database_backend = Some("libsql".to_string());
let default_path = crate::config::default_libsql_path();
let default_path_str = default_path.to_string_lossy().to_string();
let existing_path = std::env::var("LIBSQL_PATH")
.ok()
.or_else(|| self.settings.libsql_path.clone());
if let Some(ref path) = existing_path {
print_info(&format!("Existing database path: {}", path));
if confirm("Use this database?", true).map_err(SetupError::Io)? {
let turso_url = std::env::var("LIBSQL_URL")
.ok()
.or_else(|| self.settings.libsql_url.clone());
let turso_token = std::env::var("LIBSQL_AUTH_TOKEN").ok();
match self
.test_database_connection_libsql(
path,
turso_url.as_deref(),
turso_token.as_deref(),
)
.await
{
Ok(()) => {
print_success("Database connection successful");
self.settings.libsql_path = Some(path.clone());
if let Some(url) = turso_url {
self.settings.libsql_url = Some(url);
}
return Ok(());
}
Err(e) => {
print_error(&format!("Connection failed: {}", e));
print_info("Let's configure a new database path.");
}
}
}
}
println!();
print_info("IronClaw uses an embedded SQLite database (libSQL).");
print_info("No external database server required.");
println!();
let path_input = optional_input(
"Database file path",
Some(&format!("default: {}", default_path_str)),
)
.map_err(SetupError::Io)?;
let db_path = path_input.unwrap_or(default_path_str.clone());
println!();
let use_turso =
confirm("Enable Turso cloud sync (remote replica)?", false).map_err(SetupError::Io)?;
let (turso_url, turso_token) = if use_turso {
print_info("Enter your Turso database URL and auth token.");
print_info("Format: libsql://your-db.turso.io");
println!();
let url = input("Turso URL").map_err(SetupError::Io)?;
if url.is_empty() {
print_error("Turso URL is required for cloud sync.");
(None, None)
} else {
let token_secret = secret_input("Auth token").map_err(SetupError::Io)?;
let token = token_secret.expose_secret().to_string();
if token.is_empty() {
print_error("Auth token is required for cloud sync.");
(None, None)
} else {
(Some(url), Some(token))
}
}
} else {
(None, None)
};
print_info("Testing connection...");
match self
.test_database_connection_libsql(&db_path, turso_url.as_deref(), turso_token.as_deref())
.await
{
Ok(()) => {
print_success("Database connection successful");
self.run_migrations_libsql().await?;
self.settings.libsql_path = Some(db_path);
if let Some(url) = turso_url {
self.settings.libsql_url = Some(url);
}
Ok(())
}
Err(e) => Err(SetupError::Database(format!("Connection failed: {}", e))),
}
}
#[cfg(feature = "postgres")]
async fn test_database_connection_postgres(&mut self, url: &str) -> Result<(), SetupError> {
let mut cfg = PoolConfig::new();
cfg.url = Some(url.to_string());
cfg.pool = Some(deadpool_postgres::PoolConfig {
max_size: 5,
..Default::default()
});
let pool = cfg
.create_pool(Some(Runtime::Tokio1), NoTls)
.map_err(|e| SetupError::Database(format!("Failed to create pool: {}", e)))?;
let _ = pool
.get()
.await
.map_err(|e| SetupError::Database(format!("Failed to connect: {}", e)))?;
self.db_pool = Some(pool);
Ok(())
}
#[cfg(feature = "libsql")]
async fn test_database_connection_libsql(
&mut self,
path: &str,
turso_url: Option<&str>,
turso_token: Option<&str>,
) -> Result<(), SetupError> {
use crate::db::libsql_backend::LibSqlBackend;
use std::path::Path;
let db_path = Path::new(path);
let backend = if let (Some(url), Some(token)) = (turso_url, turso_token) {
LibSqlBackend::new_remote_replica(db_path, url, token)
.await
.map_err(|e| SetupError::Database(format!("Failed to connect: {}", e)))?
} else {
LibSqlBackend::new_local(db_path)
.await
.map_err(|e| SetupError::Database(format!("Failed to open database: {}", e)))?
};
self.db_backend = Some(backend);
Ok(())
}
#[cfg(feature = "postgres")]
async fn run_migrations_postgres(&self) -> Result<(), SetupError> {
if let Some(ref pool) = self.db_pool {
use refinery::embed_migrations;
embed_migrations!("migrations");
print_info("Running migrations...");
let mut client = pool
.get()
.await
.map_err(|e| SetupError::Database(format!("Pool error: {}", e)))?;
migrations::runner()
.run_async(&mut **client)
.await
.map_err(|e| SetupError::Database(format!("Migration failed: {}", e)))?;
print_success("Migrations applied");
}
Ok(())
}
#[cfg(feature = "libsql")]
async fn run_migrations_libsql(&self) -> Result<(), SetupError> {
if let Some(ref backend) = self.db_backend {
use crate::db::Database;
print_info("Running migrations...");
backend
.run_migrations()
.await
.map_err(|e| SetupError::Database(format!("Migration failed: {}", e)))?;
print_success("Migrations applied");
}
Ok(())
}
/// Step 2: decide where the secrets master key lives.
///
/// Precedence: SECRETS_MASTER_KEY env var > existing OS-keychain key
/// (with confirmation) > user choice of keychain / env var / disabled.
/// On success `self.secrets_crypto` holds a ready cipher when a key is
/// locally available.
async fn step_security(&mut self) -> Result<(), SetupError> {
    let env_key_exists = std::env::var("SECRETS_MASTER_KEY").is_ok();
    if env_key_exists {
        print_info("Secrets master key found in SECRETS_MASTER_KEY environment variable.");
        self.settings.secrets_master_key_source = KeySource::Env;
        print_success("Security configured (env var)");
        return Ok(());
    }
    print_info("Checking OS keychain for existing master key...");
    if let Ok(keychain_key_bytes) = crate::secrets::keychain::get_master_key().await {
        // SecretsCrypto expects the key as lowercase hex.
        let key_hex: String = keychain_key_bytes
            .iter()
            .map(|b| format!("{:02x}", b))
            .collect();
        self.secrets_crypto = Some(Arc::new(
            SecretsCrypto::new(SecretString::from(key_hex))
                .map_err(|e| SetupError::Config(e.to_string()))?,
        ));
        print_info("Existing master key found in OS keychain.");
        if confirm("Use existing keychain key?", true).map_err(SetupError::Io)? {
            self.settings.secrets_master_key_source = KeySource::Keychain;
            print_success("Security configured (keychain)");
            return Ok(());
        }
        // User declined the keychain key; drop the crypto built from it.
        self.secrets_crypto = None;
    }
    println!();
    print_info("The secrets master key encrypts sensitive data like API tokens.");
    print_info("Choose where to store it:");
    println!();
    let options = [
        "OS Keychain (recommended for local installs)",
        "Environment variable (for CI/Docker)",
        "Skip (disable secrets features)",
    ];
    let choice = select_one("Select storage method:", &options).map_err(SetupError::Io)?;
    match choice {
        0 => {
            // Generate, persist to the keychain, and keep a live cipher.
            print_info("Generating master key...");
            let key = crate::secrets::keychain::generate_master_key();
            crate::secrets::keychain::store_master_key(&key)
                .await
                .map_err(|e| {
                    SetupError::Config(format!("Failed to store in keychain: {}", e))
                })?;
            let key_hex: String = key.iter().map(|b| format!("{:02x}", b)).collect();
            self.secrets_crypto = Some(Arc::new(
                SecretsCrypto::new(SecretString::from(key_hex))
                    .map_err(|e| SetupError::Config(e.to_string()))?,
            ));
            self.settings.secrets_master_key_source = KeySource::Keychain;
            print_success("Master key generated and stored in OS keychain");
        }
        1 => {
            // Env-var mode: print the export line; the user persists it.
            print_info("Generate a key and add it to your environment:");
            let key_hex = crate::secrets::keychain::generate_master_key_hex();
            println!();
            println!("  export SECRETS_MASTER_KEY={}", key_hex);
            println!();
            print_info("Add this to your shell profile or .env file.");
            self.settings.secrets_master_key_source = KeySource::Env;
            print_success("Configured for environment variable");
        }
        _ => {
            self.settings.secrets_master_key_source = KeySource::None;
            print_info("Secrets features disabled. Channel tokens must be set via env vars.");
        }
    }
    Ok(())
}
/// Step 3: choose and configure the inference provider.
///
/// If a known provider is already saved, the user may keep it (its setup
/// routine re-runs to validate credentials); otherwise a provider is
/// selected from the supported list and set up fresh.
async fn step_inference_provider(&mut self) -> Result<(), SetupError> {
    if let Some(ref current) = self.settings.llm_backend {
        // Human-readable label for the stored backend id.
        let display = match current.as_str() {
            "nearai" => "NEAR AI",
            "anthropic" => "Anthropic (Claude)",
            "openai" => "OpenAI",
            "ollama" => "Ollama (local)",
            "openai_compatible" => "OpenAI-compatible endpoint",
            other => other,
        };
        print_info(&format!("Current provider: {}", display));
        println!();
        let is_known = matches!(
            current.as_str(),
            "nearai" | "anthropic" | "openai" | "ollama" | "openai_compatible"
        );
        if is_known && confirm("Keep current provider?", true).map_err(SetupError::Io)? {
            // Re-run the provider's setup so credentials are (re)validated.
            match current.as_str() {
                "nearai" => return self.setup_nearai().await,
                "anthropic" => return self.setup_anthropic().await,
                "openai" => return self.setup_openai().await,
                "ollama" => return self.setup_ollama(),
                "openai_compatible" => return self.setup_openai_compatible().await,
                _ => {
                    // Unreachable in practice: guarded by `is_known` above.
                    return Err(SetupError::Config(format!(
                        "Unhandled provider: {}",
                        current
                    )));
                }
            }
        }
        if !is_known {
            print_info(&format!(
                "Unknown provider '{}', please select a supported provider.",
                current
            ));
        }
    }
    print_info("Select your inference provider:");
    println!();
    let options = &[
        "NEAR AI - multi-model access via NEAR account",
        "Anthropic - Claude models (direct API key)",
        "OpenAI - GPT models (direct API key)",
        "Ollama - local models, no API key needed",
        "OpenAI-compatible - custom endpoint (vLLM, LiteLLM, Together, etc.)",
    ];
    let choice = select_one("Provider:", options).map_err(SetupError::Io)?;
    match choice {
        0 => self.setup_nearai().await?,
        1 => self.setup_anthropic().await?,
        2 => self.setup_openai().await?,
        3 => self.setup_ollama()?,
        4 => self.setup_openai_compatible().await?,
        _ => return Err(SetupError::Config("Invalid provider selection".to_string())),
    }
    Ok(())
}
/// Configure NEAR AI as the backend: validate an existing session when one
/// is held, otherwise authenticate (reusing or creating a session manager).
async fn setup_nearai(&mut self) -> Result<(), SetupError> {
    self.settings.llm_backend = Some("nearai".to_string());
    // Fast path: an already-valid session needs no new login.
    if let Some(ref session) = self.session_manager
        && session.has_token().await
    {
        print_info("Existing session found. Validating...");
        match session.ensure_authenticated().await {
            Err(e) => {
                // Fall through to a fresh authentication below.
                print_info(&format!("Session invalid: {}. Re-authenticating...", e));
            }
            Ok(()) => {
                print_success("NEAR AI session valid");
                return Ok(());
            }
        }
    }
    // Reuse the held manager when present, otherwise build a default one.
    let session = match self.session_manager {
        Some(ref existing) => Arc::clone(existing),
        None => Arc::new(SessionManager::new(SessionConfig::default())),
    };
    session
        .ensure_authenticated()
        .await
        .map_err(|e| SetupError::Auth(e.to_string()))?;
    self.session_manager = Some(session);
    print_success("NEAR AI configured");
    Ok(())
}
/// Configure Anthropic by delegating to the shared API-key provider flow
/// with Anthropic-specific env var, secret name, and console URL.
async fn setup_anthropic(&mut self) -> Result<(), SetupError> {
    self.setup_api_key_provider(
        "anthropic",
        "ANTHROPIC_API_KEY",
        "llm_anthropic_api_key",
        "Anthropic API key",
        "https://console.anthropic.com/settings/keys",
    )
    .await
}
/// Configure OpenAI by delegating to the shared API-key provider flow
/// with OpenAI-specific env var, secret name, and dashboard URL.
async fn setup_openai(&mut self) -> Result<(), SetupError> {
    self.setup_api_key_provider(
        "openai",
        "OPENAI_API_KEY",
        "llm_openai_api_key",
        "OpenAI API key",
        "https://platform.openai.com/api-keys",
    )
    .await
}
/// Shared flow for API-key providers (Anthropic, OpenAI): reuse a key from
/// `env_var` when present and accepted, otherwise prompt for one; in both
/// cases try to persist it to the encrypted secrets store under
/// `secret_name`, and cache it in `self.llm_api_key` for model listing.
async fn setup_api_key_provider(
    &mut self,
    backend: &str,
    env_var: &str,
    secret_name: &str,
    prompt_label: &str,
    hint_url: &str,
) -> Result<(), SetupError> {
    let display_name = match backend {
        "anthropic" => "Anthropic",
        "openai" => "OpenAI",
        other => other,
    };
    self.settings.llm_backend = Some(backend.to_string());
    // Switching providers invalidates any previously selected model; the
    // `is_some()` guard was redundant — assign unconditionally.
    self.settings.selected_model = None;
    if let Ok(existing) = std::env::var(env_var) {
        print_info(&format!("{env_var} found: {}", mask_api_key(&existing)));
        if confirm("Use this key?", true).map_err(SetupError::Io)? {
            // Best-effort persistence: a secrets failure only warns here,
            // since the env var itself remains the source of truth.
            if let Ok(ctx) = self.init_secrets_context().await {
                let key = SecretString::from(existing.clone());
                if let Err(e) = ctx.save_secret(secret_name, &key).await {
                    tracing::warn!("Failed to persist env key to secrets: {}", e);
                }
            }
            self.llm_api_key = Some(SecretString::from(existing));
            print_success(&format!("{display_name} configured (from env)"));
            return Ok(());
        }
    }
    println!();
    print_info(&format!("Get your API key from: {hint_url}"));
    println!();
    let key = secret_input(prompt_label).map_err(SetupError::Io)?;
    let key_str = key.expose_secret();
    if key_str.is_empty() {
        return Err(SetupError::Config("API key cannot be empty".to_string()));
    }
    if let Ok(ctx) = self.init_secrets_context().await {
        // A manually entered key has no env fallback, so a save failure
        // is a hard error here.
        ctx.save_secret(secret_name, &key)
            .await
            .map_err(|e| SetupError::Config(format!("Failed to save API key: {e}")))?;
        print_success("API key encrypted and saved");
    } else {
        print_info(&format!(
            "Secrets not available. Set {env_var} in your environment."
        ));
    }
    self.llm_api_key = Some(SecretString::from(key_str.to_string()));
    print_success(&format!("{display_name} configured"));
    Ok(())
}
/// Configure Ollama (local models): prompt for the base URL, defaulting to
/// the saved value or `http://localhost:11434`. No API key is required.
fn setup_ollama(&mut self) -> Result<(), SetupError> {
    self.settings.llm_backend = Some("ollama".to_string());
    // Switching providers invalidates any previously selected model; the
    // `is_some()` guard was redundant — assign unconditionally.
    self.settings.selected_model = None;
    let default_url = self
        .settings
        .ollama_base_url
        .as_deref()
        .unwrap_or("http://localhost:11434");
    let url_input = optional_input(
        "Ollama base URL",
        Some(&format!("default: {}", default_url)),
    )
    .map_err(SetupError::Io)?;
    // Empty input falls back to the displayed default.
    let url = url_input.unwrap_or_else(|| default_url.to_string());
    self.settings.ollama_base_url = Some(url.clone());
    print_success(&format!("Ollama configured ({})", url));
    Ok(())
}
/// Configure a custom OpenAI-compatible endpoint (vLLM, LiteLLM, Together,
/// etc.): prompt for the base URL (offering any saved or `LLM_BASE_URL`
/// value) and optionally capture an API key into the secrets store.
async fn setup_openai_compatible(&mut self) -> Result<(), SetupError> {
    self.settings.llm_backend = Some("openai_compatible".to_string());
    // Switching providers invalidates any previously selected model; the
    // `is_some()` guard was redundant — assign unconditionally.
    self.settings.selected_model = None;
    // Saved settings take precedence over the LLM_BASE_URL env var.
    let existing_url = self
        .settings
        .openai_compatible_base_url
        .clone()
        .or_else(|| std::env::var("LLM_BASE_URL").ok());
    let url = if let Some(ref u) = existing_url {
        let url_input = optional_input("Base URL", Some(&format!("current: {}", u)))
            .map_err(SetupError::Io)?;
        url_input.unwrap_or_else(|| u.clone())
    } else {
        input("Base URL (e.g., http://localhost:8000/v1)").map_err(SetupError::Io)?
    };
    if url.is_empty() {
        return Err(SetupError::Config(
            "Base URL is required for OpenAI-compatible provider".to_string(),
        ));
    }
    self.settings.openai_compatible_base_url = Some(url.clone());
    if confirm("Does this endpoint require an API key?", false).map_err(SetupError::Io)? {
        let key = secret_input("API key").map_err(SetupError::Io)?;
        let key_str = key.expose_secret();
        // An empty key is treated as "no key" rather than an error.
        if !key_str.is_empty() {
            if let Ok(ctx) = self.init_secrets_context().await {
                ctx.save_secret("llm_compatible_api_key", &key)
                    .await
                    .map_err(|e| {
                        SetupError::Config(format!("Failed to save API key: {}", e))
                    })?;
                print_success("API key encrypted and saved");
            } else {
                print_info("Secrets not available. Set LLM_API_KEY in your environment.");
            }
        }
    }
    print_success(&format!("OpenAI-compatible configured ({})", url));
    Ok(())
}
/// Step 4: pick a model for the configured backend.
///
/// Offers to keep any saved model first, then fetches (or hard-codes)
/// a per-provider candidate list and lets the user choose; the
/// OpenAI-compatible backend takes a free-form model name instead.
async fn step_model_selection(&mut self) -> Result<(), SetupError> {
    if let Some(ref current) = self.settings.selected_model {
        print_info(&format!("Current model: {}", current));
        println!();
        let options = ["Keep current model", "Change model"];
        let choice =
            select_one("What would you like to do?", &options).map_err(SetupError::Io)?;
        if choice == 0 {
            print_success(&format!("Keeping {}", current));
            return Ok(());
        }
    }
    // Unset backend falls through to the NEAR AI branch below.
    let backend = self.settings.llm_backend.as_deref().unwrap_or("nearai");
    match backend {
        "anthropic" => {
            // Use the key cached during provider setup, if any, to list models.
            let cached = self
                .llm_api_key
                .as_ref()
                .map(|k| k.expose_secret().to_string());
            let models = fetch_anthropic_models(cached.as_deref()).await;
            self.select_from_model_list(&models)?;
        }
        "openai" => {
            let cached = self
                .llm_api_key
                .as_ref()
                .map(|k| k.expose_secret().to_string());
            let models = fetch_openai_models(cached.as_deref()).await;
            self.select_from_model_list(&models)?;
        }
        "ollama" => {
            let base_url = self
                .settings
                .ollama_base_url
                .as_deref()
                .unwrap_or("http://localhost:11434");
            let models = fetch_ollama_models(base_url).await;
            if models.is_empty() {
                print_info("No models found. Pull one first: ollama pull llama3");
            }
            self.select_from_model_list(&models)?;
        }
        "openai_compatible" => {
            // Custom endpoints have no discoverable catalog; take free text.
            let model_id = input("Model name (e.g., meta-llama/Llama-3-8b-chat-hf)")
                .map_err(SetupError::Io)?;
            if model_id.is_empty() {
                return Err(SetupError::Config("Model name is required".to_string()));
            }
            self.settings.selected_model = Some(model_id.clone());
            print_success(&format!("Selected {}", model_id));
        }
        _ => {
            // NEAR AI (and anything unrecognized): try the live list, fall
            // back to a curated default set when the fetch yields nothing.
            let fetched = self.fetch_nearai_models().await;
            let default_models: Vec<(String, String)> = vec![
                (
                    "fireworks::accounts/fireworks/models/llama4-maverick-instruct-basic"
                        .into(),
                    "Llama 4 Maverick (default, fast)".into(),
                ),
                (
                    "anthropic::claude-sonnet-4-20250514".into(),
                    "Claude Sonnet 4 (best quality)".into(),
                ),
                ("openai::gpt-4o".into(), "GPT-4o".into()),
            ];
            let models = if fetched.is_empty() {
                default_models
            } else {
                // Fetched ids double as their own display labels.
                fetched.iter().map(|m| (m.clone(), m.clone())).collect()
            };
            self.select_from_model_list(&models)?;
        }
    }
    Ok(())
}
/// Present `(model_id, description)` pairs plus a trailing "Custom model ID"
/// entry; store the chosen (or manually entered, non-empty) id in settings.
fn select_from_model_list(&mut self, models: &[(String, String)]) -> Result<(), SetupError> {
    println!("Available models:");
    println!();
    // Show descriptions; the final entry is always the custom-id escape hatch.
    let mut labels: Vec<&str> = models.iter().map(|(_, desc)| desc.as_str()).collect();
    labels.push("Custom model ID");
    let custom_idx = labels.len() - 1;
    let choice = select_one("Select a model:", &labels).map_err(SetupError::Io)?;
    let model_id = if choice == custom_idx {
        // Re-prompt until a non-empty id is entered.
        loop {
            let entered = input("Enter model ID").map_err(SetupError::Io)?;
            let entered = entered.trim();
            if entered.is_empty() {
                println!("Model ID cannot be empty.");
            } else {
                break entered.to_string();
            }
        }
    } else {
        models[choice].0.clone()
    };
    self.settings.selected_model = Some(model_id.clone());
    print_success(&format!("Selected {}", model_id));
    Ok(())
}
/// Query NEAR AI for its model catalog via a throwaway provider instance.
///
/// Returns an empty list when no session is held or when provider
/// creation / listing fails (each failure prints an informational note,
/// letting the caller fall back to a default model set).
async fn fetch_nearai_models(&self) -> Vec<String> {
    let session = match self.session_manager {
        Some(ref s) => Arc::clone(s),
        None => return vec![],
    };
    use crate::config::LlmConfig;
    use crate::llm::create_llm_provider;
    // Allow endpoint overrides for self-hosted / staging deployments.
    let base_url = std::env::var("NEARAI_BASE_URL")
        .unwrap_or_else(|_| "https://cloud-api.near.ai".to_string());
    let auth_base_url = std::env::var("NEARAI_AUTH_URL")
        .unwrap_or_else(|_| "https://private.near.ai".to_string());
    // Minimal config just to construct a provider; the model name is never
    // used for listing, hence the "dummy" placeholder.
    let config = LlmConfig {
        backend: crate::config::LlmBackend::NearAi,
        nearai: crate::config::NearAiConfig {
            model: "dummy".to_string(),
            cheap_model: None,
            base_url,
            auth_base_url,
            session_path: crate::llm::session::default_session_path(),
            api_mode: crate::config::NearAiApiMode::Responses,
            api_key: None,
            fallback_model: None,
            max_retries: 3,
        },
        openai: None,
        anthropic: None,
        ollama: None,
        openai_compatible: None,
    };
    match create_llm_provider(&config, session) {
        Ok(provider) => match provider.list_models().await {
            Ok(models) => models,
            Err(e) => {
                print_info(&format!("Could not fetch models: {}. Using defaults.", e));
                vec![]
            }
        },
        Err(e) => {
            print_info(&format!(
                "Could not initialize provider: {}. Using defaults.",
                e
            ));
            vec![]
        }
    }
}
/// Step 5: enable/disable semantic-search embeddings and pick a provider.
///
/// Auto-selects OpenAI when that's already the LLM backend with a key;
/// otherwise offers NEAR AI (when a session/backend suggests it) and/or
/// OpenAI, or disables embeddings when neither is available.
fn step_embeddings(&mut self) -> Result<(), SetupError> {
    print_info("Embeddings enable semantic search in your workspace memory.");
    println!();
    if !confirm("Enable semantic search?", true).map_err(SetupError::Io)? {
        self.settings.embeddings.enabled = false;
        print_info("Embeddings disabled. Workspace will use keyword search only.");
        return Ok(());
    }
    let backend = self.settings.llm_backend.as_deref().unwrap_or("nearai");
    // An OpenAI key counts whether it came from the env or provider setup.
    let has_openai_key = std::env::var("OPENAI_API_KEY").is_ok()
        || (backend == "openai" && self.llm_api_key.is_some());
    // NOTE(review): any held session counts as NEAR AI availability here,
    // even for non-nearai backends — confirm this is intended.
    let has_nearai = backend == "nearai" || self.session_manager.is_some();
    if backend == "openai" && has_openai_key {
        // Shortcut: reuse the LLM key, no extra prompt needed.
        self.settings.embeddings.enabled = true;
        self.settings.embeddings.provider = "openai".to_string();
        self.settings.embeddings.model = "text-embedding-3-small".to_string();
        print_success("Embeddings enabled via OpenAI (using existing API key)");
        return Ok(());
    }
    if !has_nearai && !has_openai_key {
        print_info("No NEAR AI session or OpenAI key found for embeddings.");
        print_info("Set OPENAI_API_KEY in your environment to enable embeddings.");
        self.settings.embeddings.enabled = false;
        return Ok(());
    }
    // Offer NEAR AI first when available; OpenAI is always listed.
    let mut options = Vec::new();
    if has_nearai {
        options.push("NEAR AI (uses same auth, no extra cost)");
    }
    options.push("OpenAI (requires API key)");
    let choice = select_one("Select embeddings provider:", &options).map_err(SetupError::Io)?;
    // Index 0 is NEAR AI only when it was actually offered.
    let provider = if has_nearai && choice == 0 {
        "nearai"
    } else {
        "openai"
    };
    match provider {
        "nearai" => {
            self.settings.embeddings.enabled = true;
            self.settings.embeddings.provider = "nearai".to_string();
            self.settings.embeddings.model = "text-embedding-3-small".to_string();
            print_success("Embeddings enabled via NEAR AI");
        }
        _ => {
            // OpenAI is configured even without a key; the user is told
            // to supply one via the environment.
            if !has_openai_key {
                print_info("OPENAI_API_KEY not set in environment.");
                print_info("Add it to your .env file or environment to enable embeddings.");
            }
            self.settings.embeddings.enabled = true;
            self.settings.embeddings.provider = "openai".to_string();
            self.settings.embeddings.model = "text-embedding-3-small".to_string();
            print_success("Embeddings configured for OpenAI");
        }
    }
    Ok(())
}
async fn init_secrets_context(&mut self) -> Result<SecretsContext, SetupError> {
let crypto = if let Some(ref c) = self.secrets_crypto {
Arc::clone(c)
} else {
let key = if let Ok(env_key) = std::env::var("SECRETS_MASTER_KEY") {
env_key
} else if let Ok(keychain_key) = crate::secrets::keychain::get_master_key().await {
keychain_key.iter().map(|b| format!("{:02x}", b)).collect()
} else {
return Err(SetupError::Config(
"Secrets not configured. Run full setup or set SECRETS_MASTER_KEY.".to_string(),
));
};
let crypto = Arc::new(
SecretsCrypto::new(SecretString::from(key))
.map_err(|e| SetupError::Config(e.to_string()))?,
);
self.secrets_crypto = Some(Arc::clone(&crypto));
crypto
};
let selected_backend = self
.settings
.database_backend
.as_deref()
.unwrap_or("postgres");
#[cfg(all(feature = "libsql", feature = "postgres"))]
{
if selected_backend == "libsql" {
if let Some(store) = self.create_libsql_secrets_store(&crypto)? {
return Ok(SecretsContext::from_store(store, "default"));
}
if let Some(store) = self.create_postgres_secrets_store(&crypto).await? {
return Ok(SecretsContext::from_store(store, "default"));
}
} else {
if let Some(store) = self.create_postgres_secrets_store(&crypto).await? {
return Ok(SecretsContext::from_store(store, "default"));
}
if let Some(store) = self.create_libsql_secrets_store(&crypto)? {
return Ok(SecretsContext::from_store(store, "default"));
}
}
}
#[cfg(all(feature = "postgres", not(feature = "libsql")))]
{
let _ = selected_backend;
if let Some(store) = self.create_postgres_secrets_store(&crypto).await? {
return Ok(SecretsContext::from_store(store, "default"));
}
}
#[cfg(all(feature = "libsql", not(feature = "postgres")))]
{
let _ = selected_backend;
if let Some(store) = self.create_libsql_secrets_store(&crypto)? {
return Ok(SecretsContext::from_store(store, "default"));
}
}
Err(SetupError::Config(
"No database backend available for secrets storage".to_string(),
))
}
#[cfg(feature = "postgres")]
async fn create_postgres_secrets_store(
&mut self,
crypto: &Arc<SecretsCrypto>,
) -> Result<Option<Arc<dyn SecretsStore>>, SetupError> {
let pool = if let Some(ref p) = self.db_pool {
p.clone()
} else {
let url = self
.settings
.database_url
.clone()
.or_else(|| std::env::var("DATABASE_URL").ok());
if let Some(url) = url {
self.test_database_connection_postgres(&url).await?;
self.run_migrations_postgres().await?;
match self.db_pool.clone() {
Some(pool) => pool,
None => {
return Err(SetupError::Database(
"Database pool not initialized after connection test".to_string(),
));
}
}
} else {
return Ok(None);
}
};
let store: Arc<dyn SecretsStore> = Arc::new(crate::secrets::PostgresSecretsStore::new(
pool,
Arc::clone(crypto),
));
Ok(Some(store))
}
#[cfg(feature = "libsql")]
fn create_libsql_secrets_store(
&self,
crypto: &Arc<SecretsCrypto>,
) -> Result<Option<Arc<dyn SecretsStore>>, SetupError> {
if let Some(ref backend) = self.db_backend {
let store: Arc<dyn SecretsStore> = Arc::new(crate::secrets::LibSqlSecretsStore::new(
backend.shared_db(),
Arc::clone(crypto),
));
Ok(Some(store))
} else {
Ok(None)
}
}
/// Step 6: configure message channels.
///
/// Flow: optional tunnel URL; discover installed WASM channels on disk;
/// multi-select among CLI (always on), HTTP webhook, and WASM channels;
/// install any bundled channels the user picked but doesn't have; then
/// run per-channel setup, using the secrets store when available and
/// falling back to env-var instructions otherwise.
async fn step_channels(&mut self) -> Result<(), SetupError> {
    // Tunnel first: channels may need a public URL. Failure is non-fatal.
    match setup_tunnel(&self.settings) {
        Ok(Some(url)) => {
            self.settings.tunnel.public_url = Some(url);
        }
        Ok(None) => {
            self.settings.tunnel.public_url = None;
        }
        Err(e) => {
            print_info(&format!("Tunnel setup skipped: {}", e));
        }
    }
    println!();
    let channels_dir = dirs::home_dir()
        .ok_or_else(|| SetupError::Config("Could not determine home directory".into()))?
        .join(".ironclaw/channels");
    let mut discovered_channels = discover_wasm_channels(&channels_dir).await;
    // Names already on disk — used to skip re-installing bundled channels.
    let installed_names: HashSet<String> = discovered_channels
        .iter()
        .map(|(name, _)| name.clone())
        .collect();
    let wasm_channel_names = wasm_channel_option_names(&discovered_channels);
    // Option list layout: [0] = CLI, [1] = HTTP, [2..] = WASM channels.
    let mut options: Vec<(String, bool)> = vec![
        ("CLI/TUI (always enabled)".to_string(), true),
        (
            "HTTP webhook".to_string(),
            self.settings.channels.http_enabled,
        ),
    ];
    for name in &wasm_channel_names {
        let is_enabled = self.settings.channels.wasm_channels.contains(name);
        let display_name = format!("{} (WASM)", capitalize_first(name));
        options.push((display_name, is_enabled));
    }
    let options_refs: Vec<(&str, bool)> =
        options.iter().map(|(s, b)| (s.as_str(), *b)).collect();
    let selected = select_many("Which channels do you want to enable?", &options_refs)
        .map_err(SetupError::Io)?;
    // Map selected indices back to WASM channel names (offset by the two
    // fixed entries above).
    let selected_wasm_channels: Vec<String> = wasm_channel_names
        .iter()
        .enumerate()
        .filter_map(|(idx, name)| {
            if selected.contains(&(idx + 2)) {
                Some(name.clone())
            } else {
                None
            }
        })
        .collect();
    // Install any selected bundled channels not yet on disk, then rescan.
    if let Some(installed) = install_selected_bundled_channels(
        &channels_dir,
        &selected_wasm_channels,
        &installed_names,
    )
    .await?
        && !installed.is_empty()
    {
        print_success(&format!("Installed channels: {}", installed.join(", ")));
        discovered_channels = discover_wasm_channels(&channels_dir).await;
    }
    // Secrets are needed for HTTP (index 1) or any WASM channel; failure
    // downgrades to env-var-based configuration rather than aborting.
    let needs_secrets = selected.contains(&1) || !selected_wasm_channels.is_empty();
    let secrets = if needs_secrets {
        match self.init_secrets_context().await {
            Ok(ctx) => Some(ctx),
            Err(e) => {
                print_info(&format!("Secrets not available: {}", e));
                print_info("Channel tokens must be set via environment variables.");
                None
            }
        }
    } else {
        None
    };
    if selected.contains(&1) {
        println!();
        if let Some(ref ctx) = secrets {
            let result = setup_http(ctx).await?;
            self.settings.channels.http_enabled = result.enabled;
            self.settings.channels.http_port = Some(result.port);
        } else {
            // No secrets store: enable with defaults and point at the env var.
            self.settings.channels.http_enabled = true;
            self.settings.channels.http_port = Some(8080);
            print_info("HTTP webhook enabled on port 8080 (set HTTP_WEBHOOK_SECRET in env)");
        }
    } else {
        self.settings.channels.http_enabled = false;
    }
    let discovered_by_name: HashMap<String, ChannelCapabilitiesFile> =
        discovered_channels.into_iter().collect();
    let mut enabled_wasm_channels = Vec::new();
    for channel_name in selected_wasm_channels {
        println!();
        if let Some(ref ctx) = secrets {
            let result = if let Some(cap_file) = discovered_by_name.get(&channel_name) {
                if !cap_file.setup.required_secrets.is_empty() {
                    // Generic secret-driven setup from the capabilities file.
                    setup_wasm_channel(ctx, &channel_name, &cap_file.setup).await?
                } else if channel_name == "telegram" {
                    // Telegram has a bespoke flow that also captures owner_id.
                    let telegram_result = setup_telegram(ctx, &self.settings).await?;
                    if let Some(owner_id) = telegram_result.owner_id {
                        self.settings.channels.telegram_owner_id = Some(owner_id);
                    }
                    crate::setup::channels::WasmChannelSetupResult {
                        enabled: telegram_result.enabled,
                        channel_name: "telegram".to_string(),
                    }
                } else {
                    // No secrets required and no bespoke flow: enable as-is.
                    print_info(&format!(
                        "No setup configuration found for {}",
                        channel_name
                    ));
                    crate::setup::channels::WasmChannelSetupResult {
                        enabled: true,
                        channel_name: channel_name.clone(),
                    }
                }
            } else {
                // Selected but missing on disk (install must have failed).
                print_info(&format!(
                    "Channel '{}' is selected but not available on disk.",
                    channel_name
                ));
                continue;
            };
            if result.enabled {
                enabled_wasm_channels.push(result.channel_name);
            }
        } else {
            // No secrets store: enable and defer token config to env vars.
            print_info(&format!(
                "{} enabled (configure tokens via environment)",
                capitalize_first(&channel_name)
            ));
            enabled_wasm_channels.push(channel_name.clone());
        }
    }
    self.settings.channels.wasm_channels = enabled_wasm_channels;
    Ok(())
}
/// Wizard step: enable/disable the heartbeat and configure its interval.
///
/// Asks the user to confirm, then prompts for an interval in minutes and an
/// optional notification channel. Invalid or zero intervals fall back to the
/// advertised 30-minute default instead of being silently ignored.
///
/// # Errors
/// Returns [`SetupError::Io`] if reading user input fails.
fn step_heartbeat(&mut self) -> Result<(), SetupError> {
    print_info("Heartbeat runs periodic background tasks (e.g., checking your calendar,");
    print_info("monitoring for notifications, running scheduled workflows).");
    println!();
    if !confirm("Enable heartbeat?", false).map_err(SetupError::Io)? {
        self.settings.heartbeat.enabled = false;
        print_info("Heartbeat disabled.");
        return Ok(());
    }
    self.settings.heartbeat.enabled = true;
    // Matches the "default: 30" hint shown in the prompt below.
    const DEFAULT_INTERVAL_SECS: u64 = 1800;
    let interval_str = optional_input("Check interval in minutes", Some("default: 30"))
        .map_err(SetupError::Io)?;
    self.settings.heartbeat.interval_secs = match interval_str {
        Some(s) => match s.trim().parse::<u64>() {
            // Reject 0 (a zero-second interval would loop continuously).
            Ok(mins) if mins > 0 => mins * 60,
            // Previously an unparseable value was silently ignored, leaving
            // whatever interval was already in the settings; warn and use the
            // documented default instead.
            _ => {
                print_info(&format!(
                    "Invalid interval '{}', using default of 30 minutes",
                    s
                ));
                DEFAULT_INTERVAL_SECS
            }
        },
        None => DEFAULT_INTERVAL_SECS,
    };
    let notify_channel = optional_input("Notify channel on findings", Some("e.g., telegram"))
        .map_err(SetupError::Io)?;
    self.settings.heartbeat.notify_channel = notify_channel;
    print_success(&format!(
        "Heartbeat enabled (every {} minutes)",
        self.settings.heartbeat.interval_secs / 60
    ));
    Ok(())
}
/// Persist all collected settings and print the final configuration summary.
///
/// Settings are written to the first available database backend (the Postgres
/// pool if connected, otherwise the libSQL backend); if neither is available
/// this returns [`SetupError::Database`]. Database bootstrap values (backend
/// choice, URLs, paths) are additionally mirrored to `.env`, since they are
/// needed before the database itself can be reached.
async fn save_and_summarize(&mut self) -> Result<(), SetupError> {
    self.settings.onboard_completed = true;
    {
        let db_map = self.settings.to_db_map();
        // Each cfg-gated backend attempt below shadows `saved`: the first
        // backend that persists the settings flips it to true, making the
        // later attempts no-ops. With no database features compiled in,
        // `saved` stays false and we error out below.
        let saved = false;
        #[cfg(feature = "postgres")]
        let saved = if !saved {
            if let Some(ref pool) = self.db_pool {
                let store = crate::history::Store::from_pool(pool.clone());
                store
                    .set_all_settings("default", &db_map)
                    .await
                    .map_err(|e| {
                        SetupError::Database(format!(
                            "Failed to save settings to database: {}",
                            e
                        ))
                    })?;
                true
            } else {
                false
            }
        } else {
            saved
        };
        #[cfg(feature = "libsql")]
        let saved = if !saved {
            if let Some(ref backend) = self.db_backend {
                // Bring the trait method `set_all_settings` into scope.
                use crate::db::Database as _;
                backend
                    .set_all_settings("default", &db_map)
                    .await
                    .map_err(|e| {
                        SetupError::Database(format!(
                            "Failed to save settings to database: {}",
                            e
                        ))
                    })?;
                true
            } else {
                false
            }
        } else {
            saved
        };
        if !saved {
            return Err(SetupError::Database(
                "No database connection, cannot save settings".to_string(),
            ));
        }
    }
    {
        // Mirror only the database bootstrap values to `.env`; everything
        // else lives in the database written above.
        let mut env_vars: Vec<(&str, String)> = Vec::new();
        if let Some(ref backend) = self.settings.database_backend {
            env_vars.push(("DATABASE_BACKEND", backend.clone()));
        }
        if let Some(ref url) = self.settings.database_url {
            env_vars.push(("DATABASE_URL", url.clone()));
        }
        if let Some(ref path) = self.settings.libsql_path {
            env_vars.push(("LIBSQL_PATH", path.clone()));
        }
        if let Some(ref url) = self.settings.libsql_url {
            env_vars.push(("LIBSQL_URL", url.clone()));
        }
        if !env_vars.is_empty() {
            let pairs: Vec<(&str, &str)> =
                env_vars.iter().map(|(k, v)| (*k, v.as_str())).collect();
            crate::bootstrap::save_bootstrap_env(&pairs).map_err(|e| {
                SetupError::Io(std::io::Error::other(format!(
                    "Failed to save bootstrap env to .env: {}",
                    e
                )))
            })?;
        }
    }
    println!();
    print_success("Configuration saved to database");
    println!();
    println!("Configuration Summary:");
    println!("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━");
    // Postgres is the implicit default when no backend was chosen explicitly.
    let backend = self
        .settings
        .database_backend
        .as_deref()
        .unwrap_or("postgres");
    match backend {
        "libsql" => {
            if let Some(ref path) = self.settings.libsql_path {
                println!(" Database: libSQL ({})", path);
            } else {
                println!(" Database: libSQL (default path)");
            }
            if self.settings.libsql_url.is_some() {
                println!(" Turso sync: enabled");
            }
        }
        _ => {
            if self.settings.database_url.is_some() {
                println!(" Database: PostgreSQL (configured)");
            }
        }
    }
    match self.settings.secrets_master_key_source {
        KeySource::Keychain => println!(" Security: OS keychain"),
        KeySource::Env => println!(" Security: environment variable"),
        KeySource::None => println!(" Security: disabled"),
    }
    if let Some(ref provider) = self.settings.llm_backend {
        // Map internal backend ids to human-readable display names.
        let display = match provider.as_str() {
            "nearai" => "NEAR AI",
            "anthropic" => "Anthropic",
            "openai" => "OpenAI",
            "ollama" => "Ollama",
            "openai_compatible" => "OpenAI-compatible",
            other => other,
        };
        println!(" Provider: {}", display);
    }
    if let Some(ref model) = self.settings.selected_model {
        // Truncate long model ids; char-based so multi-byte ids don't split
        // mid-character (byte slicing could panic).
        let display = if model.chars().count() > 40 {
            let truncated: String = model.chars().take(37).collect();
            format!("{}...", truncated)
        } else {
            model.clone()
        };
        println!(" Model: {}", display);
    }
    if self.settings.embeddings.enabled {
        println!(
            " Embeddings: {} ({})",
            self.settings.embeddings.provider, self.settings.embeddings.model
        );
    } else {
        println!(" Embeddings: disabled");
    }
    if let Some(ref tunnel_url) = self.settings.tunnel.public_url {
        println!(" Tunnel: {}", tunnel_url);
    }
    println!(" Channels:");
    println!(" - CLI/TUI: enabled");
    if self.settings.channels.http_enabled {
        let port = self.settings.channels.http_port.unwrap_or(8080);
        println!(" - HTTP: enabled (port {})", port);
    }
    for channel_name in &self.settings.channels.wasm_channels {
        // A public tunnel URL implies webhook delivery; without one the
        // channels fall back to polling.
        let mode = if self.settings.tunnel.public_url.is_some() {
            "webhook"
        } else {
            "polling"
        };
        println!(
            " - {}: enabled ({})",
            capitalize_first(channel_name),
            mode
        );
    }
    if self.settings.heartbeat.enabled {
        println!(
            " Heartbeat: every {} minutes",
            self.settings.heartbeat.interval_secs / 60
        );
    }
    println!();
    println!("To start the agent, run:");
    println!(" ironclaw");
    println!();
    println!("To change settings later:");
    println!(" ironclaw config set <setting> <value>");
    println!(" ironclaw onboard");
    println!();
    Ok(())
}
}
impl Default for SetupWizard {
fn default() -> Self {
Self::new()
}
}
/// Mask the password component of a database URL for safe display.
///
/// `postgres://user:secret@host/db` becomes `postgres://user:****@host/db`.
/// URLs without a scheme, without credentials, or without a password are
/// returned unchanged.
#[cfg(feature = "postgres")]
fn mask_password_in_url(url: &str) -> String {
    let Some(scheme_end) = url.find("://") else {
        return url.to_string();
    };
    let credentials_start = scheme_end + 3;
    // The authority component ends at the first '/', '?' or '#' (RFC 3986);
    // searching for '@' beyond that point could match one inside the path or
    // query string of a URL with no credentials at all.
    let authority_end = url[credentials_start..]
        .find(|c: char| c == '/' || c == '?' || c == '#')
        .map_or(url.len(), |i| credentials_start + i);
    // Use the LAST '@' in the authority: the host part cannot contain '@',
    // but a raw '@' may appear inside the password. Matching the first '@'
    // (the previous behavior) left the tail of such a password unmasked.
    let Some(at_pos) = url[credentials_start..authority_end].rfind('@') else {
        return url.to_string();
    };
    let at_abs = credentials_start + at_pos;
    let credentials = &url[credentials_start..at_abs];
    // No ':' in the userinfo means there is no password component to hide.
    let Some(colon_pos) = credentials.find(':') else {
        return url.to_string();
    };
    let scheme = &url[..credentials_start];
    let username = &credentials[..colon_pos];
    let after_at = &url[at_abs..];
    format!("{}{}:****{}", scheme, username, after_at)
}
/// List Anthropic models as `(id, label)` pairs for the model picker.
///
/// Tries the live `/v1/models` endpoint using the cached key from the wizard
/// or `ANTHROPIC_API_KEY`; on any failure (no key, network error, non-success
/// status, bad JSON, empty list) a small static fallback list is returned.
async fn fetch_anthropic_models(cached_key: Option<&str>) -> Vec<(String, String)> {
    let fallback: Vec<(String, String)> = vec![
        ("claude-sonnet-4-20250514".into(), "Claude Sonnet 4".into()),
        ("claude-opus-4-20250514".into(), "Claude Opus 4".into()),
        (
            "claude-3-5-haiku-20241022".into(),
            "Claude 3.5 Haiku (fast)".into(),
        ),
    ];
    // Prefer the key collected earlier in the wizard, then the environment.
    let Some(api_key) = cached_key
        .map(String::from)
        .or_else(|| std::env::var("ANTHROPIC_API_KEY").ok())
        .filter(|k| !k.is_empty())
    else {
        return fallback;
    };
    let response = reqwest::Client::new()
        .get("https://api.anthropic.com/v1/models")
        .header("x-api-key", &api_key)
        .header("anthropic-version", "2023-06-01")
        .timeout(std::time::Duration::from_secs(5))
        .send()
        .await;
    let response = match response {
        Ok(r) if r.status().is_success() => r,
        _ => return fallback,
    };
    #[derive(serde::Deserialize)]
    struct ModelEntry {
        id: String,
    }
    #[derive(serde::Deserialize)]
    struct ModelsResponse {
        data: Vec<ModelEntry>,
    }
    let Ok(body) = response.json::<ModelsResponse>().await else {
        return fallback;
    };
    // The id doubles as the display label for live results.
    let mut models: Vec<(String, String)> = body
        .data
        .into_iter()
        .filter(|m| !m.id.contains("embedding") && !m.id.contains("audio"))
        .map(|m| (m.id.clone(), m.id))
        .collect();
    if models.is_empty() {
        return fallback;
    }
    models.sort_by(|a, b| a.0.cmp(&b.0));
    models
}
/// List OpenAI chat models as `(id, label)` pairs for the model picker.
///
/// Tries the live `/v1/models` endpoint using the cached key from the wizard
/// or `OPENAI_API_KEY`; on any failure (no key, network error, non-success
/// status, bad JSON, empty list) a small static fallback list is returned.
async fn fetch_openai_models(cached_key: Option<&str>) -> Vec<(String, String)> {
    let fallback: Vec<(String, String)> = vec![
        ("gpt-4o".into(), "GPT-4o".into()),
        ("gpt-4o-mini".into(), "GPT-4o Mini (fast)".into()),
        ("o3".into(), "o3 (reasoning)".into()),
    ];
    // Prefer the key collected earlier in the wizard, then the environment.
    let Some(api_key) = cached_key
        .map(String::from)
        .or_else(|| std::env::var("OPENAI_API_KEY").ok())
        .filter(|k| !k.is_empty())
    else {
        return fallback;
    };
    let response = reqwest::Client::new()
        .get("https://api.openai.com/v1/models")
        .bearer_auth(&api_key)
        .timeout(std::time::Duration::from_secs(5))
        .send()
        .await;
    let response = match response {
        Ok(r) if r.status().is_success() => r,
        _ => return fallback,
    };
    #[derive(serde::Deserialize)]
    struct ModelEntry {
        id: String,
    }
    #[derive(serde::Deserialize)]
    struct ModelsResponse {
        data: Vec<ModelEntry>,
    }
    let Ok(body) = response.json::<ModelsResponse>().await else {
        return fallback;
    };
    // Keep only chat-capable model families; drop realtime/audio variants.
    const CHAT_PREFIXES: [&str; 6] = ["gpt-4", "gpt-3.5", "o1", "o3", "o4", "chatgpt"];
    let mut models: Vec<(String, String)> = body
        .data
        .into_iter()
        .filter(|m| {
            CHAT_PREFIXES.iter().any(|p| m.id.starts_with(p))
                && !m.id.contains("realtime")
                && !m.id.contains("audio")
        })
        .map(|m| (m.id.clone(), m.id))
        .collect();
    if models.is_empty() {
        return fallback;
    }
    models.sort_by(|a, b| a.0.cmp(&b.0));
    models
}
/// List locally installed Ollama models via `GET {base_url}/api/tags`.
///
/// Falls back to a small static list when Ollama is unreachable, returns a
/// non-success status, or the response cannot be parsed. Live results are
/// sorted by name so the menu order is stable — consistent with
/// `fetch_anthropic_models` and `fetch_openai_models`, which already sort.
async fn fetch_ollama_models(base_url: &str) -> Vec<(String, String)> {
    let static_defaults = vec![
        ("llama3".into(), "llama3".into()),
        ("mistral".into(), "mistral".into()),
        ("codellama".into(), "codellama".into()),
    ];
    let url = format!("{}/api/tags", base_url.trim_end_matches('/'));
    let client = reqwest::Client::new();
    let resp = match client
        .get(&url)
        .timeout(std::time::Duration::from_secs(5))
        .send()
        .await
    {
        Ok(r) if r.status().is_success() => r,
        Ok(_) => return static_defaults,
        Err(_) => {
            // A connection error usually means the local daemon isn't running.
            print_info("Could not connect to Ollama. Is it running?");
            return static_defaults;
        }
    };
    #[derive(serde::Deserialize)]
    struct ModelEntry {
        name: String,
    }
    #[derive(serde::Deserialize)]
    struct TagsResponse {
        models: Vec<ModelEntry>,
    }
    match resp.json::<TagsResponse>().await {
        Ok(body) => {
            // The model name doubles as the display label.
            let mut models: Vec<(String, String)> = body
                .models
                .into_iter()
                .map(|m| (m.name.clone(), m.name))
                .collect();
            if models.is_empty() {
                return static_defaults;
            }
            // Consistency: the other model fetchers sort; this one previously
            // showed models in whatever order the daemon returned them.
            models.sort_by(|a, b| a.0.cmp(&b.0));
            models
        }
        Err(_) => static_defaults,
    }
}
/// Scan `dir` for installed WASM channels.
///
/// A channel counts as installed when both `<name>.capabilities.json` and the
/// companion `<name>.wasm` binary exist side by side. Unreadable or
/// unparseable capability files are logged and skipped. Results are sorted by
/// channel name.
async fn discover_wasm_channels(dir: &std::path::Path) -> Vec<(String, ChannelCapabilitiesFile)> {
    let mut found: Vec<(String, ChannelCapabilitiesFile)> = Vec::new();
    if !dir.is_dir() {
        return found;
    }
    let Ok(mut entries) = tokio::fs::read_dir(dir).await else {
        return found;
    };
    while let Ok(Some(entry)) = entries.next_entry().await {
        let path = entry.path();
        let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        if !filename.ends_with(".capabilities.json") {
            continue;
        }
        let name = filename.trim_end_matches(".capabilities.json").to_string();
        if name.is_empty() {
            continue;
        }
        // Without the companion .wasm binary the channel can't run at all.
        if !dir.join(format!("{}.wasm", name)).exists() {
            continue;
        }
        let bytes = match tokio::fs::read(&path).await {
            Ok(bytes) => bytes,
            Err(e) => {
                tracing::warn!(
                    path = %path.display(),
                    error = %e,
                    "Failed to read channel capabilities file"
                );
                continue;
            }
        };
        match ChannelCapabilitiesFile::from_bytes(&bytes) {
            Ok(cap_file) => found.push((name, cap_file)),
            Err(e) => {
                tracing::warn!(
                    path = %path.display(),
                    error = %e,
                    "Failed to parse channel capabilities file"
                );
            }
        }
    }
    found.sort_by(|a, b| a.0.cmp(&b.0));
    found
}
/// Mask an API key for display, keeping only short prefix/suffix hints.
///
/// Keys of 12 or more characters show a 6-char prefix and 4-char suffix;
/// shorter keys show at most 4 leading characters. Operates on `char`s so
/// multi-byte keys never split mid-character.
fn mask_api_key(key: &str) -> String {
    let chars: Vec<char> = key.chars().collect();
    if chars.len() < 12 {
        // Too short to reveal both ends without exposing most of the key.
        let head: String = chars.iter().take(4).collect();
        format!("{head}...")
    } else {
        let head: String = chars.iter().take(6).collect();
        let tail: String = chars.iter().skip(chars.len() - 4).collect();
        format!("{head}...{tail}")
    }
}
/// Uppercase the first character of `s`, leaving the rest untouched.
///
/// Returns an empty string for empty input. Uses `char::to_uppercase`, which
/// may expand to multiple characters for some scripts.
fn capitalize_first(s: &str) -> String {
    let mut rest = s.chars();
    rest.next()
        .map(|first| first.to_uppercase().chain(rest).collect())
        .unwrap_or_default()
}
/// Install every bundled channel that isn't already on disk (test helper).
///
/// Returns the names of the channels that were newly installed.
#[cfg(test)]
async fn install_missing_bundled_channels(
    channels_dir: &std::path::Path,
    already_installed: &HashSet<String>,
) -> Result<Vec<String>, SetupError> {
    let mut newly_installed = Vec::new();
    for &name in available_channel_names().iter() {
        // Skip channels that are already present.
        if already_installed.contains(name) {
            continue;
        }
        install_bundled_channel(name, channels_dir, false)
            .await
            .map_err(SetupError::Channel)?;
        newly_installed.push(name.to_string());
    }
    Ok(newly_installed)
}
/// Build the list of WASM channel names offered in the setup menu.
///
/// Channels discovered on disk come first (in the order given), followed by
/// any bundled channel not already present, without duplicates.
fn wasm_channel_option_names(discovered: &[(String, ChannelCapabilitiesFile)]) -> Vec<String> {
    let mut names: Vec<String> = discovered.iter().map(|(n, _)| n.clone()).collect();
    for bundled in available_channel_names().iter().copied() {
        // Only append bundled names that weren't discovered on disk.
        if names.iter().all(|existing| existing != bundled) {
            names.push(bundled.to_string());
        }
    }
    names
}
/// Install any user-selected bundled channels that aren't on disk yet.
///
/// Returns `Ok(None)` when nothing needed installing, otherwise
/// `Ok(Some(names))` with the installed channel names sorted alphabetically.
async fn install_selected_bundled_channels(
    channels_dir: &std::path::Path,
    selected_channels: &[String],
    already_installed: &HashSet<String>,
) -> Result<Option<Vec<String>>, SetupError> {
    // Names of channels shipped with the binary.
    let bundled: HashSet<&str> = available_channel_names().iter().copied().collect();
    // Selected channels that are bundled but not yet installed (deduplicated).
    let to_install: HashSet<String> = selected_channels
        .iter()
        .filter(|name| bundled.contains(name.as_str()) && !already_installed.contains(*name))
        .cloned()
        .collect();
    if to_install.is_empty() {
        return Ok(None);
    }
    let mut installed = Vec::with_capacity(to_install.len());
    for name in to_install {
        install_bundled_channel(&name, channels_dir, false)
            .await
            .map_err(SetupError::Channel)?;
        installed.push(name);
    }
    // HashSet iteration order is unspecified; sort for stable output.
    installed.sort();
    Ok(Some(installed))
}
#[cfg(test)]
mod tests {
    use std::collections::HashSet;
    use tempfile::tempdir;
    use super::*;

    #[test]
    fn test_wizard_creation() {
        // A fresh wizard starts with default (all-false) config flags.
        let wizard = SetupWizard::new();
        assert!(!wizard.config.skip_auth);
        assert!(!wizard.config.channels_only);
    }

    #[test]
    fn test_wizard_with_config() {
        let config = SetupConfig {
            skip_auth: true,
            channels_only: false,
        };
        let wizard = SetupWizard::with_config(config);
        assert!(wizard.config.skip_auth);
    }

    #[test]
    #[cfg(feature = "postgres")]
    fn test_mask_password_in_url() {
        // Password between ':' and '@' is replaced with '****'.
        assert_eq!(
            mask_password_in_url("postgres://user:secret@localhost/db"),
            "postgres://user:****@localhost/db"
        );
        // URLs without credentials pass through unchanged.
        assert_eq!(
            mask_password_in_url("postgres://localhost/db"),
            "postgres://localhost/db"
        );
    }

    #[test]
    fn test_capitalize_first() {
        assert_eq!(capitalize_first("telegram"), "Telegram");
        assert_eq!(capitalize_first("CAPS"), "CAPS");
        assert_eq!(capitalize_first(""), "");
    }

    #[test]
    fn test_mask_api_key() {
        // Keys of 12+ chars keep a 6-char prefix and 4-char suffix.
        assert_eq!(
            mask_api_key("sk-ant-api03-abcdef1234567890"),
            "sk-ant...7890"
        );
        // Shorter keys expose at most 4 leading characters.
        assert_eq!(mask_api_key("short"), "shor...");
        assert_eq!(mask_api_key("exactly12ch"), "exac...");
        assert_eq!(mask_api_key("exactly12chr"), "exactl...2chr");
        assert_eq!(mask_api_key(""), "...");
        // Char-based masking: multi-byte input must not panic or split bytes.
        assert_eq!(mask_api_key("日本語キー"), "日本語キ...");
    }

    #[tokio::test]
    async fn test_install_missing_bundled_channels_installs_telegram() {
        // Skip gracefully when the bundled WASM artifacts weren't built into
        // this binary (the channel list is empty then).
        if !available_channel_names().contains(&"telegram") {
            eprintln!("skipping: telegram WASM artifacts not built");
            return;
        }
        let dir = tempdir().unwrap();
        let installed = HashSet::<String>::new();
        install_missing_bundled_channels(dir.path(), &installed)
            .await
            .unwrap();
        // Both the module and its capabilities file must land on disk.
        assert!(dir.path().join("telegram.wasm").exists());
        assert!(dir.path().join("telegram.capabilities.json").exists());
    }

    #[test]
    fn test_wasm_channel_option_names_includes_available_when_missing() {
        // With nothing discovered on disk, all bundled channels are offered.
        let discovered = Vec::new();
        let options = wasm_channel_option_names(&discovered);
        let available = available_channel_names();
        for name in &available {
            assert!(
                options.contains(&name.to_string()),
                "expected '{}' in options",
                name
            );
        }
    }

    #[test]
    fn test_wasm_channel_option_names_dedupes_available() {
        // A channel that is both discovered and bundled appears only once.
        let discovered = vec![(String::from("telegram"), ChannelCapabilitiesFile::default())];
        let options = wasm_channel_option_names(&discovered);
        assert_eq!(
            options.iter().filter(|n| *n == "telegram").count(),
            1,
            "telegram should not be duplicated"
        );
    }

    #[tokio::test]
    async fn test_fetch_anthropic_models_static_fallback() {
        // With no cached key and the env var cleared, the static list is used.
        let _guard = EnvGuard::clear("ANTHROPIC_API_KEY");
        let models = fetch_anthropic_models(None).await;
        assert!(!models.is_empty());
        assert!(
            models.iter().any(|(id, _)| id.contains("claude")),
            "static defaults should include a Claude model"
        );
    }

    #[tokio::test]
    async fn test_fetch_openai_models_static_fallback() {
        let _guard = EnvGuard::clear("OPENAI_API_KEY");
        let models = fetch_openai_models(None).await;
        assert!(!models.is_empty());
        assert!(
            models.iter().any(|(id, _)| id.contains("gpt")),
            "static defaults should include a GPT model"
        );
    }

    #[tokio::test]
    async fn test_fetch_ollama_models_unreachable_fallback() {
        // Port 1 is essentially never listening, so the connection fails fast.
        let models = fetch_ollama_models("http://127.0.0.1:1").await;
        assert!(!models.is_empty(), "should fall back to static defaults");
    }

    #[tokio::test]
    async fn test_discover_wasm_channels_empty_dir() {
        let dir = tempdir().unwrap();
        let channels = discover_wasm_channels(dir.path()).await;
        assert!(channels.is_empty());
    }

    #[tokio::test]
    async fn test_discover_wasm_channels_nonexistent_dir() {
        let channels =
            discover_wasm_channels(std::path::Path::new("/tmp/ironclaw_nonexistent_dir")).await;
        assert!(channels.is_empty());
    }

    // RAII guard: removes an env var for the duration of a test and restores
    // the prior value (or absence) on drop.
    // NOTE(review): env mutation is process-global and cargo runs tests in
    // parallel — this assumes no two concurrent tests touch the same var;
    // confirm the guarded vars stay unique per test.
    struct EnvGuard {
        key: &'static str,
        original: Option<String>,
    }

    impl EnvGuard {
        // Remove `key` from the environment, remembering its prior value.
        fn clear(key: &'static str) -> Self {
            let original = std::env::var(key).ok();
            // `unsafe` because env mutation is not thread-safe; see the
            // NOTE(review) on the struct above.
            unsafe {
                std::env::remove_var(key);
            }
            Self { key, original }
        }
    }

    impl Drop for EnvGuard {
        fn drop(&mut self) {
            unsafe {
                if let Some(ref val) = self.original {
                    std::env::set_var(self.key, val);
                } else {
                    std::env::remove_var(self.key);
                }
            }
        }
    }
}