mod config_cmd;
mod doctor;
mod setup;
mod tool_cmd;
mod tui;
use std::sync::Arc;
use anyhow::Result;
use clap::{Parser, Subcommand};
use garudust_agent::{Agent, AutoApprover};
use garudust_core::config::AgentConfig;
use garudust_core::config::McpServerConfig;
use garudust_memory::{FileMemoryStore, SessionDb};
use garudust_tools::{
load_script_tools, register_standard_tools, security::docker_available,
toolsets::mcp::connect_mcp_server, ToolRegistry,
};
use garudust_transport::build_transport;
use tokio::sync::mpsc;
use tokio::sync::RwLock;
use tui::{AgentEvent, TuiEvent};
/// Opaque keep-alive handles returned by connected MCP servers. Callers hold
/// these for as long as the MCP-provided tools may be used (presumably
/// dropping a handle tears down its connection — confirm in `connect_mcp_server`).
type McpHandles = Vec<Box<dyn std::any::Any + Send>>;
// Subcommands of `garudust config`. Plain `//` comments are used here (not
// `///`) because clap surfaces doc comments as `--help` text, which would
// change the CLI's observable output.
#[derive(Subcommand)]
enum ConfigCmd {
    // Print the resolved configuration (handled by config_cmd::show).
    Show,
    // Persist a single key/value override (handled by config_cmd::set).
    Set { key: String, value: String },
}
// Subcommands of `garudust tool` for managing script tools stored under the
// config home's `tools/` directory. `//` comments (not `///`) so clap's
// `--help` output is unchanged.
#[derive(Subcommand)]
enum ToolCmd {
    // List tools (tool_cmd::list); --offline presumably skips hub lookups —
    // confirm against tool_cmd::list.
    List {
        #[arg(long)]
        offline: bool,
    },
    // Install a named tool from a hub; hub defaults to the built-in hub URL.
    Install {
        name: String,
        #[arg(long, default_value = garudust_tools::hub::DEFAULT_HUB)]
        hub: String,
    },
    // Remove an installed tool by name (tool_cmd::uninstall).
    Uninstall {
        name: String,
    },
    // Update one tool, or — when no name is given — delegate to
    // tool_cmd::update with None (presumably meaning "all").
    Update {
        name: Option<String>,
    },
}
// Top-level subcommands. When no subcommand is given, main() falls through:
// a positional `task` runs one-shot, otherwise the interactive TUI starts.
// `//` comments (not `///`) so clap's `--help` output is unchanged.
#[derive(Subcommand)]
enum Cmd {
    // Delegates to setup::run().
    Setup,
    // Environment/configuration diagnostics (doctor::run).
    Doctor,
    // Show or set configuration values.
    Config {
        #[command(subcommand)]
        sub: ConfigCmd,
    },
    // Show or change the model (config_cmd::set_model; None shows/reports).
    Model {
        name: Option<String>,
    },
    // Manage script tools (see ToolCmd).
    Tool {
        #[command(subcommand)]
        sub: ToolCmd,
    },
}
// Command-line arguments. `//` comments (not `///`) so clap's `--help`
// output is unchanged. Flags also read from environment variables via `env`.
#[derive(Parser)]
#[command(name = "garudust", about = "Garudust AI Agent", version)]
struct Cli {
    #[command(subcommand)]
    cmd: Option<Cmd>,
    // Positional one-shot task; when absent (and no subcommand), the TUI runs.
    task: Option<String>,
    // Model override (or GARUDUST_MODEL).
    #[arg(long, env = "GARUDUST_MODEL")]
    model: Option<String>,
    // OpenRouter API key (or OPENROUTER_API_KEY).
    #[arg(long, env = "OPENROUTER_API_KEY")]
    api_key: Option<String>,
    // Anthropic API key (or ANTHROPIC_API_KEY); when present it wins over
    // --api-key and switches the provider to "anthropic" — see build_config.
    #[arg(long, env = "ANTHROPIC_API_KEY")]
    anthropic_key: Option<String>,
    // Transport base URL override (or GARUDUST_BASE_URL).
    #[arg(long, env = "GARUDUST_BASE_URL")]
    base_url: Option<String>,
}
/// Assemble the effective configuration: load the persisted config, then
/// layer CLI/env overrides on top.
///
/// Credential precedence: an Anthropic key both sets the API key and flips
/// the provider to "anthropic"; otherwise the OpenRouter key (if any) is used.
fn build_config(cli: &Cli) -> Arc<AgentConfig> {
    let mut cfg = AgentConfig::load();
    if let Some(model) = cli.model.as_ref() {
        // clone_from reuses cfg.model's existing allocation where possible.
        cfg.model.clone_from(model);
    }
    if let Some(url) = cli.base_url.as_ref() {
        cfg.base_url = Some(url.clone());
    }
    match (&cli.anthropic_key, &cli.api_key) {
        (Some(key), _) => {
            cfg.api_key = Some(key.clone());
            cfg.provider = "anthropic".into();
        }
        (None, Some(key)) => cfg.api_key = Some(key.clone()),
        (None, None) => {}
    }
    Arc::new(cfg)
}
/// Build a ready-to-run agent together with the keep-alive handles of any
/// connected MCP servers; the handles must be kept alive while the agent's
/// MCP-provided tools are in use.
async fn build_agent(config: Arc<AgentConfig>) -> (Arc<Agent>, McpHandles) {
    let memory = Arc::new(FileMemoryStore::new(&config.home_dir));
    let transport = build_transport(&config);

    // Warn early if the configured Docker sandbox cannot actually be used.
    let wants_docker =
        config.security.terminal_sandbox == garudust_core::config::TerminalSandbox::Docker;
    if wants_docker && !docker_available() {
        tracing::warn!(
            "terminal_sandbox is set to 'docker' but Docker is not installed or not in PATH. \
Terminal commands will fail. Set `terminal_sandbox: none` or install Docker."
        );
    }

    // Session persistence is best-effort: a failed open just means no DB.
    let db = SessionDb::open(&config.home_dir).ok().map(Arc::new);

    let mut registry = ToolRegistry::new();
    register_standard_tools(&mut registry, db.clone());
    let mcp_handles = attach_mcp_servers(&mut registry, &config.mcp_servers).await;
    for script_tool in load_script_tools(&config.home_dir).await {
        registry.register(script_tool);
    }

    let mut agent = Agent::new(transport, Arc::new(registry), memory, config);
    if let Some(db) = db {
        agent = agent.with_session_db(db);
    }
    (Arc::new(agent), mcp_handles)
}
/// Connect every configured MCP server and register its tools into `registry`.
/// A server that fails to connect is logged and skipped rather than aborting
/// the whole startup. Returns keep-alive handles for successful connections.
async fn attach_mcp_servers(
    registry: &mut ToolRegistry,
    servers: &[McpServerConfig],
) -> McpHandles {
    let mut keep_alive: McpHandles = Vec::with_capacity(servers.len());
    for server in servers {
        let connected = connect_mcp_server(&server.command, &server.args).await;
        match connected {
            Ok((tools, handle)) => {
                tracing::info!(server = %server.name, tools = tools.len(), "MCP server connected");
                tools.into_iter().for_each(|tool| registry.register_arc(tool));
                keep_alive.push(handle);
            }
            Err(e) => {
                tracing::warn!(server = %server.name, "failed to connect MCP server: {e}");
            }
        }
    }
    keep_alive
}
/// Entry point: dispatch utility subcommands, or run the agent in one-shot
/// CLI mode (a positional `task` was given) or in the interactive TUI.
#[tokio::main]
async fn main() -> Result<()> {
    // Log to stderr (default level "warn", overridable via RUST_LOG) so task
    // output on stdout stays clean for piping.
    tracing_subscriber::fmt()
        .with_env_filter(std::env::var("RUST_LOG").unwrap_or_else(|_| "warn".into()))
        .with_writer(std::io::stderr)
        .init();
    // Load .env if present; a missing file is ignored.
    dotenvy::dotenv().ok();
    let cli = Cli::parse();

    // Utility subcommands: each returns early without building a full agent.
    match &cli.cmd {
        Some(Cmd::Setup) => {
            return setup::run().await;
        }
        Some(Cmd::Doctor) => {
            let config = build_config(&cli);
            doctor::run(&config).await;
            return Ok(());
        }
        Some(Cmd::Config {
            sub: ConfigCmd::Show,
        }) => {
            let config = build_config(&cli);
            config_cmd::show(&config);
            return Ok(());
        }
        Some(Cmd::Config {
            sub: ConfigCmd::Set { key, value },
        }) => {
            let config = build_config(&cli);
            config_cmd::set(key, value, &config.home_dir)?;
            return Ok(());
        }
        Some(Cmd::Model { name }) => {
            let config = build_config(&cli);
            config_cmd::set_model(name.as_deref(), &config)?;
            return Ok(());
        }
        Some(Cmd::Tool { sub }) => {
            let config = build_config(&cli);
            // Script tools live under <home_dir>/tools; create it on demand.
            let tools_dir = config.home_dir.join("tools");
            tokio::fs::create_dir_all(&tools_dir).await?;
            match sub {
                ToolCmd::List { offline } => {
                    tool_cmd::list(&tools_dir, *offline).await?;
                }
                ToolCmd::Install { name, hub } => {
                    tool_cmd::install(name, &tools_dir, hub).await?;
                }
                ToolCmd::Uninstall { name } => {
                    tool_cmd::uninstall(name, &tools_dir).await?;
                }
                ToolCmd::Update { name } => {
                    tool_cmd::update(name.as_deref(), &tools_dir).await?;
                }
            }
            return Ok(());
        }
        None => {}
    }

    // No utility subcommand: build the real agent (and its MCP connections).
    let config = build_config(&cli);
    let (agent, mcp_handles) = build_agent(config.clone()).await;

    if let Some(task) = &cli.task {
        // One-shot mode: run the task, print the result to stdout, and report
        // usage stats on stderr. The binding keeps the MCP handles alive for
        // the duration of the run.
        let _handles = mcp_handles;
        let approver = Arc::new(AutoApprover);
        let result = agent.run(task, approver, "cli").await?;
        println!("{}", result.output);
        eprintln!(
            "[{} iter | {}in {}out tokens]",
            result.iterations, result.usage.input_tokens, result.usage.output_tokens
        );
    } else {
        // Interactive TUI mode. Two channels bridge the UI and the agent:
        // tx_event/rx_event carries UI -> agent requests (bounded at 32),
        // tx_agent/rx_agent carries agent -> UI progress events (bounded at 64).
        let approver = Arc::new(AutoApprover);
        let (tx_event, mut rx_event) = mpsc::channel::<TuiEvent>(32);
        let (tx_agent, rx_agent) = mpsc::channel::<AgentEvent>(64);
        // Shared so the ChangeModel arm can swap in a freshly built agent
        // (and its MCP handles) while Submit reads whatever is current.
        let shared_agent = Arc::new(RwLock::new(agent.clone()));
        let shared_handles = Arc::new(tokio::sync::Mutex::new(mcp_handles));
        let shared_config = config.clone();
        let approver2 = approver.clone();
        let tx_agent2 = tx_agent.clone();
        // Translate SIGTERM into a TUI Quit event so shutdown goes through
        // the normal quit path (Unix only).
        #[cfg(unix)]
        {
            let tx_quit = tx_event.clone();
            tokio::spawn(async move {
                if let Ok(mut sig) =
                    tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
                {
                    sig.recv().await;
                    let _ = tx_quit.send(TuiEvent::Quit).await;
                }
            });
        }
        // Event loop task: processes one UI event at a time.
        // NOTE(review): run_streaming is awaited inline, so Quit/ChangeModel
        // events queue behind a running task until it finishes — confirm
        // this is intended.
        tokio::spawn(async move {
            while let Some(ev) = rx_event.recv().await {
                match ev {
                    TuiEvent::Quit => break,
                    // NewSession is currently a no-op.
                    TuiEvent::NewSession => {}
                    TuiEvent::ChangeModel(model) => {
                        // Rebuild the agent against the new model; storing the
                        // new handles drops the old MCP connections' handles.
                        let mut new_cfg = (*shared_config).clone();
                        new_cfg.model = model;
                        let (new_agent, new_handles) = build_agent(Arc::new(new_cfg)).await;
                        *shared_handles.lock().await = new_handles;
                        *shared_agent.write().await = new_agent;
                    }
                    TuiEvent::Submit(task) => {
                        let _ = tx_agent2.send(AgentEvent::Thinking).await;
                        let current_agent = shared_agent.read().await.clone();
                        // Side task forwards streaming output deltas to the UI
                        // as they arrive; it ends when chunk_tx is dropped.
                        let (chunk_tx, mut chunk_rx) = mpsc::unbounded_channel::<String>();
                        let tx_agent3 = tx_agent2.clone();
                        tokio::spawn(async move {
                            while let Some(delta) = chunk_rx.recv().await {
                                let _ = tx_agent3.send(AgentEvent::OutputChunk(delta)).await;
                            }
                        });
                        match current_agent
                            .run_streaming(&task, approver2.clone(), "cli", chunk_tx)
                            .await
                        {
                            Ok(r) => {
                                let _ = tx_agent2
                                    .send(AgentEvent::Done {
                                        iterations: r.iterations,
                                        input_tokens: r.usage.input_tokens,
                                        output_tokens: r.usage.output_tokens,
                                    })
                                    .await;
                            }
                            Err(e) => {
                                let _ = tx_agent2.send(AgentEvent::Error(e.to_string())).await;
                            }
                        }
                    }
                }
            }
        });
        // Runs until the user quits; owns the terminal for its lifetime.
        tui::Tui::run(tx_event, rx_agent).await?;
    }
    Ok(())
}