garudust 0.2.5

Self-improving AI agent — TUI/CLI, tool hub, Ollama & OpenRouter support
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
mod config_cmd;
mod doctor;
mod setup;
mod skill_cmd;
mod tool_cmd;
mod tui;

use std::sync::Arc;

use anyhow::Result;
use clap::{Parser, Subcommand};
use garudust_agent::{Agent, AutoApprover};
use garudust_core::config::AgentConfig;
use garudust_core::config::McpServerConfig;
use garudust_memory::{FileMemoryStore, SessionDb};
use garudust_tools::{
    load_script_tools, register_standard_tools, security::docker_available,
    toolsets::mcp::connect_mcp_server, ToolRegistry,
};
use garudust_transport::build_transport;
use tokio::sync::mpsc;

use tokio::sync::RwLock;
use tui::{AgentEvent, TuiEvent};

/// Opaque keep-alive handles for spawned MCP server child processes.
/// Dropping a handle terminates its MCP process, so the caller of
/// [`build_agent`] must hold these for as long as the agent is in use.
/// Stored as `dyn Any + Send` (not `Sync`) — see the TUI section of `main`
/// for why these live in a `Mutex` rather than an `RwLock`.
type McpHandles = Vec<Box<dyn std::any::Any + Send>>;

// Subcommands of `garudust config`.
// NOTE: clap renders `///` doc comments as user-visible CLI help text, so
// reviewer/maintainer notes in this enum use plain `//` comments to avoid
// changing the help output.
#[derive(Subcommand)]
enum ConfigCmd {
    /// Show current configuration
    Show,
    /// Set a configuration value
    ///
    /// Secret keys (OPENROUTER_API_KEY, ANTHROPIC_API_KEY, …) are saved to ~/.garudust/.env.
    /// Other keys (model, provider, base_url, max_iterations, tool_delay_ms) go to config.yaml.
    // Routing of key → file is implemented in `config_cmd::set` (see `main`).
    Set { key: String, value: String },
}

// Subcommands of `garudust tool` — management of script tools installed under
// `<home_dir>/tools` (see the `Cmd::Tool` arm in `main`).
// NOTE: clap renders `///` doc comments as CLI help text; keep maintainer
// notes as `//` comments.
#[derive(Subcommand)]
enum ToolCmd {
    /// List installed tools (and available hub tools)
    List {
        /// Skip fetching the hub — show only locally installed tools
        #[arg(long)]
        offline: bool,
    },
    /// Install a tool from the hub
    Install {
        /// Tool name as listed in the hub index
        name: String,
        /// Hub repository (default: garudust-org/garudust-hub)
        // Default comes from the tools crate so CLI and library stay in sync.
        #[arg(long, default_value = garudust_tools::hub::DEFAULT_HUB)]
        hub: String,
    },
    /// Remove an installed tool
    Uninstall {
        /// Tool name to remove
        name: String,
    },
    /// Update installed hub tools to the latest version
    Update {
        /// Specific tool to update (omit to update all)
        name: Option<String>,
    },
}

// Subcommands of `garudust skill` — management of skills installed under
// `<home_dir>/skills` (see the `Cmd::Skill` arm in `main`).
// NOTE: clap renders `///` doc comments as CLI help text; keep maintainer
// notes as `//` comments.
#[derive(Subcommand)]
enum SkillCmd {
    /// List installed skills
    List,
    /// Install a skill from the hub, GitHub, a direct URL, or a well-known endpoint
    ///
    /// Sources:
    ///   git-workflow         — short name (resolved via hub index)
    ///   owner/repo/path      — GitHub (raw.githubusercontent.com)
    ///   https://…/SKILL.md   — direct URL
    ///   well-known:https://… — /.well-known/skills/<name>/SKILL.md
    // Source-format dispatch happens in `skill_cmd::install`.
    Install {
        /// Skill name (short) or full source path / URL
        source: String,
        /// Skill name to use when saving (inferred from source if omitted)
        // Empty string (not Option) so `skill_cmd::install` can treat "" as "infer".
        #[arg(long, default_value = "")]
        name: String,
        /// Hub repository (default: garudust-org/garudust-hub)
        #[arg(long, default_value = garudust_tools::hub::DEFAULT_HUB)]
        hub: String,
    },
    /// Remove an installed skill
    Uninstall {
        /// Skill name as shown in `garudust skill list`
        name: String,
    },
}

// Top-level subcommands. All of these run without building a full agent —
// `main` handles each and returns before the agent-construction path.
// NOTE: clap renders `///` doc comments as CLI help text; keep maintainer
// notes as `//` comments.
#[derive(Subcommand)]
enum Cmd {
    /// Interactive first-time setup wizard
    Setup,

    /// Check environment and configuration
    Doctor,

    /// View or update configuration
    Config {
        #[command(subcommand)]
        sub: ConfigCmd,
    },

    /// Get or set the active model
    Model {
        /// Model name to switch to (omit for interactive prompt)
        name: Option<String>,
    },

    /// Manage script tools (install, uninstall, update, list)
    Tool {
        #[command(subcommand)]
        sub: ToolCmd,
    },

    /// Manage skills (install, uninstall, list)
    Skill {
        #[command(subcommand)]
        sub: SkillCmd,
    },
}

// Root CLI parser. With no subcommand and no task, `main` starts the TUI;
// with a bare positional `task` it runs one-shot; otherwise the subcommand runs.
// NOTE(review): clap allows both an optional subcommand and an optional
// positional here — a task string that equals a subcommand name will be parsed
// as the subcommand. Presumed acceptable; confirm if task names can collide.
// NOTE: clap renders `///` doc comments as CLI help text; maintainer notes
// stay as `//` comments.
#[derive(Parser)]
#[command(name = "garudust", about = "Garudust AI Agent", version)]
struct Cli {
    #[command(subcommand)]
    cmd: Option<Cmd>,

    /// One-shot task (omit to start interactive TUI)
    task: Option<String>,

    /// Override model (env: GARUDUST_MODEL)
    #[arg(long, env = "GARUDUST_MODEL")]
    model: Option<String>,

    /// Override OpenRouter API key (env: OPENROUTER_API_KEY)
    #[arg(long, env = "OPENROUTER_API_KEY")]
    api_key: Option<String>,

    /// Override Anthropic API key — sets provider=anthropic (env: ANTHROPIC_API_KEY)
    #[arg(long, env = "ANTHROPIC_API_KEY")]
    anthropic_key: Option<String>,

    /// Override base URL (env: GARUDUST_BASE_URL)
    #[arg(long, env = "GARUDUST_BASE_URL")]
    base_url: Option<String>,
}

/// Load the agent configuration and layer the CLI-flag overrides on top.
///
/// Precedence (lowest → highest): config files / env (via `AgentConfig::load`),
/// then `--model`, `--base-url`, and the API-key flags. `--anthropic-key` wins
/// over `--api-key` and also forces `provider = "anthropic"`.
fn build_config(cli: &Cli) -> Arc<AgentConfig> {
    let mut cfg = AgentConfig::load();

    if let Some(model) = &cli.model {
        // clone_from reuses cfg.model's allocation where possible.
        cfg.model.clone_from(model);
    }
    if let Some(url) = &cli.base_url {
        cfg.base_url = Some(url.clone());
    }

    // Key precedence: Anthropic key (switches provider) beats OpenRouter key.
    match (&cli.anthropic_key, &cli.api_key) {
        (Some(key), _) => {
            cfg.api_key = Some(key.clone());
            cfg.provider = "anthropic".into();
        }
        (None, Some(key)) => {
            cfg.api_key = Some(key.clone());
        }
        (None, None) => {}
    }

    Arc::new(cfg)
}

/// Single source of truth for the CLI agent — registers all tools and MCP servers.
/// Returns the agent and MCP process handles; caller must keep handles alive for
/// as long as the agent is in use (dropping them terminates the MCP processes).
async fn build_agent(config: Arc<AgentConfig>) -> (Arc<Agent>, McpHandles) {
    let transport = build_transport(&config);
    let memory = Arc::new(FileMemoryStore::new(&config.home_dir));

    // Warn early if the configured sandbox cannot actually run.
    let wants_docker =
        config.security.terminal_sandbox == garudust_core::config::TerminalSandbox::Docker;
    if wants_docker && !docker_available() {
        tracing::warn!(
            "terminal_sandbox is set to 'docker' but Docker is not installed or not in PATH. \
             Terminal commands will fail. Set `terminal_sandbox: none` or install Docker."
        );
    }

    // Session DB is optional — a failed open degrades gracefully to no persistence.
    let db = SessionDb::open(&config.home_dir).ok().map(Arc::new);

    // Tool registry: built-ins first, then MCP-provided tools, then user scripts.
    let mut registry = ToolRegistry::new();
    register_standard_tools(&mut registry, db.clone());
    let mcp_handles = attach_mcp_servers(&mut registry, &config.mcp_servers).await;
    load_script_tools(&config.home_dir)
        .await
        .into_iter()
        .for_each(|tool| registry.register(tool));

    let mut agent = Agent::new(transport, Arc::new(registry), memory, config);
    if let Some(db) = db {
        agent = agent.with_session_db(db);
    }
    (Arc::new(agent), mcp_handles)
}

/// Connect every configured MCP server and register its tools.
///
/// Connection failures are logged and skipped — one bad server must not take
/// down the whole agent. Returns the keep-alive handles for the servers that
/// did connect (dropping a handle terminates that MCP process).
async fn attach_mcp_servers(
    registry: &mut ToolRegistry,
    servers: &[McpServerConfig],
) -> McpHandles {
    let mut connected: McpHandles = Vec::with_capacity(servers.len());
    for server in servers {
        let outcome = connect_mcp_server(&server.command, &server.args).await;
        match outcome {
            Ok((tools, handle)) => {
                tracing::info!(server = %server.name, tools = tools.len(), "MCP server connected");
                tools.into_iter().for_each(|t| registry.register_arc(t));
                connected.push(handle);
            }
            Err(e) => {
                // Best-effort: log and continue with the remaining servers.
                tracing::warn!(server = %server.name, "failed to connect MCP server: {e}");
            }
        }
    }
    connected
}

/// Entrypoint: dispatches agent-free subcommands first, then runs either
/// one-shot mode (positional task) or the interactive TUI.
#[tokio::main]
async fn main() -> Result<()> {
    // Logging goes to stderr so it never corrupts one-shot stdout or the TUI.
    tracing_subscriber::fmt()
        .with_env_filter(std::env::var("RUST_LOG").unwrap_or_else(|_| "warn".into()))
        .with_writer(std::io::stderr)
        .init();
    dotenvy::dotenv().ok(); // load .env from current dir (development override)

    let cli = Cli::parse();

    // ── Subcommands that don't need a running agent ───────────────────────────
    // Each arm returns early; only the agent modes below fall through.
    match &cli.cmd {
        Some(Cmd::Setup) => {
            return setup::run().await;
        }

        Some(Cmd::Doctor) => {
            let config = build_config(&cli);
            doctor::run(&config).await;
            return Ok(());
        }

        Some(Cmd::Config {
            sub: ConfigCmd::Show,
        }) => {
            let config = build_config(&cli);
            config_cmd::show(&config);
            return Ok(());
        }

        Some(Cmd::Config {
            sub: ConfigCmd::Set { key, value },
        }) => {
            let config = build_config(&cli);
            config_cmd::set(key, value, &config.home_dir)?;
            return Ok(());
        }

        Some(Cmd::Model { name }) => {
            let config = build_config(&cli);
            config_cmd::set_model(name.as_deref(), &config)?;
            return Ok(());
        }

        Some(Cmd::Tool { sub }) => {
            let config = build_config(&cli);
            // Tools live under <home_dir>/tools; create it on first use.
            let tools_dir = config.home_dir.join("tools");
            tokio::fs::create_dir_all(&tools_dir).await?;
            match sub {
                ToolCmd::List { offline } => {
                    tool_cmd::list(&tools_dir, *offline).await?;
                }
                ToolCmd::Install { name, hub } => {
                    tool_cmd::install(name, &tools_dir, hub).await?;
                }
                ToolCmd::Uninstall { name } => {
                    tool_cmd::uninstall(name, &tools_dir).await?;
                }
                ToolCmd::Update { name } => {
                    tool_cmd::update(name.as_deref(), &tools_dir).await?;
                }
            }
            return Ok(());
        }

        Some(Cmd::Skill { sub }) => {
            let config = build_config(&cli);
            // Skills live under <home_dir>/skills; create it on first use.
            let skills_dir = config.home_dir.join("skills");
            tokio::fs::create_dir_all(&skills_dir).await?;
            match sub {
                SkillCmd::List => {
                    skill_cmd::list(&skills_dir).await?;
                }
                SkillCmd::Install { source, name, hub } => {
                    skill_cmd::install(source, name, hub, &skills_dir).await?;
                }
                SkillCmd::Uninstall { name } => {
                    skill_cmd::uninstall(name, &skills_dir).await?;
                }
            }
            return Ok(());
        }

        None => {}
    }

    // ── Agent modes ───────────────────────────────────────────────────────────
    let config = build_config(&cli);
    let (agent, mcp_handles) = build_agent(config.clone()).await;

    if let Some(task) = &cli.task {
        // ── One-shot mode ─────────────────────────────────────────────────────
        // Keep handles alive for the duration of the run; drop at block exit.
        // (Named `_handles`, not `_`, so the binding lives until end of scope.)
        let _handles = mcp_handles;
        let approver = Arc::new(AutoApprover);
        let result = agent.run(task, approver, "cli").await?;
        // Result on stdout (pipeable); stats on stderr.
        println!("{}", result.output);
        eprintln!(
            "[{} iter | {}in {}out tokens]",
            result.iterations, result.usage.input_tokens, result.usage.output_tokens
        );
    } else {
        // ── Interactive TUI mode ──────────────────────────────────────────────
        // shared_state holds both the agent and its MCP handles together so that
        // dropping the old state on /model switch reaps the old MCP processes.
        let approver = Arc::new(AutoApprover);

        // tx_event/rx_event: TUI → dispatcher; tx_agent/rx_agent: dispatcher → TUI.
        let (tx_event, mut rx_event) = mpsc::channel::<TuiEvent>(32);
        let (tx_agent, rx_agent) = mpsc::channel::<AgentEvent>(64);

        // agent is in a RwLock (Arc<Agent>: Sync — fine for concurrent reads in Submit).
        // MCP handles are in a Mutex: Vec<Box<dyn Any+Send>> is Send but not Sync,
        // so RwLock won't work; Mutex<T> is Sync whenever T: Send.
        let shared_agent = Arc::new(RwLock::new(agent.clone()));
        let shared_handles = Arc::new(tokio::sync::Mutex::new(mcp_handles));
        let shared_config = config.clone();
        let approver2 = approver.clone();
        let tx_agent2 = tx_agent.clone();

        // Send TuiEvent::Quit on SIGTERM so the TUI can restore the terminal cleanly.
        #[cfg(unix)]
        {
            let tx_quit = tx_event.clone();
            tokio::spawn(async move {
                if let Ok(mut sig) =
                    tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
                {
                    sig.recv().await;
                    let _ = tx_quit.send(TuiEvent::Quit).await;
                }
            });
        }

        // Dispatcher task: consumes TUI events sequentially. Note this loop
        // processes one event at a time, so a long Submit blocks later events
        // (including ChangeModel) until it completes.
        tokio::spawn(async move {
            while let Some(ev) = rx_event.recv().await {
                match ev {
                    TuiEvent::Quit => break,
                    TuiEvent::NewSession => {} // agent is stateless per-call; UI already cleared
                    TuiEvent::ChangeModel(model) => {
                        // Rebuild the whole agent with the new model; config is
                        // cloned from the startup snapshot, not re-read from disk.
                        let mut new_cfg = (*shared_config).clone();
                        new_cfg.model = model;
                        let (new_agent, new_handles) = build_agent(Arc::new(new_cfg)).await;
                        // Drop old handles first — terminates previous MCP child processes.
                        // NOTE(review): old handles are dropped before the agent
                        // swap below; presumably no Submit can be in flight here
                        // because this loop is sequential — confirm.
                        *shared_handles.lock().await = new_handles;
                        *shared_agent.write().await = new_agent;
                    }
                    TuiEvent::Submit(task) => {
                        let _ = tx_agent2.send(AgentEvent::Thinking).await;
                        // Clone the Arc out so the read lock is released before .await-ing the run.
                        let current_agent = shared_agent.read().await.clone();

                        // Bridge task: forwards streaming output deltas to the TUI;
                        // it ends when chunk_tx is dropped at the end of the run.
                        let (chunk_tx, mut chunk_rx) = mpsc::unbounded_channel::<String>();
                        let tx_agent3 = tx_agent2.clone();
                        tokio::spawn(async move {
                            while let Some(delta) = chunk_rx.recv().await {
                                let _ = tx_agent3.send(AgentEvent::OutputChunk(delta)).await;
                            }
                        });

                        match current_agent
                            .run_streaming(&task, approver2.clone(), "cli", chunk_tx)
                            .await
                        {
                            Ok(r) => {
                                let _ = tx_agent2
                                    .send(AgentEvent::Done {
                                        iterations: r.iterations,
                                        input_tokens: r.usage.input_tokens,
                                        output_tokens: r.usage.output_tokens,
                                    })
                                    .await;
                            }
                            Err(e) => {
                                let _ = tx_agent2.send(AgentEvent::Error(e.to_string())).await;
                            }
                        }
                    }
                }
            }
        });

        // Gather metadata for the TUI's pickers before handing over the terminal.
        let toolsets = agent.tool_names_by_toolset();
        let skill_names =
            garudust_tools::toolsets::skills::load_skills_from_dir(&config.home_dir.join("skills"))
                .await
                .into_iter()
                .map(|s| s.name)
                .collect::<Vec<_>>();
        // Blocks until the user quits; dispatcher exits when tx_event is dropped.
        tui::Tui::run(tx_event, rx_agent, toolsets, skill_names).await?;
    }

    Ok(())
}