koda-cli 0.2.0

A high-performance AI coding agent built in Rust
//! Koda 🐻 - A high-performance AI coding agent built in Rust.
//!
//! CLI entry point. The binary is named `koda` for ergonomics.
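//!
//! Illustrative invocations, derived from the flags and subcommands defined
//! below:
//!
//! ```text
//! koda                              # interactive TUI session
//! koda "fix the bug"                # one-shot positional prompt (headless)
//! koda -p - < prompt.txt            # read the prompt from stdin
//! echo "explain main.rs" | koda     # piped stdin also runs headless
//! koda --output-format json -p "summarize the repo"   # machine-readable output
//! koda server --stdio               # ACP server over stdio for external clients
//! ```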

mod ansi_parse;
mod completer;
mod diff_render;
mod headless;
mod highlight;
mod history_render;
mod input;
mod md_render;
mod mouse_select;
mod onboarding;
mod repl;
mod scroll_buffer;
mod server;
mod sink;
mod startup;
mod tool_history;
mod tui_app;
mod tui_commands;
mod tui_context;
mod tui_handlers_inference;
mod tui_output;
mod tui_render;
mod tui_types;
mod tui_viewport;
mod tui_wizards;
mod widgets;
mod wrap_input;
mod wrap_util;

use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use koda_core::persistence::Persistence;
use std::path::PathBuf;

/// Koda 🐻 - Your AI coding bear.
#[derive(Parser, Debug)]
#[command(name = "koda", version, about)]
struct Cli {
    #[command(subcommand)]
    command: Option<Command>,

    /// Run a single prompt and exit (headless mode).
    /// Use "-" to read from stdin.
    #[arg(short, long, value_name = "PROMPT")]
    prompt: Option<String>,

    /// Positional prompt (alternative to -p).
    /// `koda "fix the bug"` is equivalent to `koda -p "fix the bug"`.
    #[arg(value_name = "PROMPT", conflicts_with = "prompt")]
    positional_prompt: Option<String>,

    /// Output format for headless mode.
    #[arg(long, default_value = "text", value_parser = ["text", "json"])]
    output_format: String,

    /// Agent to use (matches a JSON file in agents/)
    #[arg(short, long, default_value = "default")]
    agent: String,

    /// Session ID to resume (omit to start a new session)
    #[arg(short, long = "resume", alias = "session")]
    session: Option<String>,

    /// Project root directory (defaults to current directory)
    #[arg(long)]
    project_root: Option<PathBuf>,

    /// LLM provider base URL override
    #[arg(long, env = "KODA_BASE_URL")]
    base_url: Option<String>,

    /// Model name override
    #[arg(long, env = "KODA_MODEL")]
    model: Option<String>,

    /// LLM provider (openai, anthropic, lmstudio, gemini, groq, grok)
    #[arg(long, env = "KODA_PROVIDER")]
    provider: Option<String>,

    /// Maximum output tokens
    #[arg(long)]
    max_tokens: Option<u32>,

    /// Sampling temperature (0.0 - 2.0)
    #[arg(long)]
    temperature: Option<f64>,

    /// Anthropic extended thinking budget (tokens)
    #[arg(long)]
    thinking_budget: Option<u32>,

    /// OpenAI reasoning effort (low, medium, high)
    #[arg(long)]
    reasoning_effort: Option<String>,
}

#[derive(Subcommand, Debug)]
enum Command {
    /// Start ACP server for external clients
    Server {
        #[arg(long, default_value = "9999")]
        port: u16,
        #[arg(long)]
        stdio: bool,
    },
    /// Connect CLI to a running server
    Connect { url: String },
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Handle subcommands first
    if let Some(cmd) = &cli.command {
        match cmd {
            Command::Server { port, stdio } => {
                if *stdio {
                    // Load and inject stored API keys (env vars take precedence)
                    match koda_core::keystore::KeyStore::load() {
                        Ok(store) => store.inject_into_env(),
                        Err(e) => tracing::warn!("Failed to load keystore: {e}"),
                    }

                    let project_root = cli.project_root.clone().unwrap_or_else(|| {
                        std::env::current_dir().expect("Failed to get current directory")
                    });
                    let project_root = std::fs::canonicalize(&project_root)?;

                    let config = koda_core::config::KodaConfig::load(&project_root, &cli.agent)?;
                    let config = config
                        .with_overrides(
                            cli.base_url.clone(),
                            cli.model.clone(),
                            cli.provider.clone(),
                        )
                        .with_model_overrides(
                            cli.max_tokens,
                            cli.temperature,
                            cli.thinking_budget,
                            cli.reasoning_effort.clone(),
                        );
                    server::run_stdio_server(project_root, config).await?;
                } else {
                    eprintln!("WebSocket server (--port {port}) not yet implemented. Use --stdio.");
                    std::process::exit(1);
                }
                return Ok(());
            }
            Command::Connect { url } => {
                println!("Not implemented: Connect to {}", url);
                std::process::exit(0);
            }
        }
    }

    // Resolve headless prompt: -p flag, positional arg, or stdin
    let headless_prompt = resolve_headless_prompt(&cli)?;

    // Resolve project root
    let project_root = cli
        .project_root
        .unwrap_or_else(|| std::env::current_dir().expect("Failed to get current directory"));
    let project_root = std::fs::canonicalize(&project_root)?;

    // Initialize file-based logging (kept out of the terminal so it does not disturb the TUI)
    let log_dir = koda_core::db::config_dir()?.join("logs");
    std::fs::create_dir_all(&log_dir)?;
    let file_appender = tracing_appender::rolling::daily(&log_dir, "koda.log");
    let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
    tracing_subscriber::fmt()
        .with_writer(non_blocking)
        .with_env_filter("koda_agent=debug")
        .init();

    tracing::info!("Koda starting. Project root: {:?}", project_root);

    // Load and inject stored API keys (env vars take precedence)
    match koda_core::keystore::KeyStore::load() {
        Ok(store) => store.inject_into_env(),
        Err(e) => tracing::warn!("Failed to load keystore: {e}"),
    }

    // Headless mode: skip onboarding, banner, version check
    if let Some(prompt) = headless_prompt {
        let config = koda_core::config::KodaConfig::load(&project_root, &cli.agent)?;
        let config = config
            .with_overrides(cli.base_url, cli.model, cli.provider)
            .with_model_overrides(
                cli.max_tokens,
                cli.temperature,
                cli.thinking_budget,
                cli.reasoning_effort,
            );
        let db = koda_core::db::Database::init(&koda_core::db::config_dir()?).await?;
        let session_id = match cli.session {
            Some(id) => id,
            None => db.create_session(&config.agent_name, &project_root).await?,
        };
        let exit_code = headless::run_headless(
            project_root,
            config,
            db,
            session_id,
            prompt,
            &cli.output_format,
        )
        .await?;
        std::process::exit(exit_code);
    }

    // Interactive mode: full REPL experience
    let version_check = koda_core::version::spawn_version_check();

    // Detect first run (onboarding happens inside the TUI now)
    let first_run = onboarding::is_first_run();

    // Load configuration
    let config = koda_core::config::KodaConfig::load(&project_root, &cli.agent)?;
    let config = config
        .with_overrides(cli.base_url, cli.model, cli.provider)
        .with_model_overrides(
            cli.max_tokens,
            cli.temperature,
            cli.thinking_budget,
            cli.reasoning_effort,
        );

    // Initialize database
    let db = koda_core::db::Database::init(&koda_core::db::config_dir()?).await?;

    // Load or create session
    let session_id = match cli.session {
        Some(id) => id,
        None => db.create_session(&config.agent_name, &project_root).await?,
    };

    // Run the main event loop
    tui_app::run(
        project_root,
        config,
        db,
        session_id,
        version_check,
        first_run,
    )
    .await
}

/// Resolve the headless prompt from -p flag, positional arg, or stdin pipe.
fn resolve_headless_prompt(cli: &Cli) -> Result<Option<String>> {
    // Explicit -p flag
    if let Some(ref p) = cli.prompt {
        if p == "-" {
            // Read from stdin
            use std::io::Read;
            let mut input = String::new();
            std::io::stdin()
                .read_to_string(&mut input)
                .context("Failed to read from stdin")?;
            return Ok(Some(input.trim().to_string()));
        }
        return Ok(Some(p.clone()));
    }

    // Positional prompt
    if let Some(ref p) = cli.positional_prompt {
        return Ok(Some(p.clone()));
    }

    // Check if stdin is piped (not a TTY) — auto-headless
    if !stdin_is_terminal() {
        use std::io::Read;
        let mut input = String::new();
        std::io::stdin()
            .read_to_string(&mut input)
            .context("Failed to read from stdin")?;
        let trimmed = input.trim().to_string();
        if !trimmed.is_empty() {
            return Ok(Some(trimmed));
        }
    }

    Ok(None)
}

/// Check whether stdin is a terminal (as opposed to a pipe or redirect).
fn stdin_is_terminal() -> bool {
    use std::io::IsTerminal;
    std::io::stdin().is_terminal()
}
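
// A minimal test sketch for the prompt-resolution precedence above. It only
// exercises the `-p` and positional branches; the stdin-dependent branches are
// skipped here because they depend on whether the test harness's stdin is a TTY.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn flag_prompt_is_used() -> Result<()> {
        let cli = Cli::try_parse_from(["koda", "-p", "fix the bug"])?;
        assert_eq!(resolve_headless_prompt(&cli)?.as_deref(), Some("fix the bug"));
        Ok(())
    }

    #[test]
    fn positional_prompt_is_used() -> Result<()> {
        let cli = Cli::try_parse_from(["koda", "fix the bug"])?;
        assert_eq!(resolve_headless_prompt(&cli)?.as_deref(), Some("fix the bug"));
        Ok(())
    }
}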