//! CLI subcommand handlers (mermaid_cli/cli/commands.rs).

1use anyhow::Result;
2
3use crate::{
4    app::{get_config_dir, init_config},
5    models::ModelFactory,
6    ollama::{is_installed as is_ollama_installed, list_models as get_ollama_models},
7};
8
9use super::Commands;
10
11/// Handle CLI subcommands
12/// Returns Ok(true) if the command was handled and we should exit
13/// Returns Ok(false) if we should continue to the main application
14pub async fn handle_command(command: &Commands) -> Result<bool> {
15    match command {
16        Commands::Init => {
17            println!("Initializing Mermaid configuration...");
18            init_config()?;
19            println!("Configuration initialized successfully!");
20            Ok(true)
21        },
22        Commands::List => {
23            list_models().await?;
24            Ok(true)
25        },
26        Commands::Version => {
27            show_version();
28            Ok(true)
29        },
30        Commands::Status => {
31            show_status().await?;
32            Ok(true)
33        },
34        Commands::Chat => Ok(false),       // Continue to chat interface
35        Commands::Run { .. } => Ok(false), // Handled by main.rs
36    }
37}
38
39/// List available models across all backends
40pub async fn list_models() -> Result<()> {
41    let models = ModelFactory::list_all_models().await?;
42
43    if models.is_empty() {
44        println!("No models found across any backends");
45    } else {
46        println!("Available models:");
47        for model in models {
48            println!("  - {}", model);
49        }
50    }
51    Ok(())
52}
53
54/// Show version information
55pub fn show_version() {
56    println!("Mermaid v{}", env!("CARGO_PKG_VERSION"));
57    println!("   An open-source, model-agnostic AI pair programmer");
58}
59
60/// Show status of all dependencies
61async fn show_status() -> Result<()> {
62    println!("Mermaid Status:");
63    println!();
64
65    // Check available backends
66    let backends = ModelFactory::available_providers().await;
67    if backends.is_empty() {
68        println!("  [WARNING] Backends: None available");
69    } else {
70        println!("  [OK] Backends: {}", backends.join(", "));
71    }
72
73    // Check Ollama
74    if is_ollama_installed() {
75        let models = get_ollama_models().unwrap_or_default();
76        if models.is_empty() {
77            println!("  [WARNING] Ollama: Installed (no models)");
78        } else {
79            println!("  [OK] Ollama: Running ({} models installed)", models.len());
80            for model in models.iter().take(3) {
81                println!("      - {}", model);
82            }
83            if models.len() > 3 {
84                println!("      ... and {} more", models.len() - 3);
85            }
86        }
87    } else {
88        println!("  [ERROR] Ollama: Not installed");
89    }
90
91    // Check configuration (uses platform-specific path via ProjectDirs)
92    if let Ok(config_dir) = get_config_dir() {
93        let config_path = config_dir.join("config.toml");
94        if config_path.exists() {
95            println!("  [OK] Configuration: {}", config_path.display());
96        } else {
97            println!("  [WARNING] Configuration: Not found (using defaults)");
98        }
99    }
100
101    // Environment variables (for API providers)
102    println!("\n  Environment:");
103    if std::env::var("OPENROUTER_API_KEY").is_ok() {
104        println!("    - OPENROUTER_API_KEY: Set");
105    }
106    if std::env::var("OLLAMA_API_KEY").is_ok() {
107        println!("    - OLLAMA_API_KEY: Set (for Ollama Cloud)");
108    }
109
110    println!();
111    Ok(())
112}