//! mermaid_cli/cli/commands.rs — handlers for the CLI subcommands.
1use anyhow::Result;
2use std::path::PathBuf;
3
4use crate::{
5    app::init_config,
6    models::ModelFactory,
7    ollama::{is_installed as is_ollama_installed, list_models as get_ollama_models},
8};
9
10use super::Commands;
11
12/// Handle CLI subcommands
13/// Returns Ok(true) if the command was handled and we should exit
14/// Returns Ok(false) if we should continue to the main application
15pub async fn handle_command(command: &Commands) -> Result<bool> {
16    match command {
17        Commands::Init => {
18            println!("Initializing Mermaid configuration...");
19            init_config()?;
20            println!("Configuration initialized successfully!");
21            Ok(true)
22        },
23        Commands::List => {
24            list_models().await?;
25            Ok(true)
26        },
27        Commands::Version => {
28            show_version();
29            Ok(true)
30        },
31        Commands::Status => {
32            show_status().await?;
33            Ok(true)
34        },
35        Commands::Chat => Ok(false), // Continue to chat interface
36        Commands::Run { .. } => Ok(false), // Handled by main.rs
37    }
38}
39
40/// List available models across all backends
41pub async fn list_models() -> Result<()> {
42    let models = ModelFactory::list_all_backend_models().await?;
43
44    if models.is_empty() {
45        println!("No models found across any backends");
46    } else {
47        println!("Available models:");
48        for model in models {
49            println!("  - {}", model);
50        }
51    }
52    Ok(())
53}
54
55/// Show version information
56pub fn show_version() {
57    println!("Mermaid v{}", env!("CARGO_PKG_VERSION"));
58    println!("   An open-source, model-agnostic AI pair programmer");
59}
60
61/// Show status of all dependencies
62async fn show_status() -> Result<()> {
63    println!("Mermaid Status:");
64    println!();
65
66    // Check available backends
67    let backends = ModelFactory::get_available_backends().await;
68    if backends.is_empty() {
69        println!("  [WARNING] Backends: None available");
70    } else {
71        println!("  [OK] Backends: {}", backends.join(", "));
72    }
73
74    // Check Ollama
75    if is_ollama_installed() {
76        let models = get_ollama_models().unwrap_or_default();
77        if models.is_empty() {
78            println!("  [WARNING] Ollama: Installed (no models)");
79        } else {
80            println!("  [OK] Ollama: Running ({} models installed)", models.len());
81            for model in models.iter().take(3) {
82                println!("      - {}", model);
83            }
84            if models.len() > 3 {
85                println!("      ... and {} more", models.len() - 3);
86            }
87        }
88    } else {
89        println!("  [ERROR] Ollama: Not installed");
90    }
91
92    // Check configuration
93    if let Ok(home) = std::env::var("HOME") {
94        let config_path = PathBuf::from(home).join(".config/mermaid/config.toml");
95        if config_path.exists() {
96            println!("  [OK] Configuration: {}", config_path.display());
97        } else {
98            println!("  [WARNING] Configuration: Not found (using defaults)");
99        }
100    }
101
102    // Environment variables (for API providers)
103    println!("\n  Environment:");
104    if std::env::var("OPENROUTER_API_KEY").is_ok() {
105        println!("    - OPENROUTER_API_KEY: Set");
106    }
107    if std::env::var("OLLAMA_API_KEY").is_ok() {
108        println!("    - OLLAMA_API_KEY: Set (for Ollama Cloud)");
109    }
110
111    println!();
112    Ok(())
113}