// mermaid_cli/cli/commands.rs
use anyhow::Result;
use colored::Colorize;
use std::path::PathBuf;

use crate::{
    app::init_config,
    models::ModelFactory,
    ollama::{is_installed as is_ollama_installed, list_models as get_ollama_models},
    proxy::is_proxy_running,
};

use super::Commands;

14pub async fn handle_command(command: &Commands) -> Result<bool> {
16 match command {
17 Commands::Init => {
18 println!("Initializing Mermaid configuration...");
19 init_config()?;
20 println!("Configuration initialized successfully!");
21 Ok(true)
22 },
23 Commands::List => {
24 list_models().await?;
25 Ok(true)
26 },
27 Commands::Version => {
28 show_version();
29 Ok(true)
30 },
31 Commands::Status => {
32 show_status().await?;
33 Ok(true)
34 },
35 Commands::Chat => Ok(false), }
37}
38
39pub async fn list_models() -> Result<()> {
41 println!("Available models:");
42 let models = ModelFactory::list_available().await?;
43 for model in models {
44 println!(" • {}", model.green());
45 }
46 Ok(())
47}
48
49pub fn show_version() {
51 println!("Mermaid v{}", env!("CARGO_PKG_VERSION"));
52 println!(" An open-source, model-agnostic AI pair programmer");
53}
54
55async fn show_status() -> Result<()> {
57 println!("Mermaid Status:");
58 println!();
59
60 if is_ollama_installed() {
62 let models = get_ollama_models().unwrap_or_default();
63 if models.is_empty() {
64 println!(" [WARNING] Ollama: Installed (no models)");
65 } else {
66 println!(" [OK] Ollama: Running ({} models installed)", models.len());
67 for model in models.iter().take(3) {
68 println!(" • {}", model);
69 }
70 if models.len() > 3 {
71 println!(" ... and {} more", models.len() - 3);
72 }
73 }
74 } else {
75 println!(" [ERROR] Ollama: Not installed");
76 }
77
78 if is_proxy_running().await {
80 println!(" [OK] LiteLLM Proxy: Running at http://localhost:4000");
81 } else {
82 println!(" [ERROR] LiteLLM Proxy: Not running");
83 }
84
85 if let Ok(home) = std::env::var("HOME") {
87 let config_path = PathBuf::from(home).join(".config/mermaid/config.toml");
88 if config_path.exists() {
89 println!(" [OK] Configuration: {}", config_path.display());
90 } else {
91 println!(" [WARNING] Configuration: Not found (using defaults)");
92 }
93 }
94
95 if which::which("podman-compose").is_ok() {
97 println!(" [OK] Container Runtime: Podman Compose");
98 } else if which::which("podman").is_ok() {
99 println!(" [OK] Container Runtime: Podman");
100 } else if which::which("docker-compose").is_ok() {
101 println!(" [OK] Container Runtime: Docker Compose");
102 } else if which::which("docker").is_ok() {
103 println!(" [OK] Container Runtime: Docker");
104 } else {
105 println!(" [ERROR] Container Runtime: Not found (Podman or Docker required)");
106 }
107
108 println!("\n Environment:");
110 if std::env::var("OPENAI_API_KEY").is_ok() {
111 println!(" • OPENAI_API_KEY: Set");
112 }
113 if std::env::var("ANTHROPIC_API_KEY").is_ok() {
114 println!(" • ANTHROPIC_API_KEY: Set");
115 }
116 if std::env::var("GROQ_API_KEY").is_ok() {
117 println!(" • GROQ_API_KEY: Set");
118 }
119 if std::env::var("LITELLM_MASTER_KEY").is_ok() {
120 println!(" • LITELLM_MASTER_KEY: Set");
121 }
122
123 println!();
124 Ok(())
125}