use anyhow::Result;
2
3use crate::{
4 app::{get_config_dir, init_config},
5 models::ModelFactory,
6 ollama::{is_installed as is_ollama_installed, list_models as get_ollama_models},
7};
8
9use super::Commands;
10
11pub async fn handle_command(command: &Commands) -> Result<bool> {
15 match command {
16 Commands::Init => {
17 println!("Initializing Mermaid configuration...");
18 init_config()?;
19 println!("Configuration initialized successfully!");
20 Ok(true)
21 },
22 Commands::List => {
23 list_models().await?;
24 Ok(true)
25 },
26 Commands::Version => {
27 show_version();
28 Ok(true)
29 },
30 Commands::Status => {
31 show_status().await?;
32 Ok(true)
33 },
34 Commands::Chat => Ok(false), Commands::Run { .. } => Ok(false), }
37}
38
39pub async fn list_models() -> Result<()> {
41 let models = ModelFactory::list_all_models().await?;
42
43 if models.is_empty() {
44 println!("No models found across any backends");
45 } else {
46 println!("Available models:");
47 for model in models {
48 println!(" - {}", model);
49 }
50 }
51 Ok(())
52}
53
54pub fn show_version() {
56 println!("Mermaid v{}", env!("CARGO_PKG_VERSION"));
57 println!(" An open-source, model-agnostic AI pair programmer");
58}
59
60async fn show_status() -> Result<()> {
62 println!("Mermaid Status:");
63 println!();
64
65 let backends = ModelFactory::available_providers().await;
67 if backends.is_empty() {
68 println!(" [WARNING] Backends: None available");
69 } else {
70 println!(" [OK] Backends: {}", backends.join(", "));
71 }
72
73 if is_ollama_installed() {
75 let models = get_ollama_models().unwrap_or_default();
76 if models.is_empty() {
77 println!(" [WARNING] Ollama: Installed (no models)");
78 } else {
79 println!(" [OK] Ollama: Running ({} models installed)", models.len());
80 for model in models.iter().take(3) {
81 println!(" - {}", model);
82 }
83 if models.len() > 3 {
84 println!(" ... and {} more", models.len() - 3);
85 }
86 }
87 } else {
88 println!(" [ERROR] Ollama: Not installed");
89 }
90
91 if let Ok(config_dir) = get_config_dir() {
93 let config_path = config_dir.join("config.toml");
94 if config_path.exists() {
95 println!(" [OK] Configuration: {}", config_path.display());
96 } else {
97 println!(" [WARNING] Configuration: Not found (using defaults)");
98 }
99 }
100
101 println!("\n Environment:");
103 if std::env::var("OPENROUTER_API_KEY").is_ok() {
104 println!(" - OPENROUTER_API_KEY: Set");
105 }
106 if std::env::var("OLLAMA_API_KEY").is_ok() {
107 println!(" - OLLAMA_API_KEY: Set (for Ollama Cloud)");
108 }
109
110 println!();
111 Ok(())
112}