// mermaid_cli/ollama/cloud_setup.rs
use anyhow::Result;
use std::io::{self, Write};

use crate::app::{get_config_dir, load_config, save_config};

6pub fn is_cloud_configured() -> bool {
8 if std::env::var("OLLAMA_API_KEY").is_ok() {
10 return true;
11 }
12
13 if let Ok(config) = load_config() {
15 return config.ollama.cloud_api_key.is_some();
16 }
17
18 false
19}
20
21pub fn setup_cloud_interactive() -> Result<bool> {
24 println!("\n=== Ollama Cloud Setup ===\n");
25 println!("Ollama Cloud allows you to run large models on datacenter-grade hardware.");
26 println!("Cloud models use the :cloud suffix (e.g., kimi-k2-thinking:cloud)\n");
27 println!("To get started:");
28 println!(" 1. Visit https://ollama.com/cloud");
29 println!(" 2. Sign in or create an account");
30 println!(" 3. Generate an API key\n");
31
32 print!("Would you like to set up Ollama Cloud now? [Y/n]: ");
33 io::stdout().flush()?;
34
35 let mut input = String::new();
36 io::stdin().read_line(&mut input)?;
37 let input = input.trim().to_lowercase();
38
39 if input == "n" || input == "no" {
40 println!("\nSkipping cloud setup. You can set it up later with:");
41 println!(" export OLLAMA_API_KEY=your_key_here");
42 println!(" Or add it to ~/.config/mermaid/config.toml");
43 return Ok(false);
44 }
45
46 print!("\nEnter your Ollama Cloud API key: ");
48 io::stdout().flush()?;
49
50 let mut api_key = String::new();
51 io::stdin().read_line(&mut api_key)?;
52 let api_key = api_key.trim();
53
54 if api_key.is_empty() {
55 println!("\nNo API key provided. Setup cancelled.");
56 return Ok(false);
57 }
58
59 if api_key.len() < 10 {
61 println!("\nAPI key seems too short. Please check and try again.");
62 return Ok(false);
63 }
64
65 let mut config = load_config().unwrap_or_default();
67
68 config.ollama.cloud_api_key = Some(api_key.to_string());
70
71 let config_path = get_config_dir()?.join("config.toml");
73 save_config(&config, Some(config_path.clone()))?;
74
75 println!("\n✓ Ollama Cloud API key saved to: {}", config_path.display());
76 println!("\nYou can now use cloud models with the :cloud suffix:");
77 println!(" :model kimi-k2-thinking:cloud");
78 println!(" :model qwen3-coder:480b-cloud");
79 println!(" :model deepseek-v3.1:671b-cloud\n");
80
81 Ok(true)
82}
83
/// Returns `true` when `model_name` refers to an Ollama Cloud model.
///
/// Cloud models are tagged either with a literal `:cloud` tag
/// (e.g. `kimi-k2-thinking:cloud`) or with a tag ending in `-cloud`
/// (e.g. `qwen3-coder:480b-cloud`, `deepseek-v3.1:671b-cloud`).
pub fn is_cloud_model(model_name: &str) -> bool {
    // Fix: names like `qwen3-coder:480b-cloud` previously slipped through
    // because only the exact `:cloud` suffix was checked, so the cloud-setup
    // prompt was never triggered for them.
    model_name.ends_with(":cloud") || model_name.ends_with("-cloud")
}
88
89pub fn prompt_cloud_setup_if_needed(model_name: &str) -> Result<bool> {
91 if !is_cloud_model(model_name) {
92 return Ok(true); }
94
95 if is_cloud_configured() {
96 return Ok(true); }
98
99 println!("\n⚠ Cloud model requested but Ollama Cloud is not configured.");
101 println!(" Model: {}\n", model_name);
102
103 setup_cloud_interactive()
104}