//! mermaid_cli/ollama/cloud_setup.rs — Ollama Cloud API-key detection and interactive setup.

1use anyhow::Result;
2use std::io::{self, Write};
3
4use crate::app::{get_config_dir, load_config, save_config};
5
6/// Check if Ollama cloud is configured (API key exists)
7pub fn is_cloud_configured() -> bool {
8    // Check environment variable first
9    if std::env::var("OLLAMA_API_KEY").is_ok() {
10        return true;
11    }
12
13    // Check config file
14    if let Ok(config) = load_config() {
15        return config.ollama.cloud_api_key.is_some();
16    }
17
18    false
19}
20
21/// Interactive setup flow for Ollama cloud API key
22/// Returns true if setup was completed successfully
23pub fn setup_cloud_interactive() -> Result<bool> {
24    println!("\n=== Ollama Cloud Setup ===\n");
25    println!("Ollama Cloud allows you to run large models on datacenter-grade hardware.");
26    println!("Cloud models use the :cloud suffix (e.g., kimi-k2-thinking:cloud)\n");
27    println!("To get started:");
28    println!("  1. Visit https://ollama.com/cloud");
29    println!("  2. Sign in or create an account");
30    println!("  3. Generate an API key\n");
31
32    print!("Would you like to set up Ollama Cloud now? [Y/n]: ");
33    io::stdout().flush()?;
34
35    let mut input = String::new();
36    io::stdin().read_line(&mut input)?;
37    let input = input.trim().to_lowercase();
38
39    if input == "n" || input == "no" {
40        println!("\nSkipping cloud setup. You can set it up later with:");
41        println!("  export OLLAMA_API_KEY=your_key_here");
42        println!("  Or add it to ~/.config/mermaid/config.toml");
43        return Ok(false);
44    }
45
46    // Prompt for API key
47    print!("\nEnter your Ollama Cloud API key: ");
48    io::stdout().flush()?;
49
50    let mut api_key = String::new();
51    io::stdin().read_line(&mut api_key)?;
52    let api_key = api_key.trim();
53
54    if api_key.is_empty() {
55        println!("\nNo API key provided. Setup cancelled.");
56        return Ok(false);
57    }
58
59    // Validate API key format (basic check)
60    if api_key.len() < 10 {
61        println!("\nAPI key seems too short. Please check and try again.");
62        return Ok(false);
63    }
64
65    // Load or create config
66    let mut config = load_config().unwrap_or_default();
67
68    // Save API key to config
69    config.ollama.cloud_api_key = Some(api_key.to_string());
70
71    // Save config file
72    let config_path = get_config_dir()?.join("config.toml");
73    save_config(&config, Some(config_path.clone()))?;
74
75    println!("\n✓ Ollama Cloud API key saved to: {}", config_path.display());
76    println!("\nYou can now use cloud models with the :cloud suffix:");
77    println!("  :model kimi-k2-thinking:cloud");
78    println!("  :model qwen3-coder:480b-cloud");
79    println!("  :model deepseek-v3.1:671b-cloud\n");
80
81    Ok(true)
82}
83
/// Check if a model name requires cloud access
///
/// Cloud models are tagged either as `name:cloud` or with a size-qualified
/// tag ending in `-cloud` (e.g. `qwen3-coder:480b-cloud`,
/// `deepseek-v3.1:671b-cloud`) — both forms are advertised by the setup
/// flow in this file. The previous `ends_with(":cloud")` check missed the
/// `-cloud` form, so those models never triggered the cloud-setup prompt.
pub fn is_cloud_model(model_name: &str) -> bool {
    model_name.ends_with(":cloud") || model_name.ends_with("-cloud")
}
88
89/// Prompt user to set up cloud if trying to use a cloud model without API key
90pub fn prompt_cloud_setup_if_needed(model_name: &str) -> Result<bool> {
91    if !is_cloud_model(model_name) {
92        return Ok(true); // Not a cloud model, proceed
93    }
94
95    if is_cloud_configured() {
96        return Ok(true); // Already configured, proceed
97    }
98
99    // Cloud model requested but not configured
100    println!("\n⚠ Cloud model requested but Ollama Cloud is not configured.");
101    println!("   Model: {}\n", model_name);
102
103    setup_cloud_interactive()
104}