greppy/cli/login.rs

//! Login command implementation
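//!
//! Supports Claude (Anthropic) and Gemini (Google) via OAuth, plus Ollama
//! for fully local models; the chosen provider is persisted in `Config`.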

use crate::ai::ollama::OllamaClient;
use crate::auth::{self, Provider};
use crate::core::config::{AiProvider, Config};
use crate::core::error::Result;
use dialoguer::{theme::ColorfulTheme, Input, Select};
use std::io::Write;

/// Run the login command: lets the user choose a provider with the arrow keys
pub async fn run() -> Result<()> {
    // Check if already logged in
    let providers = auth::get_authenticated_providers();
    let config = Config::load()?;
    let has_ollama = config.ai.provider == AiProvider::Ollama;

    // Show current config if any
    if !providers.is_empty() || has_ollama {
        println!("Current providers:");
        for p in &providers {
            match p {
                Provider::Anthropic => println!("  ✓ Claude (Anthropic)"),
                Provider::Google => println!("  ✓ Gemini (Google)"),
            }
        }
        if has_ollama {
            println!("  ✓ Ollama ({})", config.ai.ollama_model);
        }
        println!();
    }

    // Interactive selection - show what's available to add
    let mut options: Vec<&str> = Vec::new();

    // Always show Claude option
    if providers.contains(&Provider::Anthropic) {
        options.push("Claude (Anthropic) ✓ configured");
    } else {
        options.push("Claude (Anthropic) - OAuth, free tier");
    }

    // Always show Gemini option
    if providers.contains(&Provider::Google) {
        options.push("Gemini (Google) ✓ configured");
    } else {
        options.push("Gemini (Google) - OAuth, free tier");
    }

    // Always show Ollama option
    let ollama_label = if has_ollama {
        format!("Ollama ({}) ✓ configured", config.ai.ollama_model)
    } else {
        "Ollama (Local) - No internet, runs on your machine".to_string()
    };
    options.push(&ollama_label);

    options.push("Cancel");

    let selection = Select::with_theme(&ColorfulTheme::default())
        .with_prompt("Add/configure AI provider")
        .items(&options)
        .default(0)
        .interact()
        .map_err(|e| crate::core::error::Error::DaemonError {
            message: format!("Selection failed: {}", e),
        })?;
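
    // NOTE: these arm indices are keyed to the insertion order of `options`
    // above (0 = Claude, 1 = Gemini, 2 = Ollama, 3 = Cancel); keep them in sync.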
    match selection {
        0 => {
            // Claude
            println!("\nAuthenticating with Claude...\n");
            auth::login_anthropic().await?;
            // Set as active provider
            let mut config = Config::load()?;
            config.ai.provider = AiProvider::Claude;
            config.save()?;
        }
        1 => {
            // Gemini
            println!("\nAuthenticating with Google...\n");
            auth::login_google().await?;
            let mut config = Config::load()?;
            config.ai.provider = AiProvider::Gemini;
            config.save()?;
        }
        2 => {
            // Ollama
            setup_ollama().await?;
        }
        3 => {
            // Cancel
            println!("Cancelled.");
            return Ok(());
        }
        _ => unreachable!(),
    }

    println!("\nYou can now use semantic search:");
    println!("  greppy search \"your query\"");
    println!("\nSwitch models anytime with: greppy model");

    Ok(())
}

/// Set up Ollama as the local model provider
async fn setup_ollama() -> Result<()> {
    println!("\nSetting up Ollama (local AI)...\n");

    // Check if Ollama is running
    let client = OllamaClient::new();

    print!("Checking Ollama connection... ");
    // Flush so the progress message appears before the async check completes
    std::io::stdout().flush().ok();
    if !client.is_available().await {
        println!("NOT FOUND\n");
        println!("Ollama is not running. Please:");
        println!("  1. Install Ollama: https://ollama.ai");
        println!("  2. Start Ollama: ollama serve");
        println!("  3. Pull a model: ollama pull qwen2.5-coder:0.5b");
        println!("  4. Run 'greppy login' again");
        return Err(crate::core::error::Error::ConfigError {
            message: "Ollama not available".to_string(),
        });
    }
    println!("OK\n");

    // List available models
    let models = client.list_models().await.unwrap_or_default();

    if models.is_empty() {
        println!("No models found. Please pull a model first:");
        println!("  ollama pull qwen2.5-coder:0.5b   # Small, fast (400MB)");
        println!("  ollama pull codellama:7b         # Better quality (4GB)");
        println!("  ollama pull deepseek-coder:6.7b  # Code-focused (4GB)");
        return Err(crate::core::error::Error::ConfigError {
            message: "No Ollama models available".to_string(),
        });
    }

    println!("Available models:");
    let model_names: Vec<&str> = models.iter().map(|m| m.name.as_str()).collect();

    let selection = Select::with_theme(&ColorfulTheme::default())
        .with_prompt("Select model for AI reranking")
        .items(&model_names)
        .default(0)
        .interact()
        .map_err(|e| crate::core::error::Error::DaemonError {
            message: format!("Selection failed: {}", e),
        })?;

    let selected_model = model_names[selection].to_string();

    // Ask for a custom URL (advanced)
    let url: String = Input::with_theme(&ColorfulTheme::default())
        .with_prompt("Ollama URL")
        .default("http://localhost:11434".to_string())
        .interact_text()
        .map_err(|e| crate::core::error::Error::DaemonError {
            message: format!("Input failed: {}", e),
        })?;

    // Test the model
    print!("\nTesting model '{}'... ", selected_model);
    std::io::stdout().flush().ok();
    let test_client = OllamaClient::with_config(&url, &selected_model);

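    // Smoke-test the selected model with a tiny rerank request so a missing
    // or unloaded model fails here instead of during the first real search.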
    match test_client
        .rerank(
            "test query",
            &["code snippet 1".to_string(), "code snippet 2".to_string()],
        )
        .await
    {
        Ok(_) => println!("OK"),
        Err(e) => {
            println!("FAILED\n");
            println!("Error: {}", e);
            println!("\nThe model may need to be loaded first. Try:");
            println!("  ollama run {}", selected_model);
            return Err(e);
        }
    }

    // Save to config
    let mut config = Config::load()?;
    config.ai.provider = AiProvider::Ollama;
    config.ai.ollama_model = selected_model.clone();
    config.ai.ollama_url = url;
    config.save()?;

    println!("\nOllama configured successfully!");
    println!("  Model: {}", selected_model);

    Ok(())
}

/// Run the logout command: remove all stored credentials
pub fn logout() -> Result<()> {
    let providers = auth::get_authenticated_providers();
    let config = Config::load()?;
    let has_ollama = config.ai.provider == AiProvider::Ollama;

    if providers.is_empty() && !has_ollama {
        println!("Not logged in to any provider.");
        return Ok(());
    }

    println!("Logging out from:");
    for p in &providers {
        match p {
            Provider::Anthropic => println!("  - Claude (Anthropic)"),
            Provider::Google => println!("  - Gemini (Google)"),
        }
    }
    if has_ollama {
        println!("  - Ollama (local)");
    }

    // Clear OAuth tokens
    auth::logout()?;

    // Deactivate Ollama by resetting the provider to the default (Claude);
    // the stored model/URL values are left in place
    if has_ollama {
        let mut config = Config::load()?;
        config.ai.provider = AiProvider::Claude;
        config.save()?;
    }

    println!("\nSuccessfully logged out.");
    Ok(())
}

/// Check if user is authenticated with any provider (including Ollama)
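///
/// Call-site sketch (marked `ignore`; assumes this module is reachable as
/// `greppy::cli::login`):
///
/// ```ignore
/// if !greppy::cli::login::is_authenticated() {
///     eprintln!("No AI provider configured; run `greppy login` first.");
/// }
/// ```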
pub fn is_authenticated() -> bool {
    if !auth::get_authenticated_providers().is_empty() {
        return true;
    }
    // Check if Ollama is configured
    if let Ok(config) = Config::load() {
        if config.ai.provider == AiProvider::Ollama {
            return true;
        }
    }
    false
}

/// Get the preferred OAuth provider (Anthropic over Google); Ollama is not
/// an OAuth provider, so it is never returned here
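///
/// Fallback-order sketch (hypothetical call site; assumes the crate is
/// importable as `greppy`):
///
/// ```ignore
/// use greppy::{auth::Provider, cli::login};
///
/// match login::get_preferred_provider() {
///     Some(Provider::Anthropic) => println!("Using Claude"),
///     Some(Provider::Google) => println!("Using Gemini"),
///     None if login::is_ollama_configured() => println!("Using local Ollama"),
///     None => println!("Run `greppy login` to configure a provider."),
/// }
/// ```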
pub fn get_preferred_provider() -> Option<Provider> {
    let providers = auth::get_authenticated_providers();
    if providers.contains(&Provider::Anthropic) {
        Some(Provider::Anthropic)
    } else if providers.contains(&Provider::Google) {
        Some(Provider::Google)
    } else {
        None
    }
}

/// Check if Ollama is configured as the provider
pub fn is_ollama_configured() -> bool {
    Config::load()
        .map(|c| c.ai.provider == AiProvider::Ollama)
        .unwrap_or(false)
}

/// Get Ollama client if configured
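///
/// Intended use (sketch; `snippets` is a hypothetical `Vec<String>`, and
/// `rerank` is invoked as in `setup_ollama` above):
///
/// ```ignore
/// if let Some(client) = get_ollama_client() {
///     let ranked = client.rerank("user query", &snippets).await?;
/// }
/// ```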
pub fn get_ollama_client() -> Option<OllamaClient> {
    let config = Config::load().ok()?;
    if config.ai.provider == AiProvider::Ollama {
        Some(OllamaClient::with_config(
            &config.ai.ollama_url,
            &config.ai.ollama_model,
        ))
    } else {
        None
    }
}