greppy/cli/search.rs

//! Search command implementation
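//!
//! Two paths are implemented below: a direct BM25 search over the Tantivy
//! index (optionally served by the daemon), and a semantic search that takes
//! the BM25 candidates and reranks them with an AI provider (Ollama, Claude,
//! or Gemini).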

use crate::ai::{claude::ClaudeClient, gemini::GeminiClient};
use crate::auth::{self, Provider};
use crate::cli::login::{get_ollama_client, is_ollama_configured};
use crate::cli::{OutputFormat, SearchArgs};
use crate::core::error::Result;
use crate::core::project::Project;
use crate::daemon::client;
use crate::index::TantivyIndex;
use crate::output::format_results;
use crate::search::SearchQuery;
use std::env;
use tracing::debug;

/// Run the search command
pub async fn run(args: SearchArgs) -> Result<()> {
    let project_path = args
        .project
        .clone()
        .unwrap_or_else(|| env::current_dir().expect("Failed to get current directory"));

    let project = Project::detect(&project_path)?;
    let format = if args.json {
        OutputFormat::Json
    } else {
        OutputFormat::Human
    };

    // Direct mode: BM25 only
    if args.direct {
        return run_direct_search(&args, &project, format).await;
    }

    // Semantic mode: check auth (OAuth or Ollama), search, then AI rerank
    run_semantic_search(&args, &project, format).await
}

/// Direct BM25 search (no AI)
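///
/// Tries the daemon first and falls back to an in-process Tantivy search
/// if the daemon is not running or the daemon request fails.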
async fn run_direct_search(
    args: &SearchArgs,
    project: &Project,
    format: OutputFormat,
) -> Result<()> {
    // Try daemon first
    if let Ok(true) = client::is_running() {
        debug!("Using daemon for direct search");
        if let Ok(results) = client::search(&args.query, &project.root, args.limit).await {
            print!("{}", format_results(&results, format));
            return Ok(());
        }
        debug!("Daemon search failed, falling back to direct");
    }

    // Direct mode (blocking, but fine for CLI)
    let index = TantivyIndex::open(&project.root)?;
    let query = SearchQuery::new(&args.query).with_limit(args.limit);
    let results = query.execute(&index)?;
    print!("{}", format_results(&results, format));

    Ok(())
}

/// Semantic search (BM25 + AI reranking)
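///
/// Pipeline: verify an AI provider is available (Ollama or an OAuth login),
/// fetch BM25 candidates (via the daemon or in-process), build path-prefixed
/// chunks, ask the provider to rerank them, then reorder and top up the
/// results. Falls back to plain BM25 search when no provider is configured.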
async fn run_semantic_search(
    args: &SearchArgs,
    project: &Project,
    format: OutputFormat,
) -> Result<()> {
    // Check which provider is authenticated (OAuth or Ollama)
    let providers = auth::get_authenticated_providers();
    let ollama_configured = is_ollama_configured();

    if providers.is_empty() && !ollama_configured {
        eprintln!("Not logged in. Run 'greppy login' to enable semantic search.");
        eprintln!("Using direct BM25 search instead.\n");
        return run_direct_search(args, project, format).await;
    }

    // Get BM25 results first (fetch more than needed for reranking)
    let fetch_limit = (args.limit * 2).min(20).max(args.limit); // 2x for reranking, capped at 20 but never below the requested limit
    let mut results = if let Ok(true) = client::is_running() {
        debug!("Using daemon for search");
        client::search(&args.query, &project.root, fetch_limit).await?
    } else {
        let index = TantivyIndex::open(&project.root)?;
        let query = SearchQuery::new(&args.query).with_limit(fetch_limit);
        query.execute(&index)?
    };

    // If no results, nothing to rerank
    if results.results.is_empty() {
        println!("No results found for: {}", args.query);
        return Ok(());
    }

    // Build chunks for reranking
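    // Each chunk is the file path as a comment header followed by the first
    // 1,500 characters of the result content.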
    let chunks: Vec<String> = results
        .results
        .iter()
        .map(|r| {
            format!(
                "// {}\n{}",
                r.path,
                r.content.chars().take(1500).collect::<String>()
            )
        })
        .collect();

    // Call AI to rerank - check Ollama first (local), then OAuth providers
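    // If Ollama is configured but no client can be constructed, keep the
    // original BM25 order rather than falling through to an OAuth provider.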
    let indices = if ollama_configured {
        if let Some(client) = get_ollama_client() {
            debug!("Using Ollama for reranking");
            client.rerank(&args.query, &chunks).await?
        } else {
            // Fallback to BM25 order
            (0..chunks.len()).collect()
        }
    } else if providers.contains(&Provider::Anthropic) {
        let token = auth::get_anthropic_token()?;
        let client = ClaudeClient::new(token);
        client.rerank(&args.query, &chunks).await?
    } else {
        let token = auth::get_google_token()?;
        let client = GeminiClient::new(token);
        client.rerank(&args.query, &chunks).await?
    };

    // Reorder results based on AI ranking
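    // `indices` are positions into the BM25-ordered chunk list; any index the
    // model returns out of range is skipped.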
    let original_results = std::mem::take(&mut results.results);
    for &idx in indices.iter().take(args.limit) {
        if idx < original_results.len() {
            results.results.push(original_results[idx].clone());
        }
    }

    // If AI returned fewer indices than requested, fill with remaining
    if results.results.len() < args.limit {
        for (i, result) in original_results.into_iter().enumerate() {
            if !indices.contains(&i) && results.results.len() < args.limit {
                results.results.push(result);
            }
        }
    }

    // Output same format as direct search
    print!("{}", format_results(&results, format));

    Ok(())
}