greppy/cli/
search.rs

//! Search command implementation

use crate::ai::{claude::ClaudeClient, gemini::GeminiClient};
use crate::auth::{self, Provider};
use crate::cli::{OutputFormat, SearchArgs};
use crate::core::error::Result;
use crate::core::project::Project;
use crate::daemon::client;
use crate::index::TantivyIndex;
use crate::output::format_results;
use crate::search::SearchQuery;
use std::env;
use tracing::debug;

15/// Run the search command
16pub async fn run(args: SearchArgs) -> Result<()> {
17    let project_path = args
18        .project
19        .clone()
20        .unwrap_or_else(|| env::current_dir().expect("Failed to get current directory"));
21
22    let project = Project::detect(&project_path)?;
23    let format = if args.json {
24        OutputFormat::Json
25    } else {
26        OutputFormat::Human
27    };
28
29    // Direct mode: BM25 only
30    if args.direct {
31        return run_direct_search(&args, &project, format).await;
32    }
33
34    // Semantic mode: check OAuth, search, then AI
35    run_semantic_search(&args, &project, format).await
36}
37
38/// Direct BM25 search (no AI)
39async fn run_direct_search(
40    args: &SearchArgs,
41    project: &Project,
42    format: OutputFormat,
43) -> Result<()> {
44    // Try daemon first
45    if let Ok(true) = client::is_running() {
46        debug!("Using daemon for direct search");
47        if let Ok(results) = client::search(&args.query, &project.root, args.limit).await {
48            print!("{}", format_results(&results, format));
49            return Ok(());
50        }
51        debug!("Daemon search failed, falling back to direct");
52    }
53
54    // Direct mode (blocking, but fine for CLI)
55    let index = TantivyIndex::open(&project.root)?;
56    let query = SearchQuery::new(&args.query).with_limit(args.limit);
57    let results = query.execute(&index)?;
58    print!("{}", format_results(&results, format));
59
60    Ok(())
61}
62
63/// Semantic search (BM25 + AI reranking)
64async fn run_semantic_search(
65    args: &SearchArgs,
66    project: &Project,
67    format: OutputFormat,
68) -> Result<()> {
69    // Check which provider is authenticated
70    let providers = auth::get_authenticated_providers();
71
72    if providers.is_empty() {
73        eprintln!("Not logged in. Run 'greppy login' to enable semantic search.");
74        eprintln!("Using direct BM25 search instead.\n");
75        return run_direct_search(args, project, format).await;
76    }
77
78    // Get BM25 results first (fetch more than needed for reranking)
79    let fetch_limit = (args.limit * 2).min(20); // Fetch 2x for better reranking, max 20
80    let mut results = if let Ok(true) = client::is_running() {
81        debug!("Using daemon for search");
82        client::search(&args.query, &project.root, fetch_limit).await?
83    } else {
84        let index = TantivyIndex::open(&project.root)?;
85        let query = SearchQuery::new(&args.query).with_limit(fetch_limit);
86        query.execute(&index)?
87    };
88
89    // If no results, nothing to rerank
90    if results.results.is_empty() {
91        println!("No results found for: {}", args.query);
92        return Ok(());
93    }
94
95    // Build chunks for reranking
96    let chunks: Vec<String> = results
97        .results
98        .iter()
99        .map(|r| {
100            format!(
101                "// {}\n{}",
102                r.path,
103                r.content.chars().take(1500).collect::<String>()
104            )
105        })
106        .collect();
107
108    // Call AI to rerank
109    let indices = if providers.contains(&Provider::Anthropic) {
110        let token = auth::get_anthropic_token()?;
111        let client = ClaudeClient::new(token);
112        client.rerank(&args.query, &chunks).await?
113    } else {
114        let token = auth::get_google_token()?;
115        let client = GeminiClient::new(token);
116        client.rerank(&args.query, &chunks).await?
117    };
118
119    // Reorder results based on AI ranking
120    let original_results = std::mem::take(&mut results.results);
121    for &idx in indices.iter().take(args.limit) {
122        if idx < original_results.len() {
123            results.results.push(original_results[idx].clone());
124        }
125    }
126
127    // If AI returned fewer indices than requested, fill with remaining
128    if results.results.len() < args.limit {
129        for (i, result) in original_results.into_iter().enumerate() {
130            if !indices.contains(&i) && results.results.len() < args.limit {
131                results.results.push(result);
132            }
133        }
134    }
135
136    // Output same format as direct search
137    print!("{}", format_results(&results, format));
138
139    Ok(())
140}