use crate::ai::{claude::ClaudeClient, gemini::GeminiClient};
use crate::auth::{self, Provider};
use crate::cli::login::{get_ollama_client, is_ollama_configured};
use crate::cli::{OutputFormat, SearchArgs};
use crate::core::error::Result;
use crate::core::project::Project;
use crate::daemon::client;
use crate::index::TantivyIndex;
use crate::output::format_results;
use crate::search::SearchQuery;
use std::env;
use tracing::debug;

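/// Runs the `greppy search` command: resolve the project root, pick an output
/// format, and dispatch to direct (BM25) or semantic search.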
pub async fn run(args: SearchArgs) -> Result<()> {
    let project_path = args
        .project
        .clone()
        .unwrap_or_else(|| env::current_dir().expect("Failed to get current directory"));

    let project = Project::detect(&project_path)?;
    let format = if args.json {
        OutputFormat::Json
    } else {
        OutputFormat::Human
    };

    if args.direct {
        return run_direct_search(&args, &project, format).await;
    }

    run_semantic_search(&args, &project, format).await
}

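/// Plain BM25 search. Prefers the running daemon and falls back to opening
/// the on-disk Tantivy index directly if the daemon is unavailable or its
/// search fails.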
async fn run_direct_search(
    args: &SearchArgs,
    project: &Project,
    format: OutputFormat,
) -> Result<()> {
    if let Ok(true) = client::is_running() {
        debug!("Using daemon for direct search");
        if let Ok(results) = client::search(&args.query, &project.root, args.limit).await {
            print!("{}", format_results(&results, format));
            return Ok(());
        }
        debug!("Daemon search failed, falling back to direct");
    }

    let index = TantivyIndex::open(&project.root)?;
    let query = SearchQuery::new(&args.query).with_limit(args.limit);
    let results = query.execute(&index)?;
    print!("{}", format_results(&results, format));

    Ok(())
}

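/// Semantic search: fetch an over-sized BM25 candidate set, rerank it with a
/// configured AI provider (Ollama, Anthropic, or Google), and print the top
/// results. Falls back to direct search when nothing is authenticated.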
async fn run_semantic_search(
    args: &SearchArgs,
    project: &Project,
    format: OutputFormat,
) -> Result<()> {
    let providers = auth::get_authenticated_providers();
    let ollama_configured = is_ollama_configured();

    if providers.is_empty() && !ollama_configured {
        eprintln!("Not logged in. Run 'greppy login' to enable semantic search.");
        eprintln!("Using direct BM25 search instead.\n");
        return run_direct_search(args, project, format).await;
    }

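    // Over-fetch candidates (double the requested limit, capped at 20) so the
    // reranker has a wider pool to choose from than the final result count.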
    let fetch_limit = (args.limit * 2).min(20);
    let mut results = if let Ok(true) = client::is_running() {
        debug!("Using daemon for search");
        client::search(&args.query, &project.root, fetch_limit).await?
    } else {
        let index = TantivyIndex::open(&project.root)?;
        let query = SearchQuery::new(&args.query).with_limit(fetch_limit);
        query.execute(&index)?
    };

    if results.results.is_empty() {
        println!("No results found for: {}", args.query);
        return Ok(());
    }

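    // Build one snippet per hit for the reranker: the file path as a comment
    // header plus the first 1500 characters of the chunk's content.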
    let chunks: Vec<String> = results
        .results
        .iter()
        .map(|r| {
            format!(
                "// {}\n{}",
                r.path,
                r.content.chars().take(1500).collect::<String>()
            )
        })
        .collect();

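    // Pick a reranker. A configured local Ollama wins; if its client cannot
    // be constructed, the original BM25 order is kept as-is. Without Ollama,
    // Anthropic is tried before Google.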
    let indices = if ollama_configured {
        if let Some(client) = get_ollama_client() {
            debug!("Using Ollama for reranking");
            client.rerank(&args.query, &chunks).await?
        } else {
            (0..chunks.len()).collect()
        }
    } else if providers.contains(&Provider::Anthropic) {
        let token = auth::get_anthropic_token()?;
        let client = ClaudeClient::new(token);
        client.rerank(&args.query, &chunks).await?
    } else {
        let token = auth::get_google_token()?;
        let client = GeminiClient::new(token);
        client.rerank(&args.query, &chunks).await?
    };

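    // Rebuild the result list in the reranker's order, keeping at most
    // `args.limit` entries and skipping any out-of-range indices.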
    let original_results = std::mem::take(&mut results.results);
    for &idx in indices.iter().take(args.limit) {
        if idx < original_results.len() {
            results.results.push(original_results[idx].clone());
        }
    }

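    // If the reranker yielded fewer usable indices than requested, top up
    // with hits the reranker did not select, in their original order.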
    if results.results.len() < args.limit {
        for (i, result) in original_results.into_iter().enumerate() {
            if !indices.contains(&i) && results.results.len() < args.limit {
                results.results.push(result);
            }
        }
    }

    print!("{}", format_results(&results, format));

    Ok(())
}