use crate::args::Cli;
use crate::commands::graph::loader::{GraphLoadConfig, load_unified_graph_for_cli};
use crate::index_discovery::find_nearest_index;
use crate::output::{
DisplaySymbol, FormatterMetadata, JsonSymbol, OutputStreams, create_formatter,
};
use crate::progress::PlainProgressReporter;
use anyhow::{Context, Result};
use regex::RegexBuilder;
use sqry_core::graph::unified::concurrent::CodeGraph;
use sqry_core::graph::unified::node::{NodeId, NodeKind};
use sqry_core::graph::unified::storage::metadata::MacroNodeMetadata;
use sqry_core::json_response::{Filters, FuzzyFilters, Stats, StreamEvent};
use sqry_core::progress::{ProgressStage, SharedReporter};
use sqry_core::search::fuzzy::{CandidateGenerator, FuzzyConfig};
use sqry_core::search::matcher::{FuzzyMatcher, MatchAlgorithm, MatchConfig};
use sqry_core::search::trigram::TrigramIndex;
use std::collections::{BTreeMap, HashMap};
use std::io::Write as _;
use std::path::{Path, PathBuf};
use std::sync::{Arc, OnceLock};
use std::time::{Duration, Instant};
/// A display-ready symbol paired with its fuzzy-match score.
type ScoredSymbol = (DisplaySymbol, f64);
/// Upper bound applied to each individual daemon RPC (status / search) made
/// while attempting the daemon fast path.
const DAEMON_SEARCH_RPC_TIMEOUT: Duration = Duration::from_millis(250);
/// Decides verbosity from SQRY_LOG / RUST_LOG directives.
///
/// A directive enables verbosity if it is a bare level of `info`/`debug`/
/// `trace`, or a `target=level` pair whose target is one of this crate's
/// relevant targets with such a level. Unparseable or unrelated directives
/// are ignored.
fn verbose_from_env() -> bool {
    const VARS: &[&str] = &["SQRY_LOG", "RUST_LOG"];
    const RELEVANT_TARGETS: &[&str] = &["sqry_cli", "sqry_cli::progress"];
    fn level_is_verbose(level: &str) -> bool {
        matches!(
            level.trim().to_ascii_lowercase().as_str(),
            "info" | "debug" | "trace"
        )
    }
    VARS.iter()
        .filter_map(|var| std::env::var(var).ok())
        .any(|value| {
            value
                .split(',')
                .map(str::trim)
                .filter(|directive| !directive.is_empty())
                .any(|directive| match directive.rsplit_once('=') {
                    // `target=level`: only our own targets count.
                    Some((target, level)) => {
                        RELEVANT_TARGETS.contains(&target.trim()) && level_is_verbose(level)
                    }
                    // Bare level applies globally.
                    None => level_is_verbose(directive),
                })
        })
}
/// Emits a one-time stderr note when a reachable sqryd daemon exists but this
/// search is using the in-process load path anyway. Verbose mode only; the
/// output is a JSON event when SQRY_OUTPUT_FORMAT=json, plain text otherwise.
fn maybe_emit_daemon_fallback_diagnostic(verbose: bool) {
    if !verbose {
        return;
    }
    // At most one diagnostic per process.
    static EMITTED: OnceLock<()> = OnceLock::new();
    if EMITTED.get().is_some() {
        return;
    }
    let Some(socket_path) = sqry_daemon::config::DaemonConfig::load()
        .ok()
        .map(|c| c.socket_path())
    else {
        return;
    };
    // Only worth mentioning when the daemon actually answers within the budget.
    if !probe_daemon_reachable_bounded(&socket_path, Duration::from_millis(250)) {
        return;
    }
    // Mark emitted only after a successful probe, so a transiently unreachable
    // daemon does not permanently suppress the note.
    let _ = EMITTED.set(());
    let socket_str = socket_path.display().to_string();
    let mut out = std::io::stderr().lock();
    if std::env::var("SQRY_OUTPUT_FORMAT")
        .ok()
        .is_some_and(|v| v.eq_ignore_ascii_case("json"))
    {
        // Hand-rolled JSON: escape backslashes first, then quotes, so the
        // socket path stays a valid JSON string.
        let escaped = socket_str.replace('\\', "\\\\").replace('"', "\\\"");
        let ts = unix_millis_for_diagnostic();
        let _ = writeln!(
            out,
            "{{\"event\":\"daemon_fallback\",\"socket\":\"{escaped}\",\"ts\":{ts}}}"
        );
    } else {
        let _ = writeln!(
            out,
            "[sqry] note: sqryd is running at {socket_str} but search uses in-process load"
        );
    }
}
/// Returns true when the daemon at `socket_path` answers a status call within
/// `timeout`. The probe runs on a detached thread with its own single-thread
/// tokio runtime so a hung socket can never block the caller: `recv_timeout`
/// abandons the thread once the overall budget elapses.
fn probe_daemon_reachable_bounded(socket_path: &Path, timeout: Duration) -> bool {
    let path = socket_path.to_path_buf();
    let (tx, rx) = std::sync::mpsc::sync_channel::<bool>(1);
    // Split the budget into thirds for connect/handshake/status; guard against
    // a sub-unit total collapsing to a zero phase timeout.
    let phase_timeout = {
        let third = timeout / 3;
        if third.is_zero() { timeout } else { third }
    };
    std::thread::spawn(move || {
        let Ok(rt) = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
        else {
            let _ = tx.send(false);
            return;
        };
        let reachable = rt.block_on(async move {
            let Ok(mut client) = sqry_daemon_client::DaemonClient::connect_with_timeouts(
                &path,
                phase_timeout,
                phase_timeout,
            )
            .await
            else {
                return false;
            };
            // Reachable only if the status RPC both completes and succeeds.
            tokio::time::timeout(phase_timeout, client.status())
                .await
                .is_ok_and(|status_result| status_result.is_ok())
        });
        let _ = tx.send(reachable);
    });
    // Send failures (receiver already timed out) are deliberately ignored.
    rx.recv_timeout(timeout).unwrap_or(false)
}
/// Current wall-clock time as milliseconds since the Unix epoch.
/// Saturates to `u64::MAX` on overflow and falls back to 0 if the system
/// clock reads before the epoch.
fn unix_millis_for_diagnostic() -> u64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(elapsed) => u64::try_from(elapsed.as_millis()).unwrap_or(u64::MAX),
        Err(_) => 0,
    }
}
/// The daemon fast path only serves plain exact searches: no fuzzy, no JSON
/// streaming, and no macro-boundary filtering of any kind.
fn should_attempt_daemon(cli: &Cli, macro_flags: &MacroBoundaryFlags<'_>) -> bool {
    let plain_exact = cli.exact && !cli.fuzzy && !cli.json_stream;
    let no_macro_filtering =
        macro_flags.cfg_filter.is_none() && !macro_flags.macro_boundaries;
    plain_exact && no_macro_filtering
}
/// Writes a `[sqry]`-prefixed line to stderr when verbose mode is on.
/// Write errors are deliberately ignored (diagnostics are best-effort).
fn emit_daemon_verbose(verbose: bool, body: &str) {
    if verbose {
        let mut err = std::io::stderr().lock();
        let _ = writeln!(err, "[sqry] {body}");
    }
}
/// Checks a daemon status envelope for a `Loaded` workspace whose index root
/// is, or contains, the canonicalized `search_path`. Accepts the workspaces
/// array either nested under `result` or at the top level of the envelope.
fn workspace_is_loaded_for(status_envelope: &serde_json::Value, search_path: &Path) -> bool {
    let canonical = match std::fs::canonicalize(search_path) {
        Ok(path) => path,
        Err(_) => return false,
    };
    let nested = status_envelope
        .get("result")
        .and_then(|r| r.get("workspaces"))
        .and_then(|w| w.as_array());
    let top_level = status_envelope.get("workspaces").and_then(|w| w.as_array());
    let Some(workspaces) = nested.or(top_level) else {
        return false;
    };
    workspaces.iter().any(|ws| {
        let loaded = ws.get("state").and_then(|s| s.as_str()) == Some("Loaded");
        if !loaded {
            return false;
        }
        match ws.get("index_root").and_then(|v| v.as_str()) {
            Some(root_str) => {
                let root = PathBuf::from(root_str);
                canonical == root || canonical.starts_with(&root)
            }
            None => false,
        }
    })
}
/// Assembles the daemon search request. Mode is always `Exact` because the
/// daemon path is only taken for plain exact searches.
fn build_daemon_search_request(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    macro_flags: &MacroBoundaryFlags<'_>,
) -> sqry_daemon_protocol::SearchRequest {
    // Kind travels lowercased; limit saturates when usize exceeds u32.
    let kind = cli.kind.map(|k| k.to_string().to_lowercase());
    let limit = cli.limit.map(|l| u32::try_from(l).unwrap_or(u32::MAX));
    sqry_daemon_protocol::SearchRequest {
        envelope_version: sqry_daemon_protocol::ENVELOPE_VERSION,
        pattern: pattern.to_owned(),
        search_path: search_path.to_owned(),
        mode: sqry_daemon_protocol::SearchMode::Exact,
        kind,
        lang: cli.lang.clone(),
        limit,
        include_generated: macro_flags.include_generated,
    }
}
/// Attempts the search via a running sqryd daemon. Returns `None` on any
/// failure (no config, connect/status/search timeout or error, workspace not
/// loaded), signalling the caller to fall back to the in-process path.
fn try_daemon_search(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    macro_flags: &MacroBoundaryFlags<'_>,
    verbose: bool,
) -> Option<sqry_daemon_protocol::SearchResult> {
    let socket_path = sqry_daemon::config::DaemonConfig::load()
        .ok()
        .map(|c| c.socket_path())?;
    // Throwaway single-thread runtime; the whole attempt is synchronous from
    // the caller's perspective.
    let rt = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .ok()?;
    let req = build_daemon_search_request(cli, pattern, search_path, macro_flags);
    let search_path_owned = PathBuf::from(search_path);
    rt.block_on(async move {
        emit_daemon_verbose(
            verbose,
            &format!("attaching to daemon at {}", socket_path.display()),
        );
        let probe = Duration::from_millis(250);
        let mut client = match sqry_daemon_client::DaemonClient::connect_with_timeouts(
            &socket_path,
            probe,
            probe,
        )
        .await
        {
            Ok(c) => c,
            Err(_) => return None,
        };
        // Status must succeed within the RPC budget, and the daemon must have
        // the target workspace loaded, before we trust it with the query.
        let status_val = tokio::time::timeout(DAEMON_SEARCH_RPC_TIMEOUT, client.status())
            .await
            .ok()?
            .ok()?;
        if !workspace_is_loaded_for(&status_val, &search_path_owned) {
            emit_daemon_verbose(verbose, "workspace not loaded in daemon; using in-process");
            return None;
        }
        emit_daemon_verbose(verbose, "attached, querying via daemon");
        let req_value = serde_json::to_value(&req).ok()?;
        let started = Instant::now();
        let resp_value = tokio::time::timeout(
            DAEMON_SEARCH_RPC_TIMEOUT,
            client.send_request("daemon/search", req_value),
        )
        .await
        .ok()?
        .ok()?;
        let elapsed = started.elapsed();
        let envelope: sqry_daemon_protocol::ResponseEnvelope<sqry_daemon_protocol::SearchResult> =
            serde_json::from_value(resp_value).ok()?;
        emit_daemon_verbose(
            verbose,
            &format!("daemon search complete in {}ms", elapsed.as_millis()),
        );
        Some(envelope.result)
    })
}
/// Converts a daemon wire-format search item into the CLI's display symbol,
/// mirroring the metadata keys set by the in-process conversion path.
fn search_item_to_display_symbol(item: sqry_daemon_protocol::SearchItem) -> DisplaySymbol {
    let path = PathBuf::from(&item.file_path);
    // Raw path/language are stashed for formatters that need them verbatim.
    let metadata: HashMap<String, String> = [
        (
            "__raw_file_path".to_string(),
            path.to_string_lossy().to_string(),
        ),
        ("__raw_language".to_string(), item.language.clone()),
    ]
    .into_iter()
    .collect();
    DisplaySymbol {
        name: item.name,
        qualified_name: item.qualified_name,
        kind: item.kind,
        file_path: path,
        start_line: item.start_line as usize,
        start_column: item.start_column as usize,
        end_line: item.end_line as usize,
        end_column: item.end_column as usize,
        metadata,
        caller_identity: None,
        callee_identity: None,
    }
}
/// Renders a daemon search result through the same formatting pipeline as the
/// in-process path: count short-circuit, optional sort, metadata, formatter,
/// and an overflow footer for non-JSON output.
fn finalize_daemon_search(
    cli: &Cli,
    pattern: &str,
    result: sqry_daemon_protocol::SearchResult,
    started: Instant,
) -> Result<()> {
    let total = result.total as usize;
    if cli.count {
        println!("{total} matches found");
        return Ok(());
    }
    let mut display: Vec<DisplaySymbol> = result
        .items
        .into_iter()
        .map(search_item_to_display_symbol)
        .collect();
    if let Some(field) = cli.sort {
        crate::commands::sort::sort_symbols(&mut display, field);
    }
    let limit = cli.limit.unwrap_or(100);
    let elapsed = started.elapsed();
    // Daemon searches carry no scope info or index age.
    let metadata = build_search_metadata(cli, pattern, None, None, total, elapsed);
    let formatter = create_formatter(cli);
    let mut streams = OutputStreams::with_pager(cli.pager_config());
    formatter.format(&display, Some(&metadata), &mut streams)?;
    if !cli.json && total > limit {
        eprintln!("\nShowing {limit} of {total} matches (use --limit to adjust)");
    }
    streams.finish_checked()
}
/// Applies the `--kind` (case-insensitive) and `--lang` (extension-based)
/// filters in place.
fn apply_search_filters(cli: &Cli, symbols: &mut Vec<DisplaySymbol>) {
    if let Some(kind) = cli.kind {
        let wanted = kind.to_string().to_lowercase();
        symbols.retain(|sym| sym.kind.to_lowercase() == wanted);
    }
    if let Some(ref lang) = cli.lang {
        symbols.retain(|sym| {
            // Files without a UTF-8 extension never match a language filter.
            match sym.file_path.extension().and_then(|ext| ext.to_str()) {
                Some(ext) => matches_language(ext, lang),
                None => false,
            }
        });
    }
}
/// Macro-boundary options shared by the search entry points.
#[derive(Debug, Clone, Copy)]
struct MacroBoundaryFlags<'a> {
    /// When set, keep only nodes whose `cfg_condition` equals this value.
    cfg_filter: Option<&'a str>,
    /// When false, drop nodes marked as macro-generated.
    include_generated: bool,
    /// When true, group output rows by their originating macro source.
    macro_boundaries: bool,
}
/// Returns whether a node survives the macro-boundary filters, given its
/// (optional) macro metadata.
fn macro_boundary_keeps_node(
    metadata: Option<&MacroNodeMetadata>,
    flags: MacroBoundaryFlags<'_>,
) -> bool {
    // Macro-generated nodes are dropped unless explicitly requested.
    let generated = metadata.is_some_and(|m| m.macro_generated == Some(true));
    if generated && !flags.include_generated {
        return false;
    }
    // With a cfg filter active, the node's condition must match it exactly;
    // nodes without metadata (or without a condition) are dropped.
    match flags.cfg_filter {
        Some(filter) => metadata.and_then(|m| m.cfg_condition.as_deref()) == Some(filter),
        None => true,
    }
}
/// Filters candidate node ids through the macro-boundary rules, skipping all
/// metadata lookups when no filtering is requested.
fn filter_nodes_by_macro_boundary(
    graph: &CodeGraph,
    candidates: Vec<NodeId>,
    flags: MacroBoundaryFlags<'_>,
) -> Vec<NodeId> {
    // Fast path: everything passes, so avoid touching the metadata store.
    let needs_filtering = !flags.include_generated || flags.cfg_filter.is_some();
    if !needs_filtering {
        return candidates;
    }
    let store = graph.macro_metadata();
    let mut kept = Vec::with_capacity(candidates.len());
    for node_id in candidates {
        if macro_boundary_keeps_node(store.get(node_id), flags) {
            kept.push(node_id);
        }
    }
    kept
}
/// Copies macro provenance (generated flag, cfg condition, macro source) from
/// the node's metadata into the display symbol's metadata map, when present.
fn enrich_with_macro_metadata(symbol: &mut DisplaySymbol, metadata: Option<&MacroNodeMetadata>) {
    let Some(meta) = metadata else { return };
    let entries = &mut symbol.metadata;
    if meta.macro_generated == Some(true) {
        entries.insert("macro_generated".to_string(), "true".to_string());
    }
    if let Some(cfg) = &meta.cfg_condition {
        entries.insert("cfg_condition".to_string(), cfg.clone());
    }
    if let Some(source) = &meta.macro_source {
        entries.insert("macro_source".to_string(), source.clone());
    }
}
/// Regroups symbols by their `macro_source` metadata (empty string for
/// symbols without one), tagging each with `macro_boundary_group`. A BTreeMap
/// keeps group order stable and sorted; the ungrouped bucket sorts first.
fn group_results_by_macro_source(symbols: Vec<DisplaySymbol>) -> Vec<DisplaySymbol> {
    let mut buckets: BTreeMap<String, Vec<DisplaySymbol>> = BTreeMap::new();
    for mut sym in symbols {
        let group = sym
            .metadata
            .get("macro_source")
            .cloned()
            .unwrap_or_default();
        sym.metadata
            .insert("macro_boundary_group".to_string(), group.clone());
        buckets.entry(group).or_default().push(sym);
    }
    buckets.into_values().flatten().collect()
}
/// Scored twin of `group_results_by_macro_source`: same grouping and tagging,
/// but each symbol keeps its fuzzy score alongside.
fn group_scored_results_by_macro_source(symbols: Vec<ScoredSymbol>) -> Vec<ScoredSymbol> {
    let mut buckets: BTreeMap<String, Vec<ScoredSymbol>> = BTreeMap::new();
    for (mut sym, score) in symbols {
        let group = sym
            .metadata
            .get("macro_source")
            .cloned()
            .unwrap_or_default();
        sym.metadata
            .insert("macro_boundary_group".to_string(), group.clone());
        buckets.entry(group).or_default().push((sym, score));
    }
    buckets.into_values().flatten().collect()
}
/// Builds the formatter metadata for a completed search.
///
/// Scope details are only surfaced when they carry information: the
/// `used_ancestor_index` flag is reported when an ancestor index was used or
/// results were narrowed; `filtered_to` passes through whenever present.
fn build_search_metadata(
    cli: &Cli,
    pattern: &str,
    scope_info: Option<&FuzzySearchScopeInfo>,
    index_age_seconds: Option<u64>,
    total_matches: usize,
    execution_time: std::time::Duration,
) -> FormatterMetadata {
    let mut used_ancestor_index = None;
    let mut filtered_to = None;
    if let Some(scope) = scope_info {
        if scope.used_ancestor_index || scope.filtered_to.is_some() {
            used_ancestor_index = Some(scope.used_ancestor_index);
        }
        filtered_to = scope.filtered_to.clone();
    }
    FormatterMetadata {
        pattern: Some(pattern.to_string()),
        total_matches,
        execution_time,
        filters: build_filters(cli),
        index_age_seconds,
        used_ancestor_index,
        filtered_to,
    }
}
/// Entry point for the search command.
///
/// Tries the daemon fast path first (plain exact searches only), then falls
/// back to an in-process graph search: JSON streaming, fuzzy, or regular.
/// Applies kind/lang filters, optional macro-boundary grouping, count
/// short-circuit, sorting, limiting, and formatted output.
pub fn run_search(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    cfg_filter: Option<&str>,
    include_generated: bool,
    macro_boundaries: bool,
    verbose: bool,
) -> Result<()> {
    let macro_flags = MacroBoundaryFlags {
        cfg_filter,
        include_generated,
        macro_boundaries,
    };
    // Env directives (SQRY_LOG / RUST_LOG) can enable verbosity without the flag.
    let verbose_effective = verbose || verbose_from_env();
    let progress: SharedReporter = PlainProgressReporter::for_search(verbose_effective);
    if should_attempt_daemon(cli, &macro_flags) {
        let daemon_started = Instant::now();
        if let Some(result) =
            try_daemon_search(cli, pattern, search_path, &macro_flags, verbose_effective)
        {
            return finalize_daemon_search(cli, pattern, result, daemon_started);
        }
    }
    // If a daemon is running but unused, mention it once (verbose only).
    maybe_emit_daemon_fallback_diagnostic(verbose_effective);
    if cli.json_stream {
        return run_json_stream_search(cli, pattern, search_path, macro_flags, &progress);
    }
    let start_time = Instant::now();
    // Fuzzy search also yields index age and scope info; regular search does not.
    let (mut all_symbols, index_age_seconds, scope_info) = if cli.fuzzy {
        let (scored_symbols, age, scope) =
            run_fuzzy_search(cli, pattern, search_path, macro_flags, &progress)?;
        let symbols = scored_symbols.into_iter().map(|(s, _)| s).collect();
        (symbols, Some(age), Some(scope))
    } else {
        (
            run_regular_search(cli, pattern, search_path, macro_flags, &progress)?,
            None,
            None,
        )
    };
    let filter_stage = ProgressStage::start(&progress, "apply filters");
    apply_search_filters(cli, &mut all_symbols);
    filter_stage.finish();
    if macro_flags.macro_boundaries {
        all_symbols = group_results_by_macro_source(all_symbols);
    }
    if cli.count {
        println!("{} matches found", all_symbols.len());
        return Ok(());
    }
    // Total is captured before truncation so the footer can report overflow.
    let total_matches = all_symbols.len();
    if let Some(sort_field) = cli.sort {
        crate::commands::sort::sort_symbols(&mut all_symbols, sort_field);
    }
    // Default limits differ by mode: fuzzy caps at 50, regular at 100.
    let limit = cli.limit.unwrap_or(if cli.fuzzy { 50 } else { 100 });
    let symbols_to_output = if all_symbols.len() > limit {
        all_symbols.truncate(limit);
        all_symbols
    } else {
        all_symbols
    };
    let execution_time = start_time.elapsed();
    let metadata = build_search_metadata(
        cli,
        pattern,
        scope_info.as_ref(),
        index_age_seconds,
        total_matches,
        execution_time,
    );
    let formatter = create_formatter(cli);
    let mut streams = OutputStreams::with_pager(cli.pager_config());
    formatter.format(&symbols_to_output, Some(&metadata), &mut streams)?;
    if !cli.json && total_matches > limit {
        eprintln!("\nShowing {limit} of {total_matches} matches (use --limit to adjust)");
    }
    streams.finish_checked()
}
/// Snapshot of the active CLI filters, for inclusion in output metadata.
/// Fuzzy sub-filters are only populated when fuzzy mode is on.
fn build_filters(cli: &Cli) -> Filters {
    let fuzzy = cli.fuzzy.then(|| FuzzyFilters {
        algorithm: cli.fuzzy_algorithm.clone(),
        threshold: cli.fuzzy_threshold,
        max_candidates: Some(cli.fuzzy_max_candidates),
    });
    Filters {
        kind: cli.kind.map(|k| k.to_string()),
        lang: cli.lang.clone(),
        ignore_case: cli.ignore_case,
        exact: cli.exact,
        fuzzy,
    }
}
/// Maps a file path's extension to a canonical language name, or "unknown".
///
/// Matching is case-insensitive: the extension is lowercased before the
/// lookup. (This also means the former `"r" | "R"` arm was half-dead — the
/// `"R"` alternative could never match a lowercased string — so it has been
/// reduced to `"r"` with identical behavior.)
fn language_from_path(path: &Path) -> &'static str {
    path.extension()
        .and_then(|ext| ext.to_str())
        .map_or("unknown", |ext| match ext.to_lowercase().as_str() {
            "rs" => "rust",
            "js" | "mjs" | "cjs" => "javascript",
            "ts" | "mts" | "cts" => "typescript",
            "jsx" => "javascriptreact",
            "tsx" => "typescriptreact",
            "py" | "pyw" => "python",
            "rb" => "ruby",
            "go" => "go",
            "java" => "java",
            "kt" | "kts" => "kotlin",
            "scala" | "sc" => "scala",
            "c" | "h" => "c",
            "cpp" | "cc" | "cxx" | "hpp" | "hxx" => "cpp",
            "cs" => "csharp",
            "php" => "php",
            "swift" => "swift",
            "sql" => "sql",
            "dart" => "dart",
            "lua" => "lua",
            "sh" | "bash" | "zsh" => "shell",
            "pl" | "pm" => "perl",
            "groovy" | "gvy" => "groovy",
            "ex" | "exs" => "elixir",
            "r" => "r",
            "hs" | "lhs" => "haskell",
            "svelte" => "svelte",
            "vue" => "vue",
            "zig" => "zig",
            "css" | "scss" | "sass" | "less" => "css",
            "html" | "htm" => "html",
            "tf" | "tfvars" => "terraform",
            "pp" => "puppet",
            "pls" | "plb" | "pck" => "plsql",
            "cls" | "trigger" => "apex",
            "abap" => "abap",
            _ => "unknown",
        })
}
/// Returns whether file extension `ext` belongs to the language named `lang`.
/// Both sides are compared case-insensitively; unknown language names fall
/// back to a literal extension-equals-language comparison.
///
/// Fixes two inconsistencies with `language_from_path`:
/// - `mts`/`cts` map to "typescript" there but were rejected here; they are
///   now accepted by the typescript filter.
/// - plsql accepted `pks|pkb|pls` here but `pls|plb|pck` there; the filter now
///   accepts the union of both sets.
fn matches_language(ext: &str, lang: &str) -> bool {
    let ext_lower = ext.to_lowercase();
    let lang_lower = lang.to_lowercase();
    match lang_lower.as_str() {
        "rust" | "rs" => ext_lower == "rs",
        "javascript" | "js" => matches!(ext_lower.as_str(), "js" | "jsx" | "mjs" | "cjs"),
        "typescript" | "ts" => matches!(ext_lower.as_str(), "ts" | "tsx" | "mts" | "cts"),
        "python" | "py" => matches!(ext_lower.as_str(), "py" | "pyi" | "pyw"),
        "go" => ext_lower == "go",
        "java" => ext_lower == "java",
        "swift" => ext_lower == "swift",
        "c" => matches!(ext_lower.as_str(), "c" | "h"),
        "cpp" | "c++" | "cxx" => {
            matches!(
                ext_lower.as_str(),
                "cpp" | "cc" | "cxx" | "hpp" | "hh" | "hxx" | "h"
            )
        }
        "csharp" | "c#" | "cs" => matches!(ext_lower.as_str(), "cs" | "csx"),
        "dart" => ext_lower == "dart",
        "kotlin" | "kt" => matches!(ext_lower.as_str(), "kt" | "kts"),
        "ruby" | "rb" => matches!(ext_lower.as_str(), "rb" | "rake" | "gemspec"),
        "scala" => matches!(ext_lower.as_str(), "scala" | "sc"),
        "php" => ext_lower == "php",
        "lua" => ext_lower == "lua",
        "elixir" | "ex" => matches!(ext_lower.as_str(), "ex" | "exs"),
        "haskell" | "hs" => matches!(ext_lower.as_str(), "hs" | "lhs"),
        "perl" | "pl" => matches!(ext_lower.as_str(), "pl" | "pm"),
        "r" => ext_lower == "r",
        "shell" | "sh" | "bash" => matches!(ext_lower.as_str(), "sh" | "bash" | "zsh"),
        "zig" => ext_lower == "zig",
        "groovy" => matches!(ext_lower.as_str(), "groovy" | "gvy" | "gy" | "gsh"),
        "vue" => ext_lower == "vue",
        "svelte" => ext_lower == "svelte",
        "html" => matches!(ext_lower.as_str(), "html" | "htm"),
        "css" => matches!(ext_lower.as_str(), "css" | "scss" | "sass" | "less"),
        "terraform" | "tf" | "hcl" => {
            matches!(ext_lower.as_str(), "tf" | "tfvars" | "hcl")
        }
        "puppet" | "pp" => ext_lower == "pp",
        "sql" => ext_lower == "sql",
        "servicenow" | "servicenow-xanadu" | "servicenow-xanadu-js" | "snjs" => ext_lower == "snjs",
        "apex" | "salesforce" => matches!(ext_lower.as_str(), "cls" | "trigger"),
        "abap" => ext_lower == "abap",
        "plsql" | "oracle-plsql" => {
            matches!(ext_lower.as_str(), "pks" | "pkb" | "pls" | "plb" | "pck")
        }
        _ => ext_lower == lang_lower,
    }
}
/// In-process exact/regex search over the unified graph.
///
/// Regex mode scans every interned string and collects nodes indexed under
/// matching names and qualified names; exact mode uses the snapshot's exact
/// name index directly. Results are deduped and macro-boundary filtered
/// before conversion to display symbols.
fn run_regular_search(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    macro_flags: MacroBoundaryFlags<'_>,
    progress: &SharedReporter,
) -> Result<Vec<DisplaySymbol>> {
    let search_path_path = Path::new(search_path);
    let index_location = find_nearest_index(search_path_path);
    // Prefer the discovered index root; otherwise load from the search path.
    let index_root = index_location
        .as_ref()
        .map_or(search_path_path, |loc| loc.index_root.as_path());
    let config = GraphLoadConfig::default();
    let graph = load_unified_graph_for_cli(index_root, &config, cli, Arc::clone(progress))
        .context("Failed to load graph. Run 'sqry index' to build the graph.")?;
    // None means exact mode (no regex compiled).
    let pattern_regex = build_pattern_regex(cli, pattern)?;
    let mut matches = Vec::new();
    let strings = graph.strings();
    let indices = graph.indices();
    if let Some(regex) = pattern_regex {
        let stage = ProgressStage::start(progress, "regex scan");
        for (str_id, s) in strings.iter() {
            if regex.is_match(s) {
                // A string may be indexed as a name and/or a qualified name.
                matches.extend_from_slice(indices.by_qualified_name(str_id));
                matches.extend_from_slice(indices.by_name(str_id));
            }
        }
        stage.finish();
    } else {
        debug_assert!(
            cli.exact,
            "non-exact path is owned by the regex branch above"
        );
        let stage = ProgressStage::start(progress, "exact name lookup");
        let node_ids = graph.snapshot().find_by_exact_name(pattern);
        matches.extend(node_ids);
        stage.finish();
    }
    // Dedup nodes that matched via both the name and qualified-name indexes.
    matches.sort_unstable();
    matches.dedup();
    let matches = filter_nodes_by_macro_boundary(&graph, matches, macro_flags);
    let mut all_symbols = Vec::with_capacity(matches.len());
    for node_id in matches {
        if let Some(symbol) = convert_node_to_display_symbol(&graph, node_id) {
            all_symbols.push(symbol);
        }
    }
    Ok(all_symbols)
}
/// Compiles the search pattern as a regex, or returns `None` in exact mode.
/// The cost gate rejects pathological patterns before compilation; an invalid
/// pattern is a user-facing error.
fn build_pattern_regex(cli: &Cli, pattern: &str) -> Result<Option<regex::Regex>> {
    if cli.exact {
        return Ok(None);
    }
    let gate_config = sqry_core::query::cost_gate::CostGateConfig::default();
    sqry_core::query::cost_gate::check_regex_pattern_text(pattern, usize::MAX, &gate_config)
        .map_err(anyhow::Error::from)?;
    let compiled = RegexBuilder::new(pattern)
        .case_insensitive(cli.ignore_case)
        .build()
        .context("Invalid regex pattern")?;
    Ok(Some(compiled))
}
/// Converts a graph node into a `DisplaySymbol`, resolving interned strings
/// and file ids. Returns `None` when the node id is unknown; unresolvable
/// name/path ids degrade to empty values rather than failing.
fn convert_node_to_display_symbol(
    graph: &CodeGraph,
    node_id: sqry_core::graph::unified::node::NodeId,
) -> Option<DisplaySymbol> {
    let entry = graph.nodes().get(node_id)?;
    let strings = graph.strings();
    let files = graph.files();
    let name = strings
        .resolve(entry.name)
        .map(|s| s.to_string())
        .unwrap_or_default();
    let file_path = files
        .resolve(entry.file)
        .map(|s| PathBuf::from(s.as_ref()))
        .unwrap_or_default();
    // Language is derived purely from the file extension.
    let language = language_from_path(&file_path).to_string();
    let mut metadata = HashMap::new();
    metadata.insert(
        "__raw_file_path".to_string(),
        file_path.to_string_lossy().to_string(),
    );
    metadata.insert("__raw_language".to_string(), language.clone());
    // Fall back to the plain name when no qualified name is stored.
    let qualified_name = entry
        .qualified_name
        .and_then(|id| strings.resolve(id))
        .map_or_else(|| name.clone(), |s| s.to_string());
    let mut symbol = DisplaySymbol {
        name,
        qualified_name,
        kind: node_kind_to_string(entry.kind).to_string(),
        file_path,
        start_line: entry.start_line as usize,
        start_column: entry.start_column as usize,
        end_line: entry.end_line as usize,
        end_column: entry.end_column as usize,
        metadata,
        caller_identity: None,
        callee_identity: None,
    };
    // Attach macro provenance (generated/cfg/source) when present.
    enrich_with_macro_metadata(&mut symbol, graph.macro_metadata().get(node_id));
    Some(symbol)
}
/// Maps a graph `NodeKind` to its stable snake_case display name. The match
/// is exhaustive on purpose: adding a variant upstream forces an update here.
fn node_kind_to_string(kind: NodeKind) -> &'static str {
    match kind {
        NodeKind::Function => "function",
        NodeKind::Method => "method",
        NodeKind::Class => "class",
        NodeKind::Interface => "interface",
        NodeKind::Trait => "trait",
        NodeKind::Module => "module",
        NodeKind::Variable => "variable",
        NodeKind::Constant => "constant",
        NodeKind::Type => "type",
        NodeKind::Struct => "struct",
        NodeKind::Enum => "enum",
        NodeKind::EnumVariant => "enum_variant",
        NodeKind::Macro => "macro",
        NodeKind::Parameter => "parameter",
        NodeKind::Property => "property",
        NodeKind::Import => "import",
        NodeKind::Export => "export",
        NodeKind::Component => "component",
        NodeKind::Service => "service",
        NodeKind::Resource => "resource",
        NodeKind::Endpoint => "endpoint",
        NodeKind::Test => "test",
        NodeKind::CallSite => "call_site",
        NodeKind::StyleRule => "style_rule",
        NodeKind::StyleAtRule => "style_at_rule",
        NodeKind::StyleVariable => "style_variable",
        NodeKind::Lifetime => "lifetime",
        NodeKind::TypeParameter => "type_parameter",
        NodeKind::Annotation => "annotation",
        NodeKind::AnnotationValue => "annotation_value",
        NodeKind::LambdaTarget => "lambda_target",
        NodeKind::JavaModule => "java_module",
        NodeKind::EnumConstant => "enum_constant",
        NodeKind::Other => "other",
    }
}
struct FuzzySearchScopeInfo {
used_ancestor_index: bool,
filtered_to: Option<String>,
}
/// Result of resolving which index to load for a fuzzy search.
struct FuzzyIndexResolution {
    /// Directory whose index should be loaded.
    index_root: PathBuf,
    /// Path (relative to the index root) to filter results down to, if any.
    scope_filter: Option<PathBuf>,
    /// True when the query targets a single file rather than a directory.
    is_file_query: bool,
    /// Scope details to report in output metadata.
    scope_info: FuzzySearchScopeInfo,
}
/// Resolves the index to use for a fuzzy search rooted at `search_path`.
///
/// Without a discovered index, the search path itself is used as the index
/// root with no scoping. With one, a scope filter is derived when required,
/// and `filtered_to` is rendered as an exact path for file queries or a
/// `dir/**` glob for directory queries.
fn resolve_fuzzy_index(search_path: &Path) -> FuzzyIndexResolution {
    let Some(loc) = find_nearest_index(search_path) else {
        return FuzzyIndexResolution {
            index_root: search_path.to_path_buf(),
            scope_filter: None,
            is_file_query: false,
            scope_info: FuzzySearchScopeInfo {
                used_ancestor_index: false,
                filtered_to: None,
            },
        };
    };
    let scope = if loc.requires_scope_filter {
        loc.relative_scope()
    } else {
        None
    };
    let filtered_to = scope.as_ref().map(|p| {
        if loc.is_file_query {
            p.to_string_lossy().into_owned()
        } else {
            format!("{}/**", p.display())
        }
    });
    FuzzyIndexResolution {
        index_root: loc.index_root.clone(),
        scope_filter: scope,
        is_file_query: loc.is_file_query,
        scope_info: FuzzySearchScopeInfo {
            used_ancestor_index: loc.is_ancestor,
            filtered_to,
        },
    }
}
/// Builds a trigram index over every interned string in the graph, keyed by
/// the string id's raw index, and wraps it for sharing.
fn build_trigram_index_from_graph(graph: &CodeGraph) -> Arc<TrigramIndex> {
    let mut index = TrigramIndex::new();
    graph
        .strings()
        .iter()
        .for_each(|(str_id, s)| index.add_symbol(str_id.index() as usize, s));
    Arc::new(index)
}
/// In-process fuzzy search.
///
/// Loads the graph for the nearest index, builds a trigram index over all
/// interned strings, generates candidate string ids for the pattern, scores
/// them with the configured matcher, expands matched strings to graph nodes
/// (name and qualified-name indexes), applies macro-boundary and scope
/// filters, and returns (scored symbols sorted best-first, index age seconds,
/// scope info for output metadata).
fn run_fuzzy_search(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    macro_flags: MacroBoundaryFlags<'_>,
    progress: &SharedReporter,
) -> Result<(Vec<ScoredSymbol>, u64, FuzzySearchScopeInfo)> {
    let search_path_path = Path::new(search_path);
    let resolution = resolve_fuzzy_index(search_path_path);
    let FuzzyIndexResolution {
        index_root,
        scope_filter,
        is_file_query,
        scope_info,
    } = resolution;
    let config = GraphLoadConfig::default();
    let graph = load_unified_graph_for_cli(&index_root, &config, cli, Arc::clone(progress))
        .context("Failed to load graph. Run 'sqry index' to build the graph.")?;
    // NOTE(review): index age is hard-coded to 0 rather than derived from the
    // on-disk index; confirm whether age reporting is still intended.
    let age_seconds = 0;
    let trigram_index_arc = build_trigram_index_from_graph(&graph);
    let algorithm = parse_fuzzy_algorithm(&cli.fuzzy_algorithm)?;
    let fuzzy_config = build_fuzzy_config(cli, 0.1);
    let match_config = build_match_config(cli, algorithm);
    let generator = CandidateGenerator::with_config(trigram_index_arc, fuzzy_config);
    maybe_log_fuzzy_config(cli, algorithm);
    let fuzzy_stage = ProgressStage::start(progress, "fuzzy match");
    let candidate_ids = generator.generate(pattern);
    if candidate_ids.is_empty() {
        fuzzy_stage.finish();
        return Ok((Vec::new(), age_seconds, scope_info));
    }
    let matcher = FuzzyMatcher::with_config(match_config.clone());
    // Resolve candidate ids back to interned strings; ids that no longer
    // resolve (or overflow u32) are silently skipped.
    let resolved_candidates: Vec<(usize, Arc<str>)> = candidate_ids
        .iter()
        .filter_map(|&id| {
            let str_id = u32::try_from(id).ok()?;
            let str_id = sqry_core::graph::unified::string::StringId::new(str_id);
            graph.strings().resolve(str_id).map(|s| (id, s))
        })
        .collect();
    let candidate_targets = resolved_candidates.iter().map(|(id, s)| (*id, s.as_ref()));
    let match_results = matcher.match_many(pattern, candidate_targets);
    let mut symbols = Vec::new();
    let indices = graph.indices();
    for result in match_results {
        let Ok(str_id) = u32::try_from(result.entry_id) else {
            continue;
        };
        let str_id = sqry_core::graph::unified::string::StringId::new(str_id);
        // A matched string may be indexed as a plain name and/or a qualified
        // name; collect nodes from both indexes and dedup.
        let mut node_ids = Vec::new();
        node_ids.extend_from_slice(indices.by_qualified_name(str_id));
        node_ids.extend_from_slice(indices.by_name(str_id));
        node_ids.sort_unstable();
        node_ids.dedup();
        let node_ids = filter_nodes_by_macro_boundary(&graph, node_ids, macro_flags);
        for node_id in node_ids {
            if let Some(symbol) = convert_node_to_display_symbol(&graph, node_id) {
                symbols.push((symbol, result.score));
            }
        }
    }
    // Sort best-first; NaN-safe via the Ordering::Equal fallback.
    symbols.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    maybe_log_fuzzy_results(symbols.len());
    let mut final_symbols = symbols;
    if let Some(ref scope) = scope_filter {
        filter_fuzzy_results_by_scope(&mut final_symbols, scope, is_file_query);
    }
    fuzzy_stage.finish();
    Ok((final_symbols, age_seconds, scope_info))
}
/// Narrows fuzzy results to the requested scope in place: file queries demand
/// an exact path match; directory queries keep anything under the scope.
fn filter_fuzzy_results_by_scope(
    symbols: &mut Vec<ScoredSymbol>,
    scope: &Path,
    is_file_query: bool,
) {
    symbols.retain(|(symbol, _score)| {
        if is_file_query {
            symbol.file_path == scope
        } else {
            symbol.file_path.starts_with(scope)
        }
    });
}
/// Streams fuzzy search results as newline-delimited JSON: one
/// `PartialResult` event per match (up to the limit), then a `FinalSummary`
/// event with stats.
fn run_json_stream_search(
    cli: &Cli,
    pattern: &str,
    search_path: &str,
    macro_flags: MacroBoundaryFlags<'_>,
    progress: &SharedReporter,
) -> Result<()> {
    let (mut symbols, age_seconds, scope_info) =
        run_fuzzy_search(cli, pattern, search_path, macro_flags, progress)?;
    apply_scored_search_filters(cli, &mut symbols);
    if macro_flags.macro_boundaries {
        symbols = group_scored_results_by_macro_source(symbols);
    }
    // Streaming default limit matches the fuzzy default (50).
    let limit = cli.limit.unwrap_or(50);
    let mut count = 0;
    for (symbol, score) in symbols.iter().take(limit) {
        let json_symbol = JsonSymbol::from(symbol);
        let event = StreamEvent::PartialResult {
            result: json_symbol,
            score: *score,
        };
        let json = serde_json::to_string(&event)?;
        println!("{json}");
        count += 1;
    }
    // Summary reports the post-filter total and how many were streamed.
    emit_stream_summary(symbols.len(), count, age_seconds, Some(&scope_info))?;
    Ok(())
}
/// Scored twin of `apply_search_filters`: applies the `--kind` and `--lang`
/// filters in place, keeping each symbol's score attached.
fn apply_scored_search_filters(cli: &Cli, symbols: &mut Vec<ScoredSymbol>) {
    if let Some(kind) = cli.kind {
        let wanted = kind.to_string().to_lowercase();
        symbols.retain(|(sym, _)| sym.kind.to_lowercase() == wanted);
    }
    if let Some(ref lang) = cli.lang {
        symbols.retain(|(sym, _)| {
            match sym.file_path.extension().and_then(|ext| ext.to_str()) {
                Some(ext) => matches_language(ext, lang),
                None => false,
            }
        });
    }
}
/// Parses the `--fuzzy-algorithm` value (case-insensitive). Both hyphen and
/// underscore spellings of jaro-winkler are accepted; anything else is an
/// error naming the valid choices.
fn parse_fuzzy_algorithm(algorithm: &str) -> Result<MatchAlgorithm> {
    let normalized = algorithm.to_lowercase();
    match normalized.as_str() {
        "levenshtein" => Ok(MatchAlgorithm::Levenshtein),
        "jaro_winkler" | "jaro-winkler" => Ok(MatchAlgorithm::JaroWinkler),
        _ => anyhow::bail!(
            "Unknown fuzzy algorithm '{algorithm}'. Use 'levenshtein' or 'jaro-winkler'."
        ),
    }
}
/// Builds the candidate-generation config from CLI options. `min_similarity`
/// is the candidate-prefilter floor (the caller passes 0.1) and is distinct
/// from the final match threshold carried in `MatchConfig`.
fn build_fuzzy_config(cli: &Cli, min_similarity: f64) -> FuzzyConfig {
    FuzzyConfig {
        max_candidates: cli.fuzzy_max_candidates,
        min_similarity,
    }
}
/// Builds the scoring config: threshold from `--fuzzy-threshold`, and case
/// sensitivity is the inverse of `--ignore-case`.
fn build_match_config(cli: &Cli, algorithm: MatchAlgorithm) -> MatchConfig {
    MatchConfig {
        algorithm,
        min_score: cli.fuzzy_threshold,
        case_sensitive: !cli.ignore_case,
    }
}
/// Prints fuzzy-config debug lines to stderr, gated on RUST_LOG being set at
/// all (any value).
fn maybe_log_fuzzy_config(cli: &Cli, algorithm: MatchAlgorithm) {
    if std::env::var("RUST_LOG").is_err() {
        return;
    }
    eprintln!("[DEBUG] Using fuzzy algorithm: {algorithm:?}");
    eprintln!("[DEBUG] Min score threshold: {}", cli.fuzzy_threshold);
}
/// Prints the fuzzy match count to stderr, gated on RUST_LOG being set at all
/// (any value), matching the gate used by `maybe_log_fuzzy_config`.
fn maybe_log_fuzzy_results(count: usize) {
    if std::env::var("RUST_LOG").is_err() {
        return;
    }
    eprintln!("[DEBUG] Found {count} fuzzy matches");
}
/// Prints the terminal `FinalSummary` stream event as one JSON line.
/// Scope info is attached only when it carries information (ancestor index
/// used, or results narrowed).
fn emit_stream_summary(
    final_count: usize,
    total_streamed: usize,
    age_seconds: u64,
    scope_info: Option<&FuzzySearchScopeInfo>,
) -> Result<()> {
    let mut stats = Stats::new(final_count, total_streamed).with_index_age(age_seconds);
    if let Some(scope) = scope_info {
        if scope.used_ancestor_index || scope.filtered_to.is_some() {
            stats = stats.with_scope_info(scope.used_ancestor_index, scope.filtered_to.clone());
        }
    }
    let summary = StreamEvent::<JsonSymbol>::FinalSummary { stats };
    println!("{}", serde_json::to_string(&summary)?);
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
/// Serializes env-var mutation across tests. A poisoned lock is recovered
/// deliberately: the guarded state (the process env) is reset by each test.
fn env_lock() -> std::sync::MutexGuard<'static, ()> {
    static LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(());
    LOCK.lock().unwrap_or_else(|p| p.into_inner())
}
/// Runs `f` with SQRY_LOG / RUST_LOG set (Some) or removed (None), restoring
/// the previous values afterwards. Holds the env lock for the full call so
/// parallel tests cannot interleave their env mutations.
fn with_env_pair<F: FnOnce() -> bool>(
    sqry_log: Option<&str>,
    rust_log: Option<&str>,
    f: F,
) -> bool {
    let _g = env_lock();
    let prev_sqry = std::env::var("SQRY_LOG").ok();
    let prev_rust = std::env::var("RUST_LOG").ok();
    // SAFETY: mutating the process env can race with concurrent readers; the
    // env lock held above serializes all mutators in this test module.
    unsafe {
        match sqry_log {
            Some(v) => std::env::set_var("SQRY_LOG", v),
            None => std::env::remove_var("SQRY_LOG"),
        }
        match rust_log {
            Some(v) => std::env::set_var("RUST_LOG", v),
            None => std::env::remove_var("RUST_LOG"),
        }
    }
    let result = f();
    // SAFETY: same serialization argument as above; restores prior state.
    unsafe {
        match prev_sqry {
            Some(v) => std::env::set_var("SQRY_LOG", v),
            None => std::env::remove_var("SQRY_LOG"),
        }
        match prev_rust {
            Some(v) => std::env::set_var("RUST_LOG", v),
            None => std::env::remove_var("RUST_LOG"),
        }
    }
    result
}
#[test]
fn verbose_from_env_disabled_when_unset() {
    // Neither SQRY_LOG nor RUST_LOG present -> quiet.
    assert!(!with_env_pair(None, None, verbose_from_env));
}
#[test]
fn verbose_from_env_enabled_for_bare_info_level() {
    // A bare level of info/debug/trace enables verbosity from either variable.
    assert!(with_env_pair(Some("info"), None, verbose_from_env));
    assert!(with_env_pair(None, Some("info"), verbose_from_env));
    assert!(with_env_pair(Some("debug"), None, verbose_from_env));
    assert!(with_env_pair(None, Some("trace"), verbose_from_env));
}
#[test]
fn verbose_from_env_disabled_for_bare_warn_or_error() {
    // warn/error are quieter than info and do not enable verbosity.
    assert!(!with_env_pair(Some("warn"), None, verbose_from_env));
    assert!(!with_env_pair(None, Some("error"), verbose_from_env));
}
#[test]
fn verbose_from_env_enabled_for_sqry_cli_targets() {
    // Target-scoped directives for this crate (or its progress module) count.
    assert!(with_env_pair(None, Some("sqry_cli=info"), verbose_from_env,));
    assert!(with_env_pair(
        None,
        Some("sqry_cli::progress=info"),
        verbose_from_env,
    ));
    assert!(with_env_pair(
        None,
        Some("sqry_cli::progress=debug"),
        verbose_from_env,
    ));
}
#[test]
fn verbose_from_env_disabled_for_unrelated_target() {
    // Directives scoped to other crates/modules are ignored.
    assert!(!with_env_pair(
        None,
        Some("somecrate=info"),
        verbose_from_env,
    ));
    assert!(!with_env_pair(
        None,
        Some("other::module=trace"),
        verbose_from_env,
    ));
}
#[test]
fn verbose_from_env_handles_mixed_directives() {
    // A quiet default level combined with a verbose sqry target is verbose.
    assert!(with_env_pair(
        None,
        Some("warn,sqry_cli::progress=info"),
        verbose_from_env,
    ));
    // A bare verbose level is honored alongside unrelated target directives.
    // (Previously written as a confusing double negation:
    // `assert!(!with_env_pair(..., || !verbose_from_env()))` — same assertion,
    // now stated directly.)
    assert!(with_env_pair(
        None,
        Some("info,somecrate=debug"),
        verbose_from_env,
    ));
}
#[test]
fn verbose_from_env_handles_bare_info_mixed_with_targets() {
    // A bare `info` is honored even alongside unrelated target directives.
    assert!(with_env_pair(
        None,
        Some("info,somecrate=debug"),
        verbose_from_env,
    ));
}
#[test]
fn verbose_from_env_sqry_log_takes_precedence_path() {
    // Despite the name: any variable carrying a verbose directive wins, so
    // RUST_LOG=info is verbose even though SQRY_LOG=warn.
    assert!(with_env_pair(Some("warn"), Some("info"), verbose_from_env,));
}
#[test]
fn test_matches_language_rust() {
assert!(matches_language("rs", "rust"));
assert!(matches_language("rs", "Rust"));
assert!(matches_language("rs", "rs"));
assert!(!matches_language("js", "rust"));
}
#[test]
fn test_matches_language_javascript() {
    // Plain and JSX extensions belong to JavaScript; TypeScript does not.
    for ext in ["js", "jsx"] {
        assert!(matches_language(ext, "javascript"));
    }
    assert!(matches_language("js", "js"));
    assert!(!matches_language("ts", "javascript"));
}
#[test]
fn test_matches_language_typescript() {
    // Plain and TSX extensions belong to TypeScript; JavaScript does not.
    for ext in ["ts", "tsx"] {
        assert!(matches_language(ext, "typescript"));
    }
    assert!(matches_language("ts", "ts"));
    assert!(!matches_language("js", "typescript"));
}
#[test]
fn test_matches_language_swift() {
    // Language-name matching ignores case; unrelated extensions are rejected.
    for lang in ["swift", "Swift"] {
        assert!(matches_language("swift", lang));
    }
    assert!(!matches_language("c", "swift"));
}
#[test]
fn test_matches_language_c() {
    // Source and header extensions count as C, and extension case is ignored.
    for ext in ["c", "h", "C"] {
        assert!(matches_language(ext, "c"));
    }
    assert!(!matches_language("cpp", "c"));
}
#[test]
fn test_matches_language_cpp() {
    // All conventional C++ source and header extensions map to "cpp"; the
    // shared "h" header is accepted for C++ as well.
    // (Reformatted: the original crammed three assertions onto one line,
    // breaking the file's one-statement-per-line convention.)
    for ext in ["cpp", "cc", "cxx", "hpp", "hh", "hxx", "h"] {
        assert!(matches_language(ext, "cpp"), "extension {ext} should match cpp");
    }
    // The "c++" spelling of the language name is accepted too.
    assert!(matches_language("cpp", "c++"));
    // A plain C file is not C++.
    assert!(!matches_language("c", "cpp"));
}
#[test]
fn test_matches_language_csharp() {
    // Accepts the "csharp"/"c#" spellings case-insensitively, plus the script
    // (.csx) extension.
    for lang in ["csharp", "c#", "CSharp"] {
        assert!(matches_language("cs", lang));
    }
    assert!(matches_language("csx", "csharp"));
    assert!(!matches_language("cpp", "csharp"));
}
#[test]
fn test_matches_language_dart() {
    // Case-insensitive language name; the "d" extension must not match Dart.
    for lang in ["dart", "Dart"] {
        assert!(matches_language("dart", lang));
    }
    assert!(!matches_language("d", "dart"));
}
#[test]
fn test_matches_language_sql() {
    // Case-insensitive match for SQL; a Rust extension is rejected.
    for lang in ["sql", "SQL"] {
        assert!(matches_language("sql", lang));
    }
    assert!(!matches_language("rs", "sql"));
}
#[test]
fn test_matches_language_servicenow() {
    // The snjs extension matches "servicenow" and its versioned spellings;
    // plain JavaScript does not.
    for lang in ["servicenow", "ServiceNow-Xanadu", "servicenow-xanadu-js"] {
        assert!(matches_language("snjs", lang));
    }
    assert!(!matches_language("js", "servicenow"));
}
use sqry_core::graph::unified::NodeEntry;
use sqry_core::graph::unified::concurrent::CodeGraph;
use sqry_core::graph::unified::node::NodeKind;
use sqry_core::graph::unified::storage::metadata::MacroNodeMetadata;
/// Insert a synthetic `Function` node named `name` into `graph` and return its id.
///
/// The node is registered against a fixed fake file path so tests don't need
/// real files on disk, and it is added to the graph's lookup indices.
fn add_test_node(graph: &mut CodeGraph, name: &str) -> NodeId {
    let interned_name = graph.strings_mut().intern(name).expect("intern name");
    let file = graph
        .files_mut()
        .register_with_language(Path::new("/synth/test.rs"), None)
        .expect("register file");
    let allocated = graph
        .nodes_mut()
        .alloc(NodeEntry::new(NodeKind::Function, interned_name, file))
        .expect("alloc node");
    graph
        .indices_mut()
        .add(allocated, NodeKind::Function, interned_name, None, file);
    allocated
}
/// Build a `MacroNodeMetadata` with only the fields these tests exercise set;
/// everything else is left empty/None.
fn macro_metadata(
    generated: bool,
    cfg: Option<&str>,
    source: Option<&str>,
) -> MacroNodeMetadata {
    MacroNodeMetadata {
        macro_generated: Some(generated),
        macro_source: source.map(String::from),
        cfg_condition: cfg.map(String::from),
        cfg_active: None,
        proc_macro_kind: None,
        expansion_cached: None,
        unresolved_attributes: Vec::new(),
    }
}
#[test]
fn run_search_drops_macro_generated_when_include_generated_false() {
    // A node tagged macro_generated must be filtered out when the caller did
    // not opt into generated code.
    let mut graph = CodeGraph::new();
    let hand_written = add_test_node(&mut graph, "user_defined");
    let generated = add_test_node(&mut graph, "derived_by_macro");
    graph
        .macro_metadata_mut()
        .insert(generated, macro_metadata(true, None, Some("derive_Debug")));
    let flags = MacroBoundaryFlags {
        cfg_filter: None,
        include_generated: false,
        macro_boundaries: false,
    };
    let survivors = filter_nodes_by_macro_boundary(&graph, vec![hand_written, generated], flags);
    assert_eq!(
        survivors,
        vec![hand_written],
        "macro_generated node must be dropped"
    );
}
#[test]
fn run_search_keeps_macro_generated_when_include_generated_true() {
    // With include_generated=true the generated node survives filtering, and
    // its macro metadata is surfaced on the converted display symbol.
    let mut graph = CodeGraph::new();
    let hand_written = add_test_node(&mut graph, "user_defined");
    let generated = add_test_node(&mut graph, "derived_by_macro");
    graph
        .macro_metadata_mut()
        .insert(generated, macro_metadata(true, None, Some("derive_Debug")));
    let flags = MacroBoundaryFlags {
        cfg_filter: None,
        include_generated: true,
        macro_boundaries: false,
    };
    let survivors = filter_nodes_by_macro_boundary(&graph, vec![hand_written, generated], flags);
    assert_eq!(survivors, vec![hand_written, generated]);
    let symbol = convert_node_to_display_symbol(&graph, generated).expect("convert derived node");
    assert_eq!(
        symbol.metadata.get("macro_generated").map(String::as_str),
        Some("true")
    );
    assert_eq!(
        symbol.metadata.get("macro_source").map(String::as_str),
        Some("derive_Debug")
    );
}
#[test]
fn run_search_filters_by_cfg_condition() {
    // With a cfg filter set, only nodes whose cfg_condition equals the filter
    // survive; the node with no cfg metadata is dropped as well.
    let mut graph = CodeGraph::new();
    let unconditional = add_test_node(&mut graph, "always_present");
    let alpha_node = add_test_node(&mut graph, "alpha_only");
    let beta_node = add_test_node(&mut graph, "beta_only");
    for (node, cfg) in [
        (alpha_node, "feature = \"alpha\""),
        (beta_node, "feature = \"beta\""),
    ] {
        graph
            .macro_metadata_mut()
            .insert(node, macro_metadata(false, Some(cfg), None));
    }
    let flags = MacroBoundaryFlags {
        cfg_filter: Some("feature = \"alpha\""),
        include_generated: true,
        macro_boundaries: false,
    };
    let kept =
        filter_nodes_by_macro_boundary(&graph, vec![unconditional, alpha_node, beta_node], flags);
    assert_eq!(
        kept,
        vec![alpha_node],
        "only nodes whose cfg_condition matches the filter survive"
    );
}
#[test]
// When macro-boundary grouping is on, results sharing a macro_source must come
// out contiguously, and every output symbol must carry a "macro_boundary_group"
// metadata key naming its group.
fn run_search_groups_results_by_macro_source_when_macro_boundaries() {
// Four nodes: one plain, two generated via serde::Serialize, one via log::info.
let mut graph = CodeGraph::new();
let plain = add_test_node(&mut graph, "plain_fn");
let from_serde = add_test_node(&mut graph, "from_serde");
let from_log = add_test_node(&mut graph, "from_log");
let from_serde_2 = add_test_node(&mut graph, "from_serde_2");
graph.macro_metadata_mut().insert(
from_serde,
macro_metadata(true, None, Some("serde::Serialize")),
);
graph
.macro_metadata_mut()
.insert(from_log, macro_metadata(true, None, Some("log::info")));
graph.macro_metadata_mut().insert(
from_serde_2,
macro_metadata(true, None, Some("serde::Serialize")),
);
let symbols: Vec<DisplaySymbol> = [plain, from_serde, from_log, from_serde_2]
.into_iter()
.map(|nid| convert_node_to_display_symbol(&graph, nid).expect("convert node"))
.collect();
let grouped = group_results_by_macro_source(symbols);
// Every symbol — including the plain, non-generated one — must be tagged.
for sym in &grouped {
assert!(
sym.metadata.contains_key("macro_boundary_group"),
"missing macro_boundary_group on {}",
sym.name
);
}
let keys: Vec<&str> = grouped
.iter()
.map(|s| s.metadata["macro_boundary_group"].as_str())
.collect();
// Record the first and last output index at which each group key appears...
let mut seen_starts = std::collections::HashMap::<&str, (usize, usize)>::new();
for (i, k) in keys.iter().enumerate() {
seen_starts
.entry(k)
.and_modify(|(_, last)| *last = i)
.or_insert((i, i));
}
// ...then require every index in [first, last] to hold that same key, i.e.
// each group occupies one contiguous run. Ordering BETWEEN groups is
// deliberately left unconstrained.
for (k, (first, last)) in &seen_starts {
for i in *first..=*last {
assert_eq!(keys[i], *k, "group `{k}` is not contiguous in {keys:?}");
}
}
// Both serde-generated symbols must land in the serde::Serialize group.
let serde_count = grouped
.iter()
.filter(|s| {
s.metadata.get("macro_boundary_group").map(String::as_str)
== Some("serde::Serialize")
})
.count();
assert_eq!(serde_count, 2, "serde group should contain 2 symbols");
}
use crate::args::Cli;
use crate::large_stack_test;
use clap::Parser;
use sqry_daemon_protocol::{SearchItem, SearchMode, SearchResult};
/// Build a `Cli` as if `sqry` were invoked with no arguments, i.e. all flags
/// at their clap-derived defaults. Tests mutate individual fields afterwards.
fn default_cli() -> Cli {
Cli::parse_from(["sqry"])
}
large_stack_test! {
#[test]
fn should_attempt_daemon_requires_exact_mode() {
let macro_flags = MacroBoundaryFlags {
cfg_filter: None,
include_generated: false,
macro_boundaries: false,
};
let cli = default_cli();
assert!(!should_attempt_daemon(&cli, ¯o_flags));
let mut cli = default_cli();
cli.exact = true;
assert!(should_attempt_daemon(&cli, ¯o_flags));
}
}
large_stack_test! {
#[test]
fn should_attempt_daemon_skips_fuzzy_and_json_stream() {
let macro_flags = MacroBoundaryFlags {
cfg_filter: None,
include_generated: false,
macro_boundaries: false,
};
let mut cli = default_cli();
cli.fuzzy = true;
assert!(!should_attempt_daemon(&cli, ¯o_flags));
let mut cli = default_cli();
cli.exact = true;
cli.json_stream = true;
assert!(!should_attempt_daemon(&cli, ¯o_flags));
}
}
large_stack_test! {
    #[test]
    fn should_attempt_daemon_skips_macro_boundary_flags() {
        // Either macro_boundaries or a cfg filter forces the local search path,
        // even in --exact mode.
        let mut cli = default_cli();
        cli.exact = true;
        let boundary_flags = MacroBoundaryFlags {
            cfg_filter: None,
            include_generated: false,
            macro_boundaries: true,
        };
        assert!(!should_attempt_daemon(&cli, &boundary_flags));
        let cfg_flags = MacroBoundaryFlags {
            cfg_filter: Some("feature = \"alpha\""),
            include_generated: false,
            macro_boundaries: false,
        };
        assert!(!should_attempt_daemon(&cli, &cfg_flags));
    }
}
#[test]
fn workspace_is_loaded_for_matches_exact_root() {
    // A workspace whose index_root equals the queried path, in state "Loaded",
    // is considered loaded.
    let tmp = tempfile::tempdir().expect("tmpdir");
    let root = tmp.path().canonicalize().expect("canon");
    let status = serde_json::json!({
        "result": {
            "workspaces": [
                { "index_root": root.to_string_lossy(), "state": "Loaded" }
            ]
        },
        "meta": {}
    });
    assert!(workspace_is_loaded_for(&status, &root));
}
#[test]
fn workspace_is_loaded_for_accepts_raw_status_shape() {
    // The status JSON may also arrive without the {"result": ..., "meta": ...}
    // envelope; the bare shape must still be recognized.
    let tmp = tempfile::tempdir().expect("tmpdir");
    let root = tmp.path().canonicalize().expect("canon");
    let status = serde_json::json!({
        "workspaces": [
            { "index_root": root.to_string_lossy(), "state": "Loaded" }
        ]
    });
    assert!(workspace_is_loaded_for(&status, &root));
}
#[test]
fn workspace_is_loaded_for_matches_ancestor_index_root() {
    // A query path nested underneath a loaded workspace root counts as loaded.
    let tmp = tempfile::tempdir().expect("tmpdir");
    let root = tmp.path().canonicalize().expect("canon");
    let nested = root.join("src");
    std::fs::create_dir(&nested).expect("mkdir src");
    let nested_canonical = nested.canonicalize().expect("canon inner");
    let status = serde_json::json!({
        "result": {
            "workspaces": [
                { "index_root": root.to_string_lossy(), "state": "Loaded" }
            ]
        },
        "meta": {}
    });
    assert!(workspace_is_loaded_for(&status, &nested_canonical));
}
#[test]
fn workspace_is_loaded_for_rejects_non_loaded_state() {
    // Every state other than "Loaded" must be treated as not ready.
    let tmp = tempfile::tempdir().expect("tmpdir");
    let root = tmp.path().canonicalize().expect("canon");
    let non_loaded_states = ["Loading", "Rebuilding", "Evicted", "Failed", "Unloaded"];
    for state in non_loaded_states {
        let status = serde_json::json!({
            "result": {
                "workspaces": [
                    { "index_root": root.to_string_lossy(), "state": state }
                ]
            },
            "meta": {}
        });
        assert!(
            !workspace_is_loaded_for(&status, &root),
            "state {state} must NOT be considered loaded"
        );
    }
}
#[test]
fn workspace_is_loaded_for_rejects_unknown_workspace() {
    // A loaded workspace at some other root says nothing about the queried path.
    let queried = tempfile::tempdir().expect("tmpdir");
    let queried_path = queried.path().canonicalize().expect("canon");
    let unrelated = tempfile::tempdir().expect("tmpdir other");
    let unrelated_path = unrelated.path().canonicalize().expect("canon other");
    let status = serde_json::json!({
        "result": {
            "workspaces": [
                { "index_root": unrelated_path.to_string_lossy(), "state": "Loaded" }
            ]
        },
        "meta": {}
    });
    assert!(!workspace_is_loaded_for(&status, &queried_path));
}
#[test]
fn workspace_is_loaded_for_handles_malformed_status() {
    // Missing workspace list, wrong-typed list, and an entry without a "state"
    // field must all read as not-loaded rather than panicking.
    let tmp = tempfile::tempdir().expect("tmpdir");
    let root = tmp.path().canonicalize().expect("canon");
    let malformed_statuses = [
        serde_json::json!({ "result": {}, "meta": {} }),
        serde_json::json!({ "result": { "workspaces": "nope" }, "meta": {} }),
        serde_json::json!({
            "result": {
                "workspaces": [ { "index_root": root.to_string_lossy() } ]
            },
            "meta": {}
        }),
    ];
    for status in &malformed_statuses {
        assert!(!workspace_is_loaded_for(status, &root));
    }
}
large_stack_test! {
    #[test]
    fn build_daemon_search_request_threads_cli_filters() {
        // Pattern, workspace path, mode, language, limit, and envelope version
        // from the CLI must all be threaded onto the wire request.
        let mut cli = default_cli();
        cli.exact = true;
        cli.lang = Some("rust".to_string());
        cli.limit = Some(25);
        let flags = MacroBoundaryFlags {
            cfg_filter: None,
            include_generated: false,
            macro_boundaries: false,
        };
        let request = build_daemon_search_request(&cli, "needle", "/tmp/ws", &flags);
        assert_eq!(request.pattern, "needle");
        assert_eq!(request.search_path, "/tmp/ws");
        assert_eq!(request.mode, SearchMode::Exact);
        assert_eq!(request.lang.as_deref(), Some("rust"));
        assert_eq!(request.limit, Some(25));
        assert_eq!(request.envelope_version, sqry_daemon_protocol::ENVELOPE_VERSION);
        assert!(
            !request.include_generated,
            "default --exact has include_generated=false; wire must thread it"
        );
    }
}
large_stack_test! {
    #[test]
    fn build_daemon_search_request_saturates_oversized_limit() {
        // A usize limit wider than u32 must saturate at u32::MAX on the wire,
        // not wrap or panic.
        let mut cli = default_cli();
        cli.exact = true;
        cli.limit = Some(usize::MAX);
        let flags = MacroBoundaryFlags {
            cfg_filter: None,
            include_generated: false,
            macro_boundaries: false,
        };
        let request = build_daemon_search_request(&cli, "x", "/tmp/ws", &flags);
        assert_eq!(request.limit, Some(u32::MAX));
    }
}
large_stack_test! {
    #[test]
    fn build_daemon_search_request_threads_include_generated_true() {
        // include_generated=true must survive onto the wire request.
        let mut cli = default_cli();
        cli.exact = true;
        let flags = MacroBoundaryFlags {
            cfg_filter: None,
            include_generated: true,
            macro_boundaries: false,
        };
        let request = build_daemon_search_request(&cli, "x", "/tmp/ws", &flags);
        assert!(request.include_generated);
    }
}
#[test]
fn search_item_to_display_symbol_populates_raw_metadata() {
    // Wire fields map 1:1 onto the display symbol; language and file path are
    // additionally stashed under "__raw_*" metadata keys, while macro-related
    // keys stay absent for an item that carries none.
    let item = SearchItem {
        name: "alpha".into(),
        qualified_name: "crate::alpha".into(),
        kind: "function".into(),
        language: "rust".into(),
        file_path: "/repo/src/lib.rs".into(),
        start_line: 10,
        start_column: 4,
        end_line: 12,
        end_column: 1,
        score: None,
    };
    let symbol = search_item_to_display_symbol(item);
    assert_eq!(symbol.name, "alpha");
    assert_eq!(symbol.qualified_name, "crate::alpha");
    assert_eq!(symbol.kind, "function");
    assert_eq!(symbol.file_path, PathBuf::from("/repo/src/lib.rs"));
    assert_eq!((symbol.start_line, symbol.start_column), (10, 4));
    assert_eq!((symbol.end_line, symbol.end_column), (12, 1));
    assert_eq!(
        symbol.metadata.get("__raw_language").map(String::as_str),
        Some("rust"),
    );
    assert_eq!(
        symbol.metadata.get("__raw_file_path").map(String::as_str),
        Some("/repo/src/lib.rs"),
    );
    for absent_key in ["macro_generated", "cfg_condition", "macro_source"] {
        assert!(!symbol.metadata.contains_key(absent_key));
    }
}
large_stack_test! {
    #[test]
    fn finalize_daemon_search_count_mode_uses_pre_truncate_total() {
        // In --count mode, finalize must succeed on a truncated result whose
        // `total` (7) exceeds the number of returned items (1) — i.e. the
        // pre-truncation total is what matters.
        let mut cli = default_cli();
        cli.exact = true;
        cli.count = true;
        let only_item = SearchItem {
            name: "alpha".into(),
            qualified_name: "alpha".into(),
            kind: "function".into(),
            language: "rust".into(),
            file_path: "a.rs".into(),
            start_line: 1,
            start_column: 0,
            end_line: 1,
            end_column: 1,
            score: None,
        };
        let result = SearchResult {
            items: vec![only_item],
            total: 7,
            truncated: true,
            cursor: None,
        };
        let out = finalize_daemon_search(&cli, "alpha", result, Instant::now());
        assert!(out.is_ok(), "count-mode finalize must succeed: {out:?}");
    }
}
}