// On musl targets, override the global allocator with jemalloc.
// NOTE(review): presumably chosen because musl's bundled allocator is slower
// under multithreaded workloads — confirm against the project's benchmarks.
#[cfg(target_env = "musl")]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
mod args;
mod commands;
mod error;
mod index_discovery;
mod output;
mod persistence;
mod plugin_defaults;
mod progress;
#[cfg(test)]
// Test wrapper: runs the annotated test body on a freshly spawned thread with
// a 16 MiB stack. Any panic raised inside the body is re-thrown on the outer
// thread so the test harness still reports the failure normally.
// NOTE(review): presumably needed because parsing the large clap command tree
// overflows the default test-thread stack — confirm before removing.
macro_rules! large_stack_test {
    // Accepts an ordinary `fn name() { ... }` item, preserving any attributes
    // (e.g. `#[test]`) on the generated function.
    ($(#[$attr:meta])* fn $name:ident() $body:block) => {
        $(#[$attr])*
        fn $name() {
            let result = std::thread::Builder::new()
                .stack_size(16 * 1024 * 1024)
                .spawn(move || $body)
                .expect("spawn test thread")
                .join();
            // `join` returns Err(payload) when the body panicked; re-raise it
            // so the failure surfaces in this (harness-visible) thread.
            if let Err(panic) = result {
                std::panic::resume_unwind(panic);
            }
        }
    };
}
#[cfg(test)]
pub(crate) use large_stack_test;
use anyhow::{Context, Result};
use args::{Cli, Command, ValidationMode};
use clap::FromArgMatches;
use miette::{GraphicalReportHandler, GraphicalTheme};
use output::OutputStreams;
use sqry_core::query::error::{ExecutionError, QueryError, RichQueryError, ValidationError};
/// Process entry point: wire up signal handling, honour `--version` before
/// clap parsing, run the CLI, and map any failure onto a process exit code.
fn main() {
    reset_sigpipe();

    // Handle `--version`/`-V` ahead of full argument parsing so the version
    // prints even when the rest of the command line would not parse.
    if version_flag_present() {
        println!("sqry {}", env!("CARGO_PKG_VERSION"));
        return;
    }

    // Detect JSON mode up front so error reporting matches the output format.
    let json_output = json_output_requested();
    let exit_code = if let Err(err) = run() {
        handle_run_error(&err, json_output)
    } else {
        0
    };
    std::process::exit(exit_code);
}
#[cfg(unix)]
// Restore the default SIGPIPE disposition so that piping output into a
// short-lived consumer (e.g. `sqry ... | head`) terminates the process
// quietly instead of surfacing broken-pipe write errors.
fn reset_sigpipe() {
    // SAFETY: installing SIG_DFL for SIGPIPE via libc::signal is sound here;
    // this runs first thing in main, before any other threads are spawned.
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }
}
#[cfg(not(unix))]
// No-op on non-Unix platforms: SIGPIPE does not exist there.
fn reset_sigpipe() {
}
/// Report whether the raw command line (excluding argv[0]) contains a
/// version flag. Checked before clap parsing in `main`.
fn version_flag_present() -> bool {
    let mut rest = std::env::args().skip(1);
    rest.any(|arg| matches!(arg.as_str(), "--version" | "-V"))
}
/// Report whether the raw command line requests JSON output.
///
/// Skips argv[0] (the program name) so only real arguments are inspected,
/// mirroring `version_flag_present`; previously a program path that happened
/// to equal `--json`/`-j` would have matched.
fn json_output_requested() -> bool {
    std::env::args()
        .skip(1)
        .any(|arg| arg == "--json" || arg == "-j")
}
/// Reconcile an explicit `--format` value with the global `--json` flag for
/// `graph *` subcommands.
///
/// `--json` acts as an alias for `--format json`; combining it with any other
/// explicit format is an error.
fn resolve_graph_format(format: Option<&str>, json: bool) -> Result<String> {
    let Some(fmt) = format else {
        // No explicit format: --json selects JSON, otherwise default to text.
        return Ok(if json { "json" } else { "text" }.to_string());
    };
    if !json {
        // Explicit format alone is passed through verbatim (case preserved).
        return Ok(fmt.to_string());
    }
    if fmt.eq_ignore_ascii_case("json") {
        // `--format JSON --json` etc. normalizes to lowercase "json".
        return Ok("json".to_string());
    }
    anyhow::bail!(
        "conflicting output format: --format {fmt} cannot be combined with --json. \
         The global --json flag is an alias for --format json on graph * subcommands; \
         drop --json or change --format to json (or text/dot/mermaid/d2 without --json) \
         to resolve the conflict."
    );
}
/// Map a failure from `run` onto an exit code, delegating to the most
/// specific renderer available.
///
/// Probe order matters: CLI-level errors first, then rich query diagnostics,
/// then plain query errors, then bare validation errors, and finally the
/// generic fallback.
fn handle_run_error(err: &anyhow::Error, json_output: bool) -> i32 {
    if let Some(cli_error) = err.downcast_ref::<error::CliError>() {
        handle_cli_error(cli_error, json_output)
    } else if let Some(rich_error) = err
        .chain()
        .find_map(|cause| cause.downcast_ref::<RichQueryError>())
    {
        handle_rich_query_error(rich_error, json_output)
    } else if let Some(query_error) = err
        .chain()
        .find_map(|cause| cause.downcast_ref::<QueryError>())
    {
        handle_query_error(query_error, json_output)
    } else if let Some(validation_error) = err
        .chain()
        .find_map(|cause| cause.downcast_ref::<ValidationError>())
    {
        handle_validation_error(validation_error, json_output)
    } else {
        handle_other_error(err, json_output)
    }
}
fn handle_cli_error(cli_error: &error::CliError, json_output: bool) -> i32 {
if let error::CliError::PagerExit(code) = cli_error {
return *code;
}
if let error::CliError::OnnxRuntimeMissing { hint } = cli_error {
let mut streams = OutputStreams::new();
if json_output {
write_json_error(&mut streams, "sqry::onnx_runtime_missing", hint);
} else {
let _ = streams.write_diagnostic("error: ONNX Runtime not found\n");
let _ = streams.write_diagnostic(&format!("hint: {hint}\n"));
}
return cli_error.exit_code();
}
let mut streams = OutputStreams::new();
if json_output {
let code = match cli_error {
error::CliError::RuntimeError(_) => "sqry::runtime",
error::CliError::PagerExit(_) | error::CliError::OnnxRuntimeMissing { .. } => {
unreachable!() }
};
write_json_error(&mut streams, code, &cli_error.to_string());
} else {
let _ = streams.write_diagnostic(&format!("Error: {cli_error}"));
}
cli_error.exit_code()
}
/// Render a `RichQueryError` (JSON payload or miette graphical report) and
/// return its exit code.
fn handle_rich_query_error(rich_error: &RichQueryError, json_output: bool) -> i32 {
    let mut streams = OutputStreams::new();
    if json_output {
        let payload =
            serde_json::to_string_pretty(&rich_error.to_json_value()).unwrap_or_else(|_| {
                r#"{"error":{"code":"sqry::internal","message":"Failed to serialize error"}}"#
                    .to_string()
            });
        let _ = streams.write_result(&payload);
    } else {
        // Prefer miette's graphical rendering; fall back to plain Display if
        // rendering itself fails.
        let handler = GraphicalReportHandler::new_themed(GraphicalTheme::unicode());
        let mut rendered = String::new();
        match handler.render_report(&mut rendered, rich_error as &dyn miette::Diagnostic) {
            Ok(()) => {
                let _ = streams.write_diagnostic(&rendered);
            }
            Err(_) => {
                let _ = streams.write_diagnostic(&format!("Error: {rich_error}"));
            }
        }
    }
    rich_error.exit_code()
}
/// Render a `QueryError` (JSON envelope or plain diagnostic) and return its
/// exit code.
fn handle_query_error(query_error: &QueryError, json_output: bool) -> i32 {
    let mut streams = OutputStreams::new();
    if json_output {
        // Stable machine-readable error codes, one per variant.
        let code = match query_error {
            QueryError::Lex(_) => "sqry::syntax",
            QueryError::Parse(_) => "sqry::parse",
            QueryError::Validation(_) => "sqry::validation",
            QueryError::Execution(_) => "sqry::execution",
            QueryError::Cancelled => "sqry::cancelled",
        };
        write_json_error(&mut streams, code, &query_error.to_string());
        return query_error.exit_code();
    }
    // Legacy-index execution failures get an actionable rebuild hint before
    // the error itself.
    if let QueryError::Execution(ExecutionError::LegacyIndexMissingRelations { path, .. }) =
        query_error
    {
        let warning = format!(
            "Warning: Legacy index detected at {}. Rebuild with `sqry index --force {}` to enable relation queries.",
            path.display(),
            path.display()
        );
        let _ = streams.write_diagnostic(&warning);
    }
    let _ = streams.write_diagnostic(&format!("Error: {query_error}"));
    query_error.exit_code()
}
/// Render a bare `ValidationError`; validation failures always exit with
/// code 2.
fn handle_validation_error(validation_error: &ValidationError, json_output: bool) -> i32 {
    let mut streams = OutputStreams::new();
    match json_output {
        true => write_json_error(
            &mut streams,
            "sqry::validation",
            &validation_error.to_string(),
        ),
        false => {
            let _ = streams.write_diagnostic(&format!("Error: {validation_error}"));
        }
    }
    2
}
/// Render an error that matched no known error type; always exits with 1.
///
/// In JSON mode the message is JSON-escaped before interpolation: the previous
/// code embedded `{err:#}` verbatim, so any quote, backslash, or newline in
/// the error text produced malformed JSON output.
fn handle_other_error(err: &anyhow::Error, json_output: bool) -> i32 {
    if json_output {
        let message = json_escape_str(&format!("{err:#}"));
        println!(r#"{{"error":{{"code":"sqry::internal","message":"{message}"}}}}"#);
    } else {
        eprintln!("Error: {err:#}");
    }
    1
}

/// Escape a string for safe embedding inside a JSON string literal
/// (RFC 8259: `"`/`\` and all control characters below U+0020 must be escaped).
fn json_escape_str(s: &str) -> String {
    use std::fmt::Write;
    let mut out = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            '"' => out.push_str("\\\""),
            '\\' => out.push_str("\\\\"),
            '\n' => out.push_str("\\n"),
            '\r' => out.push_str("\\r"),
            '\t' => out.push_str("\\t"),
            c if (c as u32) < 0x20 => {
                // Remaining control characters use the \u00XX form.
                let _ = write!(out, "\\u{:04x}", c as u32);
            }
            c => out.push(c),
        }
    }
    out
}
/// Emit a structured JSON error envelope: `{"error":{"code":…,"message":…}}`.
fn write_json_error(streams: &mut OutputStreams, code: &str, message: &str) {
    let payload = serde_json::json!({
        "error": {
            "code": code,
            "message": message,
        }
    });
    // Serializing plain strings cannot realistically fail; keep a minimal
    // hand-built fallback just in case.
    let rendered = serde_json::to_string_pretty(&payload).unwrap_or_else(|_| {
        format!(r#"{{"error":{{"code":"{code}","message":"{message}"}}}}"#)
    });
    let _ = streams.write_result(&rendered);
}
/// Parse the (alias-expanded) command line and dispatch to the selected
/// subcommand. Returns `Err` so `main` can translate failures into exit
/// codes; parse errors exit directly via clap.
#[allow(clippy::too_many_lines)] fn run() -> Result<()> {
    // `@alias` tokens are expanded into their stored command + args before
    // clap ever sees the argv.
    let expanded_args = expand_alias_args()?;
    // Everything after argv[0], recorded into command history by the
    // search/query handlers.
    let history_argv: Vec<String> = expanded_args[1..].to_vec();
    let cmd = args::headings::normalize(Cli::command_with_taxonomy());
    // clap errors (including --help) terminate the process here.
    let matches = cmd
        .try_get_matches_from(expanded_args)
        .unwrap_or_else(|e| e.exit());
    let cli = Cli::from_arg_matches(&matches).unwrap_or_else(|e| e.exit());
    // Cross-flag validation beyond what clap expresses.
    if let Some(error) = cli.validate() {
        anyhow::bail!("{error}");
    }
    // `--list-languages` short-circuits normal dispatch.
    if cli.list_languages {
        list_enabled_languages(&cli)?;
        return Ok(());
    }
    // One arm per subcommand; each arm adapts the parsed fields onto the
    // corresponding `commands::run_*` / handler signature.
    match cli.command.as_deref() {
        Some(Command::Visualize(cmd)) => {
            commands::run_visualize(&cli, cmd).context("Visualize command failed")?;
        }
        Some(Command::Graph {
            operation,
            path,
            format,
            verbose,
            ..
        }) => {
            let search_path = path.as_deref().unwrap_or(cli.search_path());
            // --json and --format must agree; see resolve_graph_format.
            let effective_format = resolve_graph_format(format.as_deref(), cli.json)?;
            commands::run_graph(&cli, operation, search_path, &effective_format, *verbose)
                .context("Graph command failed")?;
        }
        Some(Command::Search {
            pattern,
            path,
            save_as,
            global,
            description,
            validate,
            cfg_filter,
            include_generated,
            macro_boundaries,
        }) => {
            let args = SearchCommandArgs {
                cli: &cli,
                pattern,
                path: path.as_deref(),
                save_as: save_as.as_deref(),
                global: *global,
                description: description.as_deref(),
                validate: *validate,
                history_argv: &history_argv,
                cfg_filter: cfg_filter.as_deref(),
                include_generated: *include_generated,
                macro_boundaries: *macro_boundaries,
            };
            handle_search_command(&args)?;
        }
        Some(Command::Query {
            query,
            path,
            explain,
            verbose,
            session,
            no_parallel,
            save_as,
            global,
            description,
            timeout,
            limit,
            validate,
            var,
            ..
        }) => handle_query_command(
            &cli,
            query,
            path.as_deref(),
            *explain,
            *verbose,
            *session,
            *no_parallel,
            save_as.as_deref(),
            *global,
            description.as_deref(),
            *timeout,
            *limit,
            *validate,
            var,
            &history_argv,
        )?,
        Some(Command::PlanQuery { query, path, limit }) => {
            commands::run_planner_query(&cli, query, path.as_deref(), *limit)
                .context("Plan-query command failed")?;
        }
        Some(Command::Shell { path }) => {
            commands::run_shell(&cli, path.as_deref().unwrap_or("."))
                .context("Shell command failed")?;
        }
        Some(Command::Batch(batch_cmd)) => {
            commands::run_batch(
                &cli,
                batch_cmd.path.as_deref().unwrap_or("."),
                batch_cmd.queries.as_path(),
                batch_cmd.output,
                batch_cmd.output_file.as_deref(),
                batch_cmd.continue_on_error,
                batch_cmd.stats,
                batch_cmd.sequential,
            )
            .context("Batch command failed")?;
        }
        Some(Command::Index {
            path,
            force,
            threads,
            status,
            add_to_gitignore,
            no_incremental,
            cache_dir,
            metrics_format,
            enable_macro_expansion,
            cfg_flags,
            expand_cache,
            classpath,
            no_classpath,
            classpath_depth,
            classpath_file,
            build_system,
            force_classpath,
            allow_nested,
            ..
        }) => handle_index_command(
            &cli,
            path.as_deref(),
            *force,
            *threads,
            *status,
            *add_to_gitignore,
            *no_incremental,
            cache_dir.as_deref(),
            *metrics_format,
            *enable_macro_expansion,
            cfg_flags,
            expand_cache.as_deref(),
            *classpath,
            *no_classpath,
            *classpath_depth,
            classpath_file.as_deref(),
            build_system.as_deref(),
            *force_classpath,
            *allow_nested,
        )?,
        Some(Command::Analyze {
            path,
            force,
            threads,
            label_budget,
            density_threshold,
            budget_exceeded_policy,
            no_labels,
        }) => {
            commands::run_analyze(
                &cli,
                path.as_deref(),
                *force,
                *threads,
                *label_budget,
                *density_threshold,
                budget_exceeded_policy.as_deref(),
                *no_labels,
            )
            .context("Analyze command failed")?;
        }
        Some(Command::Lsp { options }) => {
            // The global --workspace flag is the fallback when the LSP
            // options did not set one.
            let mut lsp_options = options.clone();
            if lsp_options.workspace.is_none() {
                lsp_options.workspace.clone_from(&cli.workspace);
            }
            sqry_lsp::run(lsp_options).context("LSP command failed")?;
        }
        Some(Command::Update {
            path,
            threads,
            stats,
            no_incremental,
            cache_dir,
            classpath,
            no_classpath,
            classpath_depth,
            classpath_file,
            build_system,
            force_classpath,
            ..
        }) => {
            let update_path = path.as_deref().unwrap_or(cli.search_path());
            commands::run_update(
                &cli,
                update_path,
                *threads,
                *stats,
                *no_incremental,
                cache_dir.as_deref(),
                *classpath,
                *no_classpath,
                *classpath_depth,
                classpath_file.as_deref(),
                build_system.as_deref(),
                *force_classpath,
            )
            .context("Update command failed")?;
        }
        Some(Command::Watch {
            path,
            threads,
            debounce,
            stats,
            build,
            classpath,
            no_classpath,
            classpath_depth,
            classpath_file,
            build_system,
            force_classpath,
            ..
        }) => {
            commands::run_watch(
                &cli,
                path.clone(),
                *threads,
                *debounce,
                *stats,
                *build,
                *classpath,
                *no_classpath,
                *classpath_depth,
                classpath_file.clone(),
                build_system.clone(),
                *force_classpath,
            )
            .context("Watch command failed")?;
        }
        Some(Command::Repair {
            path,
            fix_orphans,
            fix_dangling,
            recompute_checksum,
            fix_all,
            dry_run,
        }) => {
            let repair_path = path.as_deref().unwrap_or(cli.search_path());
            commands::run_repair(
                &cli,
                repair_path,
                *fix_orphans,
                *fix_dangling,
                *recompute_checksum,
                *fix_all,
                *dry_run,
            )
            .context("Repair command failed")?;
        }
        Some(Command::Cache { action }) => {
            commands::run_cache(&cli, action).context("Cache command failed")?;
        }
        Some(Command::Config { action }) => {
            handle_config_command(action)?;
        }
        Some(Command::Completions(completions_cmd)) => {
            commands::run_completions(completions_cmd.shell)
                .context("Completions command failed")?;
        }
        Some(Command::Workspace { action }) => {
            // The global flag and the subcommand's positional argument would
            // silently shadow each other; reject the combination explicitly.
            if cli.workspace.is_some() {
                anyhow::bail!(
                    "the global `--workspace` flag (and `SQRY_WORKSPACE_FILE` env var) \
                    conflicts with the `sqry workspace` subcommand. \
                    The subcommand has its own positional `<workspace>` argument; \
                    drop the global flag (or unset `SQRY_WORKSPACE_FILE`) and pass \
                    the workspace path positionally instead."
                );
            }
            commands::run_workspace(&cli, action).context("Workspace command failed")?;
        }
        Some(Command::Alias { action }) => {
            commands::run_alias(&cli, action).context("Alias command failed")?;
        }
        Some(Command::History { action }) => {
            commands::run_history(&cli, action).context("History command failed")?;
        }
        Some(Command::Insights { action }) => {
            commands::run_insights(&cli, action).context("Insights command failed")?;
        }
        Some(Command::Troubleshoot {
            output,
            dry_run,
            include_trace,
            window,
        }) => {
            commands::run_troubleshoot(&cli, output.as_deref(), *dry_run, *include_trace, window)
                .context("Troubleshoot command failed")?;
        }
        Some(Command::Ask {
            query,
            path,
            auto_execute,
            dry_run,
            threshold,
            model_dir,
            allow_unverified_model,
            allow_model_download,
        }) => {
            let search_path = path.as_deref().unwrap_or(cli.search_path());
            commands::run_ask(
                &cli,
                query,
                search_path,
                *auto_execute,
                *dry_run,
                *threshold,
                model_dir.as_deref(),
                *allow_unverified_model,
                *allow_model_download,
            )
            .context("Ask command failed")?;
        }
        Some(Command::Duplicates {
            path,
            r#type,
            threshold,
            max_results,
            exact,
        }) => {
            commands::run_duplicates(
                &cli,
                path.as_deref(),
                r#type,
                *threshold,
                *max_results,
                *exact,
            )
            .context("Duplicates command failed")?;
        }
        Some(Command::Cycles {
            path,
            r#type,
            min_depth,
            max_depth,
            include_self,
            max_results,
        }) => {
            commands::run_cycles(
                &cli,
                path.as_deref(),
                r#type,
                *min_depth,
                *max_depth,
                *include_self,
                *max_results,
            )
            .context("Cycles command failed")?;
        }
        Some(Command::Unused {
            path,
            scope,
            lang,
            kind,
            max_results,
        }) => {
            commands::run_unused(
                &cli,
                path.as_deref(),
                scope,
                lang.as_deref(),
                kind.as_deref(),
                *max_results,
            )
            .context("Unused command failed")?;
        }
        Some(Command::Export {
            path,
            format,
            direction,
            filter_lang,
            filter_edge,
            highlight_cross,
            show_details,
            show_labels,
            output,
        }) => {
            commands::run_export(
                &cli,
                path.as_deref(),
                format,
                direction,
                filter_lang.as_deref(),
                filter_edge.as_deref(),
                *highlight_cross,
                *show_details,
                *show_labels,
                output.as_deref(),
            )
            .context("Export command failed")?;
        }
        Some(Command::Explain {
            file,
            symbol,
            path,
            no_context,
            no_relations,
        }) => {
            // CLI exposes negative flags; the runner takes positive booleans.
            commands::run_explain(
                &cli,
                file,
                symbol,
                path.as_deref(),
                !no_context,
                !no_relations,
            )
            .context("Explain command failed")?;
        }
        Some(Command::Similar {
            file,
            symbol,
            path,
            threshold,
            limit,
        }) => {
            commands::run_similar(&cli, file, symbol, path.as_deref(), *threshold, *limit)
                .context("Similar command failed")?;
        }
        Some(Command::Subgraph {
            symbols,
            path,
            depth,
            max_nodes,
            no_callers,
            no_callees,
            include_imports,
        }) => {
            // Negative flags inverted here as well.
            commands::run_subgraph(
                &cli,
                symbols,
                path.as_deref(),
                *depth,
                *max_nodes,
                !no_callers,
                !no_callees,
                *include_imports,
            )
            .context("Subgraph command failed")?;
        }
        Some(Command::Impact {
            symbol,
            path,
            in_file,
            depth,
            limit,
            direct_only,
            show_files,
        }) => {
            commands::run_impact(
                &cli,
                symbol,
                path.as_deref(),
                in_file.as_deref(),
                *depth,
                *limit,
                !direct_only,
                *show_files,
            )
            .context("Impact command failed")?;
        }
        Some(Command::Diff {
            base,
            target,
            path,
            limit,
            kind,
            change_type,
            ..
        }) => {
            // Comma-separated filter lists; absent flags mean "no filter".
            let kinds: Vec<String> = kind
                .as_ref()
                .map(|s| s.split(',').map(String::from).collect())
                .unwrap_or_default();
            let change_types: Vec<String> = change_type
                .as_ref()
                .map(|s| s.split(',').map(String::from).collect())
                .unwrap_or_default();
            commands::run_diff(
                &cli,
                base,
                target,
                path.as_deref(),
                *limit,
                &kinds,
                &change_types,
            )
            .context("Diff command failed")?;
        }
        Some(Command::Hier {
            query,
            path,
            limit,
            max_files,
            context,
            kind,
            lang,
        }) => {
            // Same comma-separated list convention as Diff.
            let kinds: Vec<String> = kind
                .as_ref()
                .map(|s| s.split(',').map(String::from).collect())
                .unwrap_or_default();
            let languages: Vec<String> = lang
                .as_ref()
                .map(|s| s.split(',').map(String::from).collect())
                .unwrap_or_default();
            commands::run_hier_search(
                &cli,
                query,
                path.as_deref(),
                *limit,
                *max_files,
                *context,
                &kinds,
                &languages,
            )
            .context("Hierarchical search command failed")?;
        }
        Some(Command::Mcp { command }) => {
            commands::mcp::run(command).context("MCP command failed")?;
        }
        Some(Command::Daemon { action }) => {
            commands::daemon::run(&cli, action).context("Daemon command failed")?;
        }
        // No subcommand: bare-pattern search or usage message.
        None => handle_no_command(&cli, &history_argv)?,
    }
    Ok(())
}
/// Print the enabled language plugins (name, id, version, file extensions).
///
/// Resolves the plugin set the same way a fresh index write would, so the
/// listing matches what indexing will actually enable.
fn list_enabled_languages(cli: &Cli) -> Result<()> {
    let root = std::path::Path::new(cli.search_path());
    let selection = plugin_defaults::resolve_plugin_selection(
        cli,
        root,
        plugin_defaults::PluginSelectionMode::FreshWrite,
    )?;
    let manager = selection.plugin_manager;
    println!("Enabled languages ({}):", manager.plugins().len());
    for plugin in manager.plugins() {
        let meta = plugin.metadata();
        let extensions = plugin.extensions().join(", ");
        println!(
            "- {} (id: {}, v{}): [{}]",
            meta.name, meta.id, meta.version, extensions
        );
    }
    Ok(())
}
/// Bundled arguments for `handle_search_command`, grouping the many
/// `sqry search` flags into one struct.
struct SearchCommandArgs<'a> {
    /// Parsed global CLI options.
    cli: &'a Cli,
    /// The search pattern positional.
    pattern: &'a str,
    /// Explicit search path; falls back to `cli.search_path()` when `None`.
    path: Option<&'a str>,
    /// `--save-as`: store the pattern as a named alias after a successful run.
    save_as: Option<&'a str>,
    /// `--global`: save the alias globally rather than per-project.
    global: bool,
    /// Optional alias description.
    description: Option<&'a str>,
    /// Index validation mode (off/warn/fail).
    validate: ValidationMode,
    /// argv (minus argv[0]) recorded into command history.
    history_argv: &'a [String],
    /// `--cfg` filter expression, if any.
    cfg_filter: Option<&'a str>,
    /// Include generated code in results.
    include_generated: bool,
    /// Respect macro boundaries during matching.
    macro_boundaries: bool,
}
/// Execute `sqry search`: optional index validation, the search itself,
/// history recording, and optional alias saving.
fn handle_search_command(args: &SearchCommandArgs<'_>) -> Result<()> {
    let search_path = args.path.unwrap_or(args.cli.search_path());

    // A failed index validation terminates the process with its exit code.
    if let Err(code) =
        validate_index_if_requested(args.cli, search_path, args.validate, args.cli.auto_rebuild)
    {
        std::process::exit(code);
    }

    let outcome = commands::run_search(
        args.cli,
        args.pattern,
        search_path,
        args.cfg_filter,
        args.include_generated,
        args.macro_boundaries,
    );
    // History records failures too, before the error propagates.
    record_history(search_path, "search", args.history_argv, outcome.is_ok());
    outcome.context("Search command failed")?;

    // Only successful searches can be saved as aliases.
    if let Some(alias_name) = args.save_as {
        commands::save_search_alias(
            args.cli,
            alias_name,
            args.pattern,
            args.global,
            args.description,
        )
        .context("Failed to save alias")?;
    }
    Ok(())
}
/// Execute `sqry query`: flag sanity checks, optional index validation, the
/// query itself, history recording, and optional alias saving.
#[allow(clippy::too_many_arguments)]
#[allow(clippy::fn_params_excessive_bools)]
fn handle_query_command(
    cli: &Cli,
    query: &str,
    path: Option<&str>,
    explain: bool,
    verbose: bool,
    session: bool,
    no_parallel: bool,
    save_as: Option<&str>,
    global: bool,
    description: Option<&str>,
    timeout: Option<u64>,
    limit: Option<usize>,
    validate: ValidationMode,
    variables: &[String],
    history_argv: &[String],
) -> Result<()> {
    // Session mode caches the executor configuration, so a per-query
    // parallelism toggle cannot take effect; reject the combination early.
    if session && no_parallel {
        anyhow::bail!(
            "--session and --no-parallel are mutually exclusive. \
             Session mode caches the executor configuration, so subsequent queries \
             cannot toggle parallel execution. Use either --session (for performance) \
             or --no-parallel (for A/B testing), but not both."
        );
    }
    let search_path = cli.resolve_subcommand_path(path)?;

    // A failed index validation terminates the process with its exit code.
    if let Err(code) = validate_index_if_requested(cli, search_path, validate, cli.auto_rebuild) {
        std::process::exit(code);
    }

    let outcome = commands::run_query(
        cli,
        query,
        search_path,
        explain,
        verbose,
        session,
        no_parallel,
        timeout,
        limit,
        variables,
    );
    // History records failures too, before the error propagates.
    record_history(search_path, "query", history_argv, outcome.is_ok());
    outcome?;

    // Only successful queries can be saved as aliases.
    if let Some(alias_name) = save_as {
        commands::save_query_alias(cli, alias_name, query, global, description)
            .context("Failed to save alias")?;
    }
    Ok(())
}
/// Check the persisted index for staleness before a search/query runs.
///
/// Staleness is the fraction of indexed files that no longer exist on disk;
/// above 20% the index is considered stale. Returns `Err(exit_code)` only in
/// `Fail` mode (and only when auto-rebuild is off or the rebuild fails);
/// a missing index or an unloadable graph is treated as "nothing to validate"
/// and returns `Ok(())`.
fn validate_index_if_requested(
    cli: &Cli,
    search_path: &str,
    validate: ValidationMode,
    auto_rebuild: bool,
) -> Result<(), i32> {
    use commands::graph::loader::{GraphLoadConfig, load_unified_graph_for_cli};
    use std::path::Path;
    // More than 20% of indexed files missing => stale.
    const ORPHAN_THRESHOLD: f64 = 0.20;
    if matches!(validate, ValidationMode::Off) {
        return Ok(());
    }
    let search_root = Path::new(search_path);
    let storage = sqry_core::graph::unified::persistence::GraphStorage::new(search_root);
    if !storage.exists() {
        // No persisted graph: nothing to validate.
        return Ok(());
    }
    let config = GraphLoadConfig::default();
    // A graph that fails to load is deliberately not a validation failure.
    let Ok(graph) = load_unified_graph_for_cli(search_root, &config, cli) else {
        return Ok(());
    };
    let files = graph.files();
    let mut total_files = 0usize;
    let mut orphaned_files = 0usize;
    for (_file_id, file_path) in files.iter() {
        total_files += 1;
        // Stored paths may be absolute or relative to the search root.
        let full_path = if file_path.is_absolute() {
            file_path.to_path_buf()
        } else {
            search_root.join(file_path.as_ref())
        };
        if !full_path.exists() {
            orphaned_files += 1;
        }
    }
    let orphan_ratio = if total_files > 0 {
        // Counts go through u32 because f64::from is not defined for usize;
        // saturating at u32::MAX is harmless for a ratio.
        let orphaned_f = f64::from(u32::try_from(orphaned_files).unwrap_or(u32::MAX));
        let total_f = f64::from(u32::try_from(total_files).unwrap_or(u32::MAX));
        orphaned_f / total_f
    } else {
        0.0
    };
    let is_stale = orphan_ratio > ORPHAN_THRESHOLD;
    match validate {
        ValidationMode::Fail if is_stale => {
            if auto_rebuild {
                eprintln!(
                    "Index is stale ({:.1}% of files missing). Rebuilding because --auto-rebuild is set.",
                    orphan_ratio * 100.0
                );
                // Positional args mirror handle_index_command's forwarding
                // order into commands::run_index.
                if let Err(err) = commands::run_index(
                    cli,
                    search_path,
                    true, // force
                    None, // threads
                    false, // add_to_gitignore
                    false, // no_incremental
                    None, // cache_dir
                    false, // enable_macro_expansion
                    &[], // cfg_flags
                    None, // expand_cache
                    false, // classpath
                    false, // no_classpath
                    crate::args::ClasspathDepthArg::Full,
                    None, // classpath_file
                    None, // build_system
                    false, // force_classpath
                    false, // allow_nested
                ) {
                    eprintln!("Error: auto-rebuild failed: {err}");
                    return Err(2);
                }
                return Ok(());
            }
            eprintln!(
                "Error: Index is stale ({:.1}% of files missing). \
                Run 'sqry index --force' to rebuild.",
                orphan_ratio * 100.0
            );
            Err(2)
        }
        ValidationMode::Warn if is_stale => {
            eprintln!(
                "Warning: Index is stale ({:.1}% of files missing). \
                Consider running 'sqry index --force' to rebuild.",
                orphan_ratio * 100.0
            );
            Ok(())
        }
        _ => Ok(()),
    }
}
/// Dispatch `sqry index`: either print index status (`--status`) or run a
/// (re)build with the supplied options.
///
/// `_no_classpath` is forwarded verbatim to `commands::run_index`; the leading
/// underscore only silences the unused-variable lint on configurations where
/// it is not read directly here.
#[allow(clippy::too_many_arguments)]
#[allow(clippy::fn_params_excessive_bools)]
#[allow(unused_variables)]
#[allow(clippy::used_underscore_binding)]
fn handle_index_command(
    cli: &Cli,
    path: Option<&str>,
    force: bool,
    threads: Option<usize>,
    status: bool,
    add_to_gitignore: bool,
    no_incremental: bool,
    cache_dir: Option<&str>,
    metrics_format: crate::args::MetricsFormat,
    enable_macro_expansion: bool,
    cfg_flags: &[String],
    expand_cache: Option<&std::path::Path>,
    classpath: bool,
    _no_classpath: bool,
    classpath_depth: crate::args::ClasspathDepthArg,
    classpath_file: Option<&std::path::Path>,
    build_system: Option<&str>,
    force_classpath: bool,
    allow_nested: bool,
) -> Result<()> {
    let index_path = cli.resolve_subcommand_path(path)?;
    // Macro expansion executes arbitrary build scripts / proc macros: warn loudly.
    if enable_macro_expansion {
        eprintln!("WARNING: Macro expansion enabled. This executes build scripts and proc macros.");
        eprintln!(" Only use on trusted codebases.");
    }
    if status {
        // `--status` is read-only: report metrics and return.
        commands::run_index_status(cli, index_path, metrics_format)
            .context("Index status command failed")?;
    } else {
        commands::run_index(
            cli,
            index_path,
            force,
            threads,
            add_to_gitignore,
            no_incremental,
            cache_dir,
            enable_macro_expansion,
            cfg_flags,
            // Already an `Option<&Path>`; the previous
            // `.map(std::path::Path::new)` round-trip was a no-op.
            expand_cache,
            classpath,
            _no_classpath,
            classpath_depth,
            classpath_file,
            build_system,
            force_classpath,
            allow_nested,
        )
        .context("Index command failed")?;
    }
    Ok(())
}
/// Dispatch `sqry config <action>`; each action maps 1:1 onto a
/// `commands::run_config_*` helper.
fn handle_config_command(action: &args::ConfigAction) -> Result<()> {
    use args::ConfigAction;
    match action {
        ConfigAction::Init { path, force } => {
            commands::run_config_init(path.as_deref(), *force).context("Config init failed")
        }
        ConfigAction::Show { path, json, key } => {
            commands::run_config_show(path.as_deref(), *json, key.as_deref())
                .context("Config show failed")
        }
        ConfigAction::Set {
            path,
            key,
            value,
            yes,
        } => commands::run_config_set(path.as_deref(), key, value, *yes)
            .context("Config set failed"),
        ConfigAction::Get { path, key } => {
            commands::run_config_get(path.as_deref(), key).context("Config get failed")
        }
        ConfigAction::Validate { path } => {
            commands::run_config_validate(path.as_deref()).context("Config validate failed")
        }
        // Alias sub-actions have their own dispatcher.
        ConfigAction::Alias(alias_action) => handle_config_alias_action(alias_action),
    }
}
/// Dispatch `sqry config alias <action>` onto the matching
/// `commands::run_config_alias_*` helper.
fn handle_config_alias_action(action: &args::ConfigAliasAction) -> Result<()> {
    use args::ConfigAliasAction;
    match action {
        ConfigAliasAction::Set {
            path,
            name,
            query,
            description,
        } => commands::run_config_alias_set(path.as_deref(), name, query, description.as_deref())
            .context("Config alias set failed"),
        ConfigAliasAction::List { path, json } => {
            commands::run_config_alias_list(path.as_deref(), *json)
                .context("Config alias list failed")
        }
        ConfigAliasAction::Remove { path, name } => {
            commands::run_config_alias_remove(path.as_deref(), name)
                .context("Config alias remove failed")
        }
    }
}
/// Handle an invocation with no subcommand: a bare positional pattern behaves
/// like `sqry search <PATTERN>`, and nothing at all prints usage and exits 2.
fn handle_no_command(cli: &Cli, history_argv: &[String]) -> Result<()> {
    let Some(pattern) = &cli.pattern else {
        eprintln!("Error: No pattern or command provided");
        eprintln!();
        eprintln!("Usage: sqry <PATTERN> [PATH]");
        eprintln!(" sqry search <PATTERN> [PATH]");
        eprintln!(" sqry query <QUERY> [PATH]");
        eprintln!(" sqry index [PATH]");
        eprintln!(" sqry update [PATH]");
        eprintln!(" sqry cache <stats|clear>");
        eprintln!(" sqry daemon {{start,stop,status,logs}}");
        eprintln!();
        eprintln!("Try 'sqry --help' for more information.");
        std::process::exit(2)
    };
    let outcome = commands::run_search(cli, pattern, cli.search_path(), None, false, false);
    // History records failures too, before the error propagates.
    record_history(cli.search_path(), "search", history_argv, outcome.is_ok());
    outcome.context("Search command failed")?;
    Ok(())
}
/// Flags that always consume the next argv token as their value, so the alias
/// scanner must skip that token rather than mistake it for the `@alias` or
/// path positional.
const FLAGS_WITH_VALUES: &[&str] = &[
    "--columns",
    "--limit",
    "--format",
    "--kind",
    "-k",
    "--lang",
    "-l",
    "--max-depth",
    "--fuzzy-algorithm",
    "--fuzzy-threshold",
    "--fuzzy-max-candidates",
    "--validate",
    "--threshold-dangling-refs",
    "--threshold-orphaned-files",
    "--threshold-id-gaps",
    "--context",
    "-C",
    "--max-text-results",
    "--config-dir",
    "--path",
    "--type",
];
/// Flags whose value is optional: the following token counts as the value
/// only when it looks like one (see `is_optional_flag_value`).
const FLAGS_WITH_OPTIONAL_VALUES: &[&str] = &["--preview", "-p"];
/// Result of scanning argv for an `@alias` reference.
struct AliasScan {
    /// Index in argv of the `@alias` token, if one was found.
    alias_index: Option<usize>,
    /// The positional path immediately following the alias token, if present.
    remaining_path: Option<String>,
}
/// Expand a leading `@alias` token in argv into its stored command and
/// arguments. Returns argv unchanged when no alias reference is present.
fn expand_alias_args() -> Result<Vec<String>> {
    use persistence::{AliasManager, PersistenceConfig, open_shared_index};
    use std::path::Path;

    let argv: Vec<String> = std::env::args().collect();
    let scan = scan_alias_args(&argv);
    let Some(alias_pos) = scan.alias_index else {
        return Ok(argv);
    };
    let alias_name = alias_name_from_arg(&argv[alias_pos])?;
    // The alias is looked up against the index scoped to the path that
    // follows it, defaulting to the current directory.
    let lookup_path = scan.remaining_path.as_deref().unwrap_or(".");
    let index = open_shared_index(Some(Path::new(lookup_path)), PersistenceConfig::from_env())
        .or_else(|_| open_shared_index(Some(Path::new(".")), PersistenceConfig::from_env()))?;
    let manager = AliasManager::new(index);
    let alias_with_scope = load_alias(&manager, alias_name)?;
    Ok(build_expanded_args(
        &argv,
        alias_pos,
        scan.remaining_path.as_deref(),
        &alias_with_scope,
    ))
}
/// Scan argv (skipping argv[0]) for a leading `@alias` positional.
///
/// Flags and their consumed values are stepped over; the first positional
/// decides the outcome: an `@`-token yields its index (plus the following
/// path token, if any), any other positional means "no alias".
fn scan_alias_args(args: &[String]) -> AliasScan {
    let mut consume_value = false;
    for (pos, arg) in args.iter().enumerate().skip(1) {
        if consume_value {
            // This token is the value of the previous flag.
            consume_value = false;
            continue;
        }
        if arg.starts_with('-') {
            consume_value = should_skip_next_arg(args, pos, arg);
            continue;
        }
        if !arg.starts_with('@') {
            // First positional is not an alias reference: stop scanning.
            break;
        }
        // A non-flag token right after the alias is the scoping search path.
        let remaining_path = args
            .get(pos + 1)
            .filter(|next| !next.starts_with('-'))
            .cloned();
        return AliasScan {
            alias_index: Some(pos),
            remaining_path,
        };
    }
    AliasScan {
        alias_index: None,
        remaining_path: None,
    }
}
/// Decide whether the token after `arg` is that flag's value and must be
/// skipped by the alias scanner.
fn should_skip_next_arg(args: &[String], index: usize, arg: &str) -> bool {
    // `--flag=value` carries its value inline, so nothing follows to skip.
    if arg.contains('=') {
        return false;
    }
    if FLAGS_WITH_VALUES.contains(&arg) {
        return true;
    }
    if !FLAGS_WITH_OPTIONAL_VALUES.contains(&arg) {
        return false;
    }
    // Optional-value flags consume the next token only when it looks like a
    // value rather than another flag / alias / positional.
    args.get(index + 1)
        .is_some_and(|next| is_optional_flag_value(next))
}
/// A token counts as the value of an optional-value flag (e.g. `--preview 3`)
/// only when it is a bare non-negative integer; alias refs (`@x`) and flags
/// (`-x`) never do.
fn is_optional_flag_value(arg: &str) -> bool {
    if arg.starts_with('@') || arg.starts_with('-') {
        return false;
    }
    arg.parse::<usize>().is_ok()
}
/// Extract the alias name from an `@name` token; a bare `@` (or a token
/// without the `@` prefix) is rejected.
fn alias_name_from_arg(arg: &str) -> Result<&str> {
    match arg.strip_prefix('@') {
        Some(name) if !name.is_empty() => Ok(name),
        _ => anyhow::bail!("Empty alias name: '@' must be followed by an alias name"),
    }
}
/// Look up a stored alias by name, converting "not found" into a
/// user-friendly error that points at `sqry alias list`.
fn load_alias(
    manager: &persistence::AliasManager,
    alias_name: &str,
) -> Result<persistence::AliasWithScope> {
    use persistence::AliasError;
    manager.get(alias_name).map_err(|err| match err {
        AliasError::NotFound { name } => anyhow::anyhow!(
            "Unknown alias '@{name}'. Use 'sqry alias list' to see available aliases."
        ),
        other => other.into(),
    })
}
/// Rebuild argv with the `@alias` token replaced by its stored command and
/// arguments.
///
/// Layout: argv[0], flags before the alias, the alias's command + stored
/// args, the effective path (explicit or "."), then everything after the
/// consumed token(s).
fn build_expanded_args(
    args: &[String],
    alias_index: usize,
    remaining_path: Option<&str>,
    alias_with_scope: &persistence::AliasWithScope,
) -> Vec<String> {
    let alias = &alias_with_scope.alias;
    let mut expanded = Vec::with_capacity(args.len() + alias.args.len() + 2);
    expanded.push(args[0].clone());
    expanded.extend_from_slice(&args[1..alias_index]);
    expanded.push(alias.command.clone());
    expanded.extend(alias.args.iter().cloned());
    match remaining_path {
        Some(path) => {
            // The original path token was consumed together with the alias.
            expanded.push(path.to_string());
            expanded.extend(args.iter().skip(alias_index + 2).cloned());
        }
        None => {
            expanded.push(".".to_string());
            expanded.extend(args.iter().skip(alias_index + 1).cloned());
        }
    }
    expanded
}
/// Best-effort command-history recording: disabled via config, and any
/// failure is logged at debug level rather than surfaced to the user.
fn record_history(search_path: &str, command: &str, argv: &[String], success: bool) {
    use persistence::{HistoryManager, PersistenceConfig, open_shared_index};
    use std::path::{Path, PathBuf};

    let config = PersistenceConfig::from_env();
    if !config.history_enabled {
        return;
    }
    let attempt = || -> anyhow::Result<()> {
        let index = open_shared_index(Some(Path::new(search_path)), config)?;
        let manager = HistoryManager::new(index);
        // Fall back to the raw path string when canonicalization fails.
        let working_dir =
            std::fs::canonicalize(search_path).unwrap_or_else(|_| PathBuf::from(search_path));
        manager.record(command, argv, &working_dir, success, None)?;
        Ok(())
    };
    if let Err(e) = attempt() {
        log::debug!("Failed to record history: {e}");
    }
}
/// Smoke tests asserting that the clap wiring for `sqry daemon …` produces
/// the expected `DaemonAction` variants, flag values, and defaults.
#[cfg(test)]
mod wiring_tests {
    use super::*;
    use clap::Parser;
    // Defaults: no explicit flags must yield sqryd_path=None, timeout=10.
    large_stack_test! {
        #[test]
        fn daemon_start_parses_wiring() {
            let cli = args::Cli::parse_from(["sqry", "daemon", "start"]);
            if let Some(args::Command::Daemon { action }) = cli.command.as_deref() {
                assert!(
                    matches!(
                        action.as_ref(),
                        args::DaemonAction::Start { sqryd_path: None, timeout: 10 }
                    ),
                    "DaemonAction::Start must have default sqryd_path=None, timeout=10"
                );
            } else {
                panic!("Expected Command::Daemon");
            }
        }
    }
    // `--timeout 30` must flow through into the Stop variant.
    large_stack_test! {
        #[test]
        fn daemon_stop_parses_wiring() {
            let cli = args::Cli::parse_from(["sqry", "daemon", "stop", "--timeout", "30"]);
            if let Some(args::Command::Daemon { action }) = cli.command.as_deref() {
                assert!(
                    matches!(action.as_ref(), args::DaemonAction::Stop { timeout: 30 }),
                    "DaemonAction::Stop must have timeout=30 when --timeout 30 is passed"
                );
            } else {
                panic!("Expected Command::Daemon");
            }
        }
    }
    // `--json` must set Status { json: true }.
    large_stack_test! {
        #[test]
        fn daemon_status_json_parses_wiring() {
            let cli = args::Cli::parse_from(["sqry", "daemon", "status", "--json"]);
            if let Some(args::Command::Daemon { action }) = cli.command.as_deref() {
                assert!(
                    matches!(action.as_ref(), args::DaemonAction::Status { json: true }),
                    "DaemonAction::Status must have json=true when --json is passed"
                );
            } else {
                panic!("Expected Command::Daemon");
            }
        }
    }
    // Short flag `-f` must map onto Logs { follow: true }.
    large_stack_test! {
        #[test]
        fn daemon_logs_follow_parses_wiring() {
            let cli = args::Cli::parse_from(["sqry", "daemon", "logs", "-f"]);
            if let Some(args::Command::Daemon { action }) = cli.command.as_deref() {
                assert!(
                    matches!(action.as_ref(), args::DaemonAction::Logs { follow: true, .. }),
                    "DaemonAction::Logs must have follow=true when -f is passed"
                );
            } else {
                panic!("Expected Command::Daemon");
            }
        }
    }
}