/// Entry point for the `query` command: loads the project index, runs the
/// requested query mode, and emits results in `format`.
///
/// Dispatch order (first matching mode wins and returns early):
/// raw search → docs-only search → coverage gaps → extract candidates →
/// suggest rename → PTX modes → standard index query. A `search_mode` of
/// `"hybrid"` runs the query both lexically and semantically and fuses the
/// two rankings with reciprocal-rank fusion (`rrf_fuse`).
///
/// # Errors
/// Propagates failures from index loading, querying, git-history lookup,
/// and output emission.
#[allow(clippy::too_many_arguments)]
#[provable_contracts_macros::contract("pmat-core.yaml", equation = "path_exists")]
pub async fn handle_query(
    query: String,
    limit: usize,
    min_grade: Option<String>,
    max_complexity: Option<u32>,
    language: Option<String>,
    path_pattern: Option<String>,
    project_path: PathBuf,
    format: QueryOutputFormat,
    include_source: bool,
    rebuild_index: bool,
    exclude_tests: bool,
    rank_by: Option<String>,
    min_pagerank: Option<f32>,
    include_project: Vec<PathBuf>,
    churn: bool,
    duplicates: bool,
    entropy: bool,
    faults: bool,
    coverage: bool,
    uncovered_only: bool,
    coverage_diff: Option<PathBuf>,
    coverage_file: Option<PathBuf>,
    coverage_gaps: bool,
    include_excluded: bool,
    definition_type: Option<String>,
    code: bool,
    git_history: bool,
    regex: bool,
    literal: bool,
    search_mode: Option<String>,
    raw: bool,
    case_sensitive: bool,
    ignore_case: bool,
    exclude: Vec<String>,
    exclude_file: Vec<String>,
    files_with_matches: bool,
    count: bool,
    after_context: Option<usize>,
    before_context: Option<usize>,
    context_lines: Option<usize>,
    ptx_flow: bool,
    ptx_diagnostics: bool,
    suggest_rename: bool,
    apply: bool,
    docs: bool,
    docs_only: bool,
    extract_candidates: bool,
    max_module_lines: usize,
) -> anyhow::Result<()> {
    // Debug-only precondition: an empty query is only meaningful for the
    // modes that scan the whole index instead of matching against a query.
    debug_assert!(!query.is_empty() || coverage_gaps || extract_candidates,
        "query string required unless --coverage-gaps or --extract-candidates");
    // Keep JSON output machine-parseable: suppress progress/stat chatter.
    let quiet = matches!(format, QueryOutputFormat::Json);
    let mut profile = QueryProfile::new();
    // Normalize --search-mode so the comparisons below are case-insensitive.
    let search_mode_normalized = search_mode
        .as_deref()
        .map(|s| s.to_lowercase());
    let is_search_mode_hybrid =
        matches!(search_mode_normalized.as_deref(), Some("hybrid"));
    let is_search_mode_lexical =
        matches!(search_mode_normalized.as_deref(), Some("lexical"));
    // Lexical and hybrid modes force a literal (non-regex) text search,
    // overriding any explicit --regex/--literal flags via shadowing.
    let (regex, literal) = if is_search_mode_lexical || is_search_mode_hybrid {
        (false, true)
    } else {
        (regex, literal)
    };
    // Raw mode returns before the index is ever loaded.
    if raw {
        return handle_raw_search_mode(
            &query,
            limit,
            &format,
            quiet,
            literal,
            ignore_case,
            &language,
            &exclude_file,
            &exclude,
            files_with_matches,
            count,
            context_lines,
            after_context,
            before_context,
            &project_path,
            exclude_tests,
        );
    }
    // Docs-only search also skips index loading entirely.
    if docs_only {
        return handle_docs_search(&query, limit, &project_path, &format, quiet);
    }
    let mut index = load_query_index(&project_path, rebuild_index, &include_project, quiet)?;
    profile.phase("load_index");
    let is_regex_or_literal = regex || literal;
    let is_ptx = ptx_flow || ptx_diagnostics;
    // NOTE(review): presumably readies source text / ranking data for the
    // selected mode — confirm against prepare_index_for_mode.
    prepare_index_for_mode(&mut index, is_regex_or_literal, is_ptx, &rank_by);
    profile.phase("source_load");
    emit_index_stats(&index, quiet);
    if coverage_gaps {
        let siblings = collect_siblings(&project_path, &include_project);
        return handle_coverage_gaps_mode(
            &index,
            &project_path,
            &format,
            &coverage_file,
            &language,
            &path_pattern,
            exclude_tests,
            limit,
            quiet,
            include_excluded,
            files_with_matches,
            count,
            &siblings,
        )
        .await;
    }
    if extract_candidates {
        return handle_extract_candidates_mode(
            &mut index,
            &project_path,
            &format,
            &language,
            &path_pattern,
            exclude_tests,
            limit,
            quiet,
            max_module_lines,
        )
        .await;
    }
    if suggest_rename {
        return handle_suggest_rename_mode(
            &index,
            &project_path,
            &format,
            &path_pattern,
            limit,
            quiet,
            apply,
        );
    }
    // PTX modes short-circuit with their own pre-rendered output.
    if let Some(output) = handle_ptx_modes(ptx_flow, ptx_diagnostics, &index, &format) {
        print!("{output}");
        return Ok(());
    }
    // Source inclusion is forced on for --code and for regex/literal searches.
    let effective_include_source = include_source || code || is_regex_or_literal;
    // The language/exclude filter inputs are only forwarded to the merge
    // context for regex/literal searches; otherwise pass empty values.
    let merge_language = if is_regex_or_literal {
        language.clone()
    } else {
        None
    };
    let merge_exclude_file = if is_regex_or_literal {
        exclude_file.clone()
    } else {
        Vec::new()
    };
    let merge_exclude = if is_regex_or_literal {
        exclude.clone()
    } else {
        Vec::new()
    };
    let options = build_query_options(
        limit,
        min_grade,
        max_complexity,
        language,
        path_pattern,
        effective_include_source,
        &rank_by,
        min_pagerank,
        regex,
        literal,
        case_sensitive,
        ignore_case,
        exclude,
        exclude_file,
    );
    let mut results = if is_search_mode_hybrid {
        // Hybrid: run the same query twice — once with the default (lexical)
        // options, once forced to semantic — then fuse the two rankings.
        let lexical_options = options.clone();
        let mut semantic_options = options;
        semantic_options.search_mode =
            crate::services::agent_context::SearchMode::Semantic;
        let lexical_results = index
            .query(&query, lexical_options)
            .map_err(|e| anyhow::anyhow!("{}", e))?;
        let semantic_results = index
            .query(&query, semantic_options)
            .map_err(|e| anyhow::anyhow!("{}", e))?;
        rrf_fuse(lexical_results, semantic_results, limit)
    } else {
        index
            .query(&query, options)
            .map_err(|e| anyhow::anyhow!("{}", e))?
    };
    profile.phase("query");
    apply_result_filters(&mut results, exclude_tests, &definition_type);
    // Optional enrichments (churn, duplicates, entropy, faults, coverage)
    // annotate the result set in place.
    apply_all_enrichments(
        &mut results,
        &project_path,
        quiet,
        churn,
        duplicates,
        entropy,
        faults,
        coverage,
        uncovered_only,
        &coverage_file,
        &coverage_diff,
    )
    .await;
    profile.phase("enrich");
    // Re-sort after enrichment — presumably rank_by may key on enriched
    // fields; confirm in apply_post_enrichment_sort.
    apply_post_enrichment_sort(&mut results, &rank_by);
    let git_data = fetch_git_data(git_history, &project_path, &query, limit, &index, quiet)?;
    profile.phase("git_history");
    // Regex/literal results carry their own source; others are backfilled
    // from the index.
    if !is_regex_or_literal {
        backfill_results_source(&mut results, &index);
    }
    let merge_ctx = MergeContext {
        query: &query,
        literal,
        ignore_case,
        language: &merge_language,
        exclude_file: &merge_exclude_file,
        exclude: &merge_exclude,
        project_path: &project_path,
        is_regex_or_literal,
    };
    let raw_results = merge_raw_results(
        is_regex_or_literal,
        quiet,
        &query,
        limit,
        &merge_ctx,
        context_lines,
        after_context,
        before_context,
        &results,
    );
    emit_query_output(
        &results,
        &raw_results,
        &git_data,
        &query,
        &format,
        effective_include_source,
        coverage,
        files_with_matches,
        count,
        context_lines,
        after_context,
        before_context,
        &merge_ctx,
        &project_path,
        &index,
    )?;
    profile.phase("output");
    // --docs appends a documentation section, but only for index queries
    // (not regex/literal text searches).
    if docs && !is_regex_or_literal {
        emit_docs_section(&query, limit, &project_path, &format, quiet)?;
        profile.phase("docs");
    }
    profile.emit(quiet);
    Ok(())
}
/// Fuses two ranked result lists with Reciprocal Rank Fusion (RRF).
///
/// Each result contributes `1 / (K + rank)` (rank is 1-based, restarting
/// per list) for every list it appears in; a result present in both lists
/// accumulates both contributions, keeping the record from the list that
/// inserted it first (lexical). The fused set is sorted by descending RRF
/// score and truncated to `limit`, with each returned result's
/// `relevance_score` overwritten by its fused score.
///
/// Ties are broken deterministically by the `(file_path, function_name)`
/// key — previously tied scores came out in `HashMap` iteration order,
/// which is unspecified and varies between runs.
fn rrf_fuse(
    lexical: Vec<crate::services::agent_context::QueryResult>,
    semantic: Vec<crate::services::agent_context::QueryResult>,
    limit: usize,
) -> Vec<crate::services::agent_context::QueryResult> {
    use std::collections::HashMap;
    // Conventional RRF damping constant.
    const K: f32 = 60.0;
    let mut fused: HashMap<(String, String), (f32, crate::services::agent_context::QueryResult)> =
        HashMap::new();
    // Accumulate contributions from both lists; the rank counter restarts
    // for each list. (Single loop replaces two duplicated ones.)
    for list in [lexical, semantic] {
        for (rank, result) in list.into_iter().enumerate() {
            let key = (result.file_path.clone(), result.function_name.clone());
            let rrf = 1.0_f32 / (K + (rank as f32 + 1.0));
            fused
                .entry(key)
                .and_modify(|(score, _)| *score += rrf)
                .or_insert((rrf, result));
        }
    }
    // Keep the key alongside the value so ties can be ordered deterministically.
    let mut merged: Vec<(
        (String, String),
        (f32, crate::services::agent_context::QueryResult),
    )> = fused.into_iter().collect();
    merged.sort_by(|a, b| {
        let (key_a, (score_a, _)) = a;
        let (key_b, (score_b, _)) = b;
        // Descending by score; NaN-safe via partial_cmp fallback; then
        // ascending by key so equal scores have a stable, reproducible order.
        score_b
            .partial_cmp(score_a)
            .unwrap_or(std::cmp::Ordering::Equal)
            .then_with(|| key_a.cmp(key_b))
    });
    merged
        .into_iter()
        .take(limit)
        .map(|(_, (score, mut r))| {
            r.relevance_score = score;
            r
        })
        .collect()
}