//! Terminal and JSON rendering for search results.

use std::collections::HashMap;
use std::path::Path;
use anyhow::{Context, Result};
use colored::Colorize;
use cqs::reference::TaggedResult;
use cqs::store::{ParentContext, UnifiedResult};
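/// Read up to `context` lines immediately before and after the 1-based,
/// inclusive range `line_start..=line_end` of `file`, returning them as
/// `(before, after)`.
///
/// Guardrails: absolute paths are rejected, `..` components must still
/// resolve under the current working directory, and files larger than
/// 10 MB are refused.
///
/// A minimal sketch of a call site (hypothetical path and range):
///
/// ```ignore
/// let (before, after) = read_context_lines(Path::new("src/lib.rs"), 42, 50, 3)?;
/// assert!(before.len() <= 3 && after.len() <= 3);
/// ```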
pub fn read_context_lines(
file: &Path,
line_start: u32,
line_end: u32,
context: usize,
) -> Result<(Vec<String>, Vec<String>)> {
    // Reject absolute paths outright: Unix-style (leading '/') and Windows
    // drive-letter paths (second byte `:`, e.g. `C:\...`).
    let path_str = file.to_string_lossy();
    if path_str.starts_with('/') || (path_str.len() >= 2 && path_str.as_bytes()[1] == b':') {
        anyhow::bail!("Absolute path blocked: {}", file.display());
    }
    // Paths containing `..` are allowed only if they still canonicalize to
    // somewhere under the current working directory. If canonicalization
    // fails (e.g. the file does not exist), fall through and let the read
    // below surface the error.
    if path_str.contains("..") {
        if let (Ok(canonical), Ok(cwd)) = (
            dunce::canonicalize(file),
            std::env::current_dir().and_then(dunce::canonicalize),
        ) {
            if !canonical.starts_with(&cwd) {
                anyhow::bail!("Path traversal blocked: {}", file.display());
            }
        }
    }
    // Cap context reads so a single oversized file cannot stall display.
    const MAX_DISPLAY_FILE_SIZE: u64 = 10 * 1024 * 1024;
if let Ok(meta) = std::fs::metadata(file) {
if meta.len() > MAX_DISPLAY_FILE_SIZE {
anyhow::bail!(
"File too large for context display: {}MB (limit {}MB)",
meta.len() / (1024 * 1024),
MAX_DISPLAY_FILE_SIZE / (1024 * 1024)
);
}
}
let content = std::fs::read_to_string(file)
.with_context(|| format!("Failed to read {}", file.display()))?;
let lines: Vec<&str> = content.lines().map(|l| l.trim_end_matches('\r')).collect();
    if lines.is_empty() {
        return Ok((Vec::new(), Vec::new()));
    }
    // Normalize to a 1-based, non-empty range, then convert to 0-based
    // indices clamped to the last line.
    let line_start = line_start.max(1);
    let line_end = line_end.max(line_start);
    let max_idx = lines.len().saturating_sub(1);
    let start_idx = (line_start as usize).saturating_sub(1).min(max_idx);
    let end_idx = (line_end as usize).saturating_sub(1).min(max_idx);
    // The clamping above keeps both slices in bounds, so no length guards
    // are needed.
    let context_start = start_idx.saturating_sub(context);
    let before: Vec<String> = lines[context_start..start_idx]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let context_end = end_idx
        .saturating_add(context)
        .saturating_add(1)
        .min(lines.len());
    let after: Vec<String> = lines[(end_idx + 1)..context_end]
        .iter()
        .map(|s| s.to_string())
        .collect();
Ok((before, after))
}
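/// Render results for the terminal: one cyan header per chunk
/// (`path:line (type name) [language] [score]`), the first lines of the
/// chunk body, optional dimmed context lines around it, and an optional
/// dimmed parent-context block.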
pub fn display_unified_results(
results: &[UnifiedResult],
root: &Path,
no_content: bool,
context: Option<usize>,
parents: Option<&HashMap<String, ParentContext>>,
) -> Result<()> {
for result in results {
match result {
UnifiedResult::Code(r) => {
let rel_path = cqs::rel_display(&r.chunk.file, root);
let parent_tag = if r.chunk.parent_id.is_some() {
" [has parent]"
} else {
""
};
let header = format!(
"{}:{} ({} {}) [{}] [{:.2}]{}",
rel_path,
r.chunk.line_start,
r.chunk.chunk_type,
r.chunk.name,
r.chunk.language,
r.score,
parent_tag
);
println!("{}", header.cyan());
            if !no_content {
                println!("{}", "─".repeat(50));
                // Read the surrounding lines once per result; calling
                // read_context_lines separately for the "before" and
                // "after" halves would read the file twice.
                let surrounding = match context {
                    Some(n) if n > 0 => {
                        let abs_path = root.join(&r.chunk.file);
                        match read_context_lines(
                            &abs_path,
                            r.chunk.line_start,
                            r.chunk.line_end,
                            n,
                        ) {
                            Ok(pair) => Some(pair),
                            Err(e) => {
                                tracing::trace!(
                                    error = %e,
                                    file = %abs_path.display(),
                                    "Failed to read context lines"
                                );
                                None
                            }
                        }
                    }
                    _ => None,
                };
                if let Some((before, _)) = &surrounding {
                    for line in before {
                        println!("{}", format!(" {}", line).dimmed());
                    }
                }
                if r.chunk.content.lines().count() <= 10 {
                    println!("{}", r.chunk.content);
                } else {
                    for line in r.chunk.content.lines().take(8) {
                        println!("{}", line);
                    }
                    println!(" ...");
                }
                if let Some((_, after)) = &surrounding {
                    for line in after {
                        println!("{}", format!(" {}", line).dimmed());
                    }
                }
if let Some(parent) = parents.and_then(|p| p.get(&r.chunk.id)) {
let parent_header = format!(
" Parent context: {} ({}:{}-{})",
parent.name, rel_path, parent.line_start, parent.line_end,
);
println!("{}", parent_header.dimmed());
println!("{}", " ────────────────────────────────".dimmed());
for line in parent.content.lines().take(20) {
println!("{}", format!(" {}", line).dimmed());
}
if parent.content.lines().count() > 20 {
println!("{}", " ...".dimmed());
}
}
println!();
}
}
}
}
println!("{} results", results.len());
Ok(())
}
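/// Emit results as pretty-printed JSON on stdout. Illustrative shape (the
/// `parent_*` fields appear only for chunks with parent context, the token
/// fields only when `token_info` is given):
///
/// ```text
/// {
///   "query": "...",
///   "total": 1,
///   "results": [{ "name": "...", "parent_name": "...", ... }],
///   "token_count": 123,
///   "token_budget": 4096
/// }
/// ```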
pub fn display_unified_results_json(
results: &[UnifiedResult],
query: &str,
parents: Option<&HashMap<String, ParentContext>>,
token_info: Option<(usize, usize)>,
) -> Result<()> {
let json_results: Vec<_> = results
.iter()
.map(|r| {
let mut obj = r.to_json();
            // `UnifiedResult` currently has a single `Code` variant, so
            // this `let` pattern is irrefutable.
            let UnifiedResult::Code(sr) = r;
if let Some(parent) = parents.and_then(|p| p.get(&sr.chunk.id)) {
obj["parent_name"] = serde_json::json!(parent.name);
obj["parent_content"] = serde_json::json!(parent.content);
obj["parent_line_start"] = serde_json::json!(parent.line_start);
obj["parent_line_end"] = serde_json::json!(parent.line_end);
}
obj
})
.collect();
let mut output = serde_json::json!({
"results": json_results,
"query": query,
"total": results.len(),
});
if let Some((used, budget)) = token_info {
output["token_count"] = serde_json::json!(used);
output["token_budget"] = serde_json::json!(budget);
}
println!("{}", serde_json::to_string_pretty(&output)?);
Ok(())
}
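/// Like [`display_unified_results`], but each result may carry a source
/// tag, shown as a `[source]` prefix in the header. Context lines are read
/// only for untagged (local) results.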
pub fn display_tagged_results(
results: &[TaggedResult],
root: &Path,
no_content: bool,
context: Option<usize>,
parents: Option<&HashMap<String, ParentContext>>,
) -> Result<()> {
for tagged in results {
match &tagged.result {
UnifiedResult::Code(r) => {
let rel_path = cqs::rel_display(&r.chunk.file, root);
let source_prefix = tagged
.source
.as_ref()
.map(|s| format!("[{}] ", s))
.unwrap_or_default();
let parent_tag = if r.chunk.parent_id.is_some() {
" [has parent]"
} else {
""
};
let header = format!(
"{}{}:{} ({} {}) [{}] [{:.2}]{}",
source_prefix,
rel_path,
r.chunk.line_start,
r.chunk.chunk_type,
r.chunk.name,
r.chunk.language,
r.score,
parent_tag
);
println!("{}", header.cyan());
                if !no_content {
                    println!("{}", "─".repeat(50));
                    // Context lines apply only to untagged (local) results:
                    // a tagged result's file may not exist on disk. As in
                    // display_unified_results, read the surrounding lines
                    // once rather than twice.
                    let surrounding = match context {
                        Some(n) if n > 0 && tagged.source.is_none() => {
                            let abs_path = root.join(&r.chunk.file);
                            match read_context_lines(
                                &abs_path,
                                r.chunk.line_start,
                                r.chunk.line_end,
                                n,
                            ) {
                                Ok(pair) => Some(pair),
                                Err(e) => {
                                    tracing::trace!(
                                        error = %e,
                                        file = %abs_path.display(),
                                        "Failed to read context lines"
                                    );
                                    None
                                }
                            }
                        }
                        _ => None,
                    };
                    if let Some((before, _)) = &surrounding {
                        for line in before {
                            println!("{}", format!(" {}", line).dimmed());
                        }
                    }
                    if r.chunk.content.lines().count() <= 10 {
                        println!("{}", r.chunk.content);
                    } else {
                        for line in r.chunk.content.lines().take(8) {
                            println!("{}", line);
                        }
                        println!(" ...");
                    }
                    if let Some((_, after)) = &surrounding {
                        for line in after {
                            println!("{}", format!(" {}", line).dimmed());
                        }
                    }
if let Some(parent) = parents.and_then(|p| p.get(&r.chunk.id)) {
let parent_header = format!(
" Parent context: {} ({}:{}-{})",
parent.name, rel_path, parent.line_start, parent.line_end,
);
println!("{}", parent_header.dimmed());
println!("{}", " ────────────────────────────────".dimmed());
for line in parent.content.lines().take(20) {
println!("{}", format!(" {}", line).dimmed());
}
if parent.content.lines().count() > 20 {
println!("{}", " ...".dimmed());
}
}
println!();
}
}
}
}
println!("{} results", results.len());
Ok(())
}
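/// Emit similarity matches for `target` as pretty-printed JSON with
/// `target`, `results`, and `total` keys.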
pub fn display_similar_results_json(
results: &[cqs::store::SearchResult],
target: &str,
) -> Result<()> {
let json_results: Vec<_> = results.iter().map(|r| r.to_json()).collect();
let output = serde_json::json!({
"target": target,
"results": json_results,
"total": results.len(),
});
println!("{}", serde_json::to_string_pretty(&output)?);
Ok(())
}
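/// JSON twin of [`display_tagged_results`]: the same shape as
/// [`display_unified_results_json`], plus a `source` field on results that
/// carry one.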
pub fn display_tagged_results_json(
results: &[TaggedResult],
query: &str,
parents: Option<&HashMap<String, ParentContext>>,
token_info: Option<(usize, usize)>,
) -> Result<()> {
let json_results: Vec<_> = results
.iter()
.map(|t| {
let mut json = t.result.to_json();
            // Irrefutable: `UnifiedResult` is currently single-variant.
            let UnifiedResult::Code(sr) = &t.result;
if let Some(parent) = parents.and_then(|p| p.get(&sr.chunk.id)) {
json["parent_name"] = serde_json::json!(parent.name);
json["parent_content"] = serde_json::json!(parent.content);
json["parent_line_start"] = serde_json::json!(parent.line_start);
json["parent_line_end"] = serde_json::json!(parent.line_end);
}
if let Some(source) = &t.source {
json["source"] = serde_json::json!(source);
}
json
})
.collect();
let mut output = serde_json::json!({
"results": json_results,
"query": query,
"total": results.len(),
});
if let Some((used, budget)) = token_info {
output["token_count"] = serde_json::json!(used);
output["token_budget"] = serde_json::json!(budget);
}
println!("{}", serde_json::to_string_pretty(&output)?);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
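    /// Write `lines` to a `test.rs` inside a fresh temp dir; the dir guard
    /// is returned so the file outlives the caller's use of it.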
fn write_test_file(lines: &[&str]) -> (tempfile::TempDir, std::path::PathBuf) {
let dir = tempfile::TempDir::new().unwrap();
let file_path = dir.path().join("test.rs");
let content = lines.join("\n");
std::fs::write(&file_path, &content).unwrap();
(dir, file_path)
}
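    /// Mirror of `read_context_lines` minus the path-safety checks:
    /// `tempfile::TempDir` paths are absolute, so the production function
    /// would reject them before the slicing logic under test is reached.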
fn read_context_lines_test(
file: &Path,
line_start: u32,
line_end: u32,
context: usize,
) -> anyhow::Result<(Vec<String>, Vec<String>)> {
let content = std::fs::read_to_string(file)
.with_context(|| format!("Failed to read {}", file.display()))?;
let lines: Vec<&str> = content.lines().map(|l| l.trim_end_matches('\r')).collect();
        if lines.is_empty() {
            return Ok((Vec::new(), Vec::new()));
        }
        let line_start = line_start.max(1);
        let line_end = line_end.max(line_start);
        let max_idx = lines.len().saturating_sub(1);
        let start_idx = (line_start as usize).saturating_sub(1).min(max_idx);
        let end_idx = (line_end as usize).saturating_sub(1).min(max_idx);
        let context_start = start_idx.saturating_sub(context);
        let before: Vec<String> = lines[context_start..start_idx]
            .iter()
            .map(|s| s.to_string())
            .collect();
        let context_end = end_idx
            .saturating_add(context)
            .saturating_add(1)
            .min(lines.len());
        let after: Vec<String> = lines[(end_idx + 1)..context_end]
            .iter()
            .map(|s| s.to_string())
            .collect();
Ok((before, after))
}
#[test]
fn test_read_context_lines_basic() {
let lines = vec![
"line 1", "line 2", "line 3", "line 4", "line 5", "line 6", "line 7",
];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 3, 5, 1).unwrap();
assert_eq!(before.len(), 1, "Should have 1 line before");
assert_eq!(before[0], "line 2");
assert_eq!(after.len(), 1, "Should have 1 line after");
assert_eq!(after[0], "line 6");
}
#[test]
fn test_read_context_lines_at_start() {
let lines = vec!["first", "second", "third", "fourth"];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 1, 1, 2).unwrap();
assert!(before.is_empty(), "No lines before line 1");
assert_eq!(after.len(), 2, "Should have 2 lines after");
assert_eq!(after[0], "second");
assert_eq!(after[1], "third");
}
#[test]
fn test_read_context_lines_at_end() {
let lines = vec!["first", "second", "third", "last"];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 4, 4, 2).unwrap();
assert_eq!(before.len(), 2, "Should have 2 lines before");
assert_eq!(before[0], "second");
assert_eq!(before[1], "third");
assert!(after.is_empty(), "No lines after last line");
}
#[test]
fn test_read_context_lines_zero_context() {
let lines = vec!["line 1", "line 2", "line 3"];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 2, 2, 0).unwrap();
assert!(before.is_empty());
assert!(after.is_empty());
}
#[test]
fn test_read_context_lines_single_line_file() {
let (_dir, path) = write_test_file(&["only line"]);
let (before, after) = read_context_lines_test(&path, 1, 1, 5).unwrap();
assert!(before.is_empty());
assert!(after.is_empty());
}
#[test]
fn test_read_context_lines_line_zero_normalized() {
let lines = vec!["first", "second"];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 0, 1, 1).unwrap();
assert!(before.is_empty(), "Line 0 normalizes to 1, nothing before");
assert_eq!(after.len(), 1);
assert_eq!(after[0], "second");
}
#[test]
fn test_read_context_lines_nonexistent_file() {
let result = read_context_lines(Path::new("nonexistent/file.rs"), 1, 5, 2);
assert!(result.is_err(), "Should fail for nonexistent file");
}
#[test]
fn test_read_context_lines_absolute_path_blocked() {
let result = read_context_lines(Path::new("/etc/passwd"), 1, 5, 2);
assert!(result.is_err(), "Should block absolute paths");
let err = result.unwrap_err().to_string();
assert!(
err.contains("Absolute path blocked"),
"Expected absolute path error, got: {err}"
);
}
#[test]
fn test_read_context_lines_multi_line_range() {
let lines = vec!["a", "b", "c", "d", "e", "f", "g", "h"];
let (_dir, path) = write_test_file(&lines);
let (before, after) = read_context_lines_test(&path, 3, 6, 1).unwrap();
assert_eq!(before.len(), 1);
assert_eq!(before[0], "b");
assert_eq!(after.len(), 1);
assert_eq!(after[0], "g");
}
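    /// Build a minimal `SearchResult` fixture (a Rust function chunk
    /// spanning lines 10-20) for the JSON-shape assertions below.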
fn make_search_result(
name: &str,
score: f32,
parent_id: Option<&str>,
) -> cqs::store::SearchResult {
cqs::store::SearchResult {
chunk: cqs::store::ChunkSummary {
id: format!("id-{name}"),
file: std::path::PathBuf::from(format!("src/{name}.rs")),
language: cqs::parser::Language::Rust,
chunk_type: cqs::parser::ChunkType::Function,
name: name.to_string(),
signature: format!("fn {name}()"),
content: format!("fn {name}() {{}}"),
doc: None,
line_start: 10,
line_end: 20,
parent_id: parent_id.map(|s| s.to_string()),
parent_type_name: None,
content_hash: String::new(),
window_idx: None,
},
score,
}
}
#[test]
fn test_display_similar_results_json_returns_ok() {
let results = vec![
make_search_result("alpha", 0.95, None),
make_search_result("beta", 0.80, Some("parent-1")),
];
let result = super::display_similar_results_json(&results, "my_target");
assert!(
result.is_ok(),
"display_similar_results_json should succeed"
);
}
#[test]
fn test_display_similar_results_json_empty() {
let results: Vec<cqs::store::SearchResult> = vec![];
let result = super::display_similar_results_json(&results, "no_matches");
assert!(result.is_ok(), "should succeed with empty results");
}
#[test]
fn test_display_similar_results_json_structure() {
let results = vec![
make_search_result("alpha", 0.95, None),
make_search_result("beta", 0.80, Some("parent-1")),
];
let json_results: Vec<_> = results.iter().map(|r| r.to_json()).collect();
let output = serde_json::json!({
"target": "my_target",
"results": json_results,
"total": results.len(),
});
assert!(output.get("target").is_some(), "missing 'target'");
assert!(output.get("results").is_some(), "missing 'results'");
assert!(output.get("total").is_some(), "missing 'total'");
assert_eq!(output["target"], "my_target");
assert_eq!(output["total"], 2);
let arr = output["results"].as_array().unwrap();
assert_eq!(arr.len(), 2);
for (i, item) in arr.iter().enumerate() {
let obj = item.as_object().unwrap_or_else(|| {
panic!("result[{i}] should be an object");
});
for field in [
"file",
"line_start",
"line_end",
"name",
"signature",
"language",
"chunk_type",
"score",
"content",
] {
assert!(
obj.contains_key(field),
"result[{i}] missing field '{field}'"
);
}
}
assert_eq!(arr[0]["name"], "alpha");
assert_eq!(arr[1]["name"], "beta");
assert_eq!(arr[0]["line_start"], 10);
assert_eq!(arr[0]["line_end"], 20);
assert_eq!(arr[0]["language"], "rust");
assert_eq!(arr[0]["chunk_type"], "function");
let s0 = arr[0]["score"].as_f64().unwrap();
assert!((s0 - 0.95).abs() < 1e-4, "alpha score should be ~0.95");
let s1 = arr[1]["score"].as_f64().unwrap();
assert!((s1 - 0.80).abs() < 1e-4, "beta score should be ~0.80");
}
}