mod fields;
pub mod fts;
mod markdown;
pub use fields::extract_body_keywords;
pub use fts::{normalize_for_fts, tokenize_identifier};
#[allow(unused_imports)]
pub use markdown::{parse_jsdoc_tags, strip_markdown_noise, JsDocInfo};
use crate::parser::{Chunk, ChunkType, Language};
use fields::{extract_field_names, extract_member_method_names};
/// Selects how a chunk's natural-language description is assembled.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum NlTemplate {
    /// Default template: file-path context first, then doc comment, tokenized
    /// name, and structural details (fields, methods, params, signature).
    Compact,
    /// Doc-comment-first template: when a non-empty doc exists, the output is
    /// just the doc plus the tokenized name (no file context or parent type).
    DocFirst,
}
/// Call-graph neighborhood of a chunk: who calls it and what it calls.
#[derive(Debug, Default)]
pub struct CallContext {
    /// Names of functions that call this chunk (rendered as "Called by: …").
    pub callers: Vec<String>,
    /// Names of functions this chunk calls (rendered as "Calls: …").
    pub callees: Vec<String>,
}
/// Convenience wrapper around [`generate_nl_with_call_context_and_summary`]
/// with no summary and no HyDE queries.
pub fn generate_nl_with_call_context(
    chunk: &Chunk,
    ctx: &CallContext,
    callee_doc_freq: &std::collections::HashMap<String, f32>,
    max_callers: usize,
    max_callees: usize,
) -> String {
    generate_nl_with_call_context_and_summary(
        chunk,
        ctx,
        callee_doc_freq,
        max_callers,
        max_callees,
        None,
        None,
    )
}
/// Builds the natural-language description for `chunk`, enriched with
/// call-graph context plus an optional summary and optional HyDE query lines.
///
/// Assembly order: `summary` (prepended, space-separated) + base description +
/// "Called by: …" + "Calls: …" + "Queries: …" (appended). The "callgraph",
/// "summary", and "hyde" layers can each be disabled via the
/// `CQS_SKIP_ENRICHMENT` environment variable. Callees whose document
/// frequency in `callee_doc_freq` is >= 10% are dropped as too common to be
/// discriminative.
pub fn generate_nl_with_call_context_and_summary(
    chunk: &Chunk,
    ctx: &CallContext,
    callee_doc_freq: &std::collections::HashMap<String, f32>,
    max_callers: usize,
    max_callees: usize,
    summary: Option<&str>,
    hyde: Option<&str>,
) -> String {
    tracing::trace!(
        callers = ctx.callers.len(),
        callees = ctx.callees.len(),
        has_summary = summary.is_some(),
        has_hyde = hyde.is_some(),
        "generate_nl_with_call_context_and_summary"
    );
    let callgraph_enabled = !is_enrichment_skipped("callgraph");
    let mut sections: Vec<String> = Vec::new();
    if callgraph_enabled && !ctx.callers.is_empty() {
        let tokenized: Vec<String> = ctx
            .callers
            .iter()
            .take(max_callers)
            .map(|name| tokenize_identifier(name).join(" "))
            .collect();
        if !tokenized.is_empty() {
            sections.push(format!("Called by: {}", tokenized.join(", ")));
        }
    }
    if callgraph_enabled && !ctx.callees.is_empty() {
        // Drop callees that appear in >= 10% of documents: they carry no
        // discriminative signal for retrieval.
        let tokenized: Vec<String> = ctx
            .callees
            .iter()
            .filter(|name| {
                !callee_doc_freq
                    .get(name.as_str())
                    .is_some_and(|&freq| (freq as f64) >= 0.10_f64)
            })
            .take(max_callees)
            .map(|name| tokenize_identifier(name).join(" "))
            .collect();
        if !tokenized.is_empty() {
            sections.push(format!("Calls: {}", tokenized.join(", ")));
        }
    }
    let mut nl = generate_nl_description(chunk);
    if !sections.is_empty() {
        nl = format!("{}. {}", nl, sections.join(". "));
    }
    if !is_enrichment_skipped("summary") {
        if let Some(s) = summary.filter(|s| !s.is_empty()) {
            // Summary leads the text so it dominates the embedding.
            nl = format!("{} {}", s, nl);
        }
    }
    if is_enrichment_skipped("hyde") {
        return nl;
    }
    if let Some(h) = hyde.filter(|h| !h.is_empty()) {
        // HyDE text is one query per line; flatten to a comma-separated list.
        let queries = h
            .lines()
            .filter_map(|line| {
                let line = line.trim();
                (!line.is_empty()).then_some(line)
            })
            .collect::<Vec<_>>()
            .join(", ");
        if !queries.is_empty() {
            nl = format!("{}. Queries: {}", nl, queries);
        }
    }
    nl
}
/// Generates the natural-language description for a chunk using the default
/// [`NlTemplate::Compact`] template.
pub fn generate_nl_description(chunk: &Chunk) -> String {
    generate_nl_with_template(chunk, NlTemplate::Compact)
}
/// Returns true when `layer` is listed in the comma-separated
/// `CQS_SKIP_ENRICHMENT` environment variable (entries are trimmed and
/// lowercased). The variable is parsed once and cached for the process.
fn is_enrichment_skipped(layer: &str) -> bool {
    static SKIP: std::sync::OnceLock<Vec<String>> = std::sync::OnceLock::new();
    SKIP.get_or_init(|| {
        std::env::var("CQS_SKIP_ENRICHMENT")
            .unwrap_or_default()
            .split(',')
            .map(str::trim)
            .filter(|entry| !entry.is_empty())
            .map(str::to_lowercase)
            .collect()
    })
    .iter()
    .any(|entry| entry == layer)
}
/// Builds a natural-language description of `chunk` for embedding/search.
///
/// Markdown sections get breadcrumb + name + a length-capped content preview.
/// Code chunks get, in order (each layer independently disable-able via
/// `CQS_SKIP_ENRICHMENT`): file-path context, doc comment, parent-type
/// context, tokenized name, field/method lists, parameter/return phrasing,
/// body keywords, and the raw signature — joined with ". ".
pub fn generate_nl_with_template(chunk: &Chunk, template: NlTemplate) -> String {
    // Markdown sections: breadcrumb signature, section name, then a content
    // preview sized to the embedder's sequence budget.
    if chunk.chunk_type == ChunkType::Section {
        let mut parts = Vec::new();
        if !chunk.signature.is_empty() {
            parts.push(chunk.signature.clone());
        }
        parts.push(chunk.name.clone());
        // Read the sequence-length override once per process.
        static MAX_SEQ: std::sync::OnceLock<usize> = std::sync::OnceLock::new();
        let max_seq = *MAX_SEQ.get_or_init(|| {
            std::env::var("CQS_MAX_SEQ_LENGTH")
                .ok()
                .and_then(|v| v.parse().ok())
                .unwrap_or(512)
        });
        // ~4 chars per token, minus headroom for breadcrumb/name; floor 400.
        let char_budget = max_seq.saturating_mul(4).saturating_sub(200).max(400);
        let preview: String = strip_markdown_noise(&chunk.content)
            .chars()
            .take(char_budget)
            .collect();
        parts.push(preview);
        return parts.join(". ");
    }
    // Nothing usable to describe: fall back to the file path, or a
    // placeholder when even that is empty.
    if chunk.name.is_empty() && chunk.doc.is_none() && chunk.signature.is_empty() {
        return if chunk.file.as_os_str().is_empty() {
            "(unnamed)".to_string()
        } else {
            chunk.file.display().to_string()
        };
    }
    let mut parts = Vec::new();
    // File-path context is a Compact-only feature.
    if template == NlTemplate::Compact && !is_enrichment_skipped("filecontext") {
        let file_context = extract_file_context(&chunk.file);
        if !file_context.is_empty() {
            parts.push(file_context);
        }
    }
    // Push the trimmed doc comment (if any); remember whether we had one so
    // DocFirst can short-circuit below.
    let has_doc = if !is_enrichment_skipped("doc") {
        if let Some(ref doc) = chunk.doc {
            let doc_trimmed = doc.trim();
            if !doc_trimmed.is_empty() {
                parts.push(doc_trimmed.to_string());
                true
            } else {
                false
            }
        } else {
            false
        }
    } else {
        false
    };
    let name_words = tokenize_identifier(&chunk.name).join(" ");
    // DocFirst with a doc: just doc + tokenized name, nothing else.
    if template == NlTemplate::DocFirst && has_doc {
        parts.push(name_words);
        return parts.join(". ");
    }
    // Methods mention their parent type ("circuit breaker method").
    if chunk.chunk_type == ChunkType::Method && !is_enrichment_skipped("parent") {
        if let Some(ref parent_name) = chunk.parent_type_name {
            let parent_words = tokenize_identifier(parent_name).join(" ");
            parts.push(format!("{} method", parent_words));
        }
    }
    // Constructors get an explicit phrasing, with or without a parent type.
    if chunk.chunk_type == ChunkType::Constructor {
        if let Some(ref parent_name) = chunk.parent_type_name {
            let parent_words = tokenize_identifier(parent_name).join(" ");
            parts.push(format!("constructor for {}", parent_words));
        } else {
            let name_tokens = tokenize_identifier(&chunk.name).join(" ");
            parts.push(format!("constructor that initializes {}", name_tokens));
        }
    }
    parts.push(name_words);
    if chunk.chunk_type == ChunkType::Extension {
        let name_tokens = tokenize_identifier(&chunk.name).join(" ");
        parts.push(format!("extension of {}", name_tokens));
    }
    // Type-like chunks list their fields.
    if matches!(
        chunk.chunk_type,
        ChunkType::Struct | ChunkType::Enum | ChunkType::Class | ChunkType::Extension
    ) {
        let fields = extract_field_names(&chunk.content, chunk.language);
        if !fields.is_empty() {
            parts.push(format!("Fields: {}", fields.join(", ")));
        }
    }
    // Container-like chunks list up to 10 member methods, tokenized.
    if matches!(
        chunk.chunk_type,
        ChunkType::Class | ChunkType::Struct | ChunkType::Interface | ChunkType::Extension
    ) {
        let methods = extract_member_method_names(&chunk.content, chunk.language);
        if !methods.is_empty() {
            let method_words: Vec<String> = methods
                .iter()
                .take(10)
                .map(|m| tokenize_identifier(m).join(" "))
                .collect();
            parts.push(format!("Methods: {}", method_words.join(", ")));
        }
    }
    // JSDoc tags serve as a fallback for JS, where signatures carry no types.
    let jsdoc_info = if chunk.language == Language::JavaScript {
        chunk.doc.as_ref().map(|d| parse_jsdoc_tags(d))
    } else {
        None
    };
    if !is_enrichment_skipped("signatures") {
        // Prefer parameters parsed from the signature; fall back to JSDoc.
        if let Some(params_desc) = extract_params_nl(&chunk.signature) {
            parts.push(params_desc);
        } else if let Some(ref info) = jsdoc_info {
            if !info.params.is_empty() {
                let param_strs: Vec<String> = info
                    .params
                    .iter()
                    .map(|(name, ty)| format!("{} ({})", name, ty))
                    .collect();
                parts.push(format!("Takes parameters: {}", param_strs.join(", ")));
            }
        }
        // Same preference order for the return type.
        if let Some(return_desc) = extract_return_nl(&chunk.signature, chunk.language) {
            parts.push(return_desc);
        } else if let Some(ref info) = jsdoc_info {
            if let Some(ref ret) = info.returns {
                parts.push(format!("Returns {}", ret));
            }
        }
    }
    // Notable identifiers from the body ("Uses: …").
    {
        let keywords = extract_body_keywords(&chunk.content, chunk.language);
        if !keywords.is_empty() {
            let kw_strs: Vec<&str> = keywords.iter().map(|s| s.as_str()).collect();
            parts.push(format!("Uses: {}", kw_strs.join(", ")));
        }
    }
    // Raw signature last, for exact-match retrieval.
    if !chunk.signature.is_empty() && !is_enrichment_skipped("signatures") {
        parts.push(format!("Signature: {}", chunk.signature));
    }
    parts.join(". ")
}
/// Describes a signature's parameter list in natural language.
///
/// Returns "Takes no parameters" for an empty `()`, `None` when the signature
/// has no parentheses or the parameters reduce to nothing after filtering,
/// and otherwise "Takes parameters: …" with each comma-separated parameter
/// tokenized (dropping `self` and `mut` tokens).
fn extract_params_nl(signature: &str) -> Option<String> {
    let open = signature.find('(')?;
    let close = signature.rfind(')')?;
    if open >= close {
        return None;
    }
    let inner = &signature[open + 1..close];
    if inner.trim().is_empty() {
        return Some("Takes no parameters".to_string());
    }
    let mut described: Vec<String> = Vec::new();
    for raw in inner.split(',') {
        let raw = raw.trim();
        if raw.is_empty() {
            continue;
        }
        // Tokenize the parameter and drop receiver/mutability noise words.
        let words: Vec<String> = tokenize_identifier(raw)
            .into_iter()
            .filter(|w| !["self", "mut"].contains(&w.as_str()))
            .collect();
        if !words.is_empty() {
            described.push(words.join(" "));
        }
    }
    if described.is_empty() {
        None
    } else {
        Some(format!("Takes parameters: {}", described.join(", ")))
    }
}
/// Describes a signature's return type in natural language by dispatching to
/// the per-language handler registered in the language definition table
/// (e.g. `"fn foo() -> String"` becomes `"Returns string"` for Rust).
fn extract_return_nl(signature: &str, lang: Language) -> Option<String> {
    (lang.def().extract_return_nl)(signature)
}
/// Derives search context words from a file path.
///
/// Strips generic directory names (src, tests, vendor, …), drops the
/// extension from the final component, skips the final component entirely
/// when its stem is a generic filename (mod, index, main, …), and tokenizes
/// whatever remains into a space-joined word list. Returns an empty string
/// when nothing informative is left.
fn extract_file_context(path: &std::path::Path) -> String {
    use std::path::Component;
    // Directory names too common to carry retrieval signal.
    const SKIP_DIRS: &[&str] = &[
        "src",
        "lib",
        ".",
        "test",
        "tests",
        "spec",
        "specs",
        "fixtures",
        "fixture",
        "testdata",
        "internal",
        "pkg",
        "cmd",
        "app",
        "eval",
        "bench",
        "benches",
        "examples",
        "example",
        "vendor",
        "third_party",
    ];
    // File stems that say nothing about the file's purpose.
    const GENERIC_STEMS: &[&str] = &[
        "mod",
        "index",
        "lib",
        "main",
        "utils",
        "helpers",
        "common",
        "types",
        "config",
        "constants",
        "init",
    ];
    let parts: Vec<&str> = path
        .components()
        .filter_map(|comp| match comp {
            Component::Normal(os) => os.to_str(),
            _ => None,
        })
        .filter(|p| !p.is_empty() && !SKIP_DIRS.contains(p))
        .collect();
    if parts.is_empty() {
        return String::new();
    }
    let last = parts.len() - 1;
    let mut words: Vec<String> = Vec::new();
    for (i, part) in parts.iter().enumerate() {
        let text = if i == last {
            // Final component: drop the extension, skip generic stems.
            let stem = part.rsplit_once('.').map_or(*part, |(stem, _ext)| stem);
            if GENERIC_STEMS.contains(&stem) {
                continue;
            }
            stem
        } else {
            part
        };
        words.extend(tokenize_identifier(text));
    }
    if words.is_empty() {
        return String::new();
    }
    words.join(" ")
}
#[cfg(test)]
mod tests {
    // Tests for NL generation: signature parsing, template behavior, and
    // call-graph enrichment (caller/callee sections, IDF filtering,
    // summary/HyDE layering).
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn test_extract_params_nl() {
        assert_eq!(
            extract_params_nl("fn foo(x: i32, y: String)"),
            Some("Takes parameters: x: i32, y: string".to_string())
        );
        assert_eq!(
            extract_params_nl("fn bar()"),
            Some("Takes no parameters".to_string())
        );
        assert_eq!(
            extract_params_nl("fn baz(self, x: i32)"),
            Some("Takes parameters: x: i32".to_string())
        );
    }

    #[test]
    fn test_extract_return_nl() {
        assert_eq!(
            extract_return_nl("fn foo() -> String", Language::Rust),
            Some("Returns string".to_string())
        );
        assert_eq!(
            extract_return_nl("function foo(): string", Language::TypeScript),
            Some("Returns string".to_string())
        );
        assert_eq!(
            extract_return_nl("def foo() -> str:", Language::Python),
            Some("Returns str".to_string())
        );
        assert_eq!(
            extract_return_nl("function foo()", Language::JavaScript),
            None
        );
    }

    #[test]
    fn test_extract_return_nl_go() {
        assert_eq!(
            extract_return_nl("func foo() string {", Language::Go),
            Some("Returns string".to_string())
        );
        assert_eq!(
            extract_return_nl("func foo() (string, error) {", Language::Go),
            Some("Returns (string, error)".to_string())
        );
        assert_eq!(extract_return_nl("func foo() {", Language::Go), None);
        assert_eq!(
            extract_return_nl("func (s *Server) Start() error {", Language::Go),
            Some("Returns error".to_string())
        );
    }

    #[test]
    fn test_generate_nl_description() {
        let chunk = Chunk {
            id: "test.rs:1:abcd1234".to_string(),
            file: PathBuf::from("test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Function,
            name: "parseConfig".to_string(),
            signature: "fn parseConfig(path: &str) -> Config".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: Some("/// Load config from path".to_string()),
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        };
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("Load config from path"));
        assert!(nl.contains("parse config"));
        assert!(nl.contains("Takes parameters:"));
        assert!(nl.contains("Returns config"));
    }

    #[test]
    fn test_generate_nl_with_jsdoc() {
        let chunk = Chunk {
            id: "test.js:1:abcd1234".to_string(),
            file: PathBuf::from("test.js"),
            language: Language::JavaScript,
            chunk_type: ChunkType::Function,
            name: "validateEmail".to_string(),
            signature: "function validateEmail(email)".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: Some(
                r#"/**
 * Validates an email address
 * @param {string} email - The email to check
 * @returns {boolean} True if valid
 */"#
                .to_string(),
            ),
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        };
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("Validates an email"));
        assert!(nl.contains("validate email"));
        assert!(
            nl.contains("Takes parameters: email"),
            "Should have param from signature: {}",
            nl
        );
        assert!(
            nl.contains("Returns boolean"),
            "Should have JSDoc return: {}",
            nl
        );
    }

    // Builds a Markdown Section chunk for the preview-budget tests.
    fn make_section_chunk(content: &str, signature: &str, name: &str) -> Chunk {
        Chunk {
            id: "test.md:1:abcd1234".to_string(),
            file: PathBuf::from("test.md"),
            language: Language::Markdown,
            chunk_type: ChunkType::Section,
            name: name.to_string(),
            signature: signature.to_string(),
            content: content.to_string(),
            line_start: 1,
            line_end: 10,
            doc: None,
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        }
    }

    #[test]
    fn test_markdown_nl_uses_full_content() {
        let content = "a".repeat(3000);
        let chunk = make_section_chunk(&content, "Title > Section", "Section");
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("Title > Section"));
        assert!(nl.contains("Section"));
        assert!(nl.len() > 500, "NL should be >500 chars, got {}", nl.len());
        assert!(
            nl.len() < 2500,
            "NL should be <2500 chars, got {}",
            nl.len()
        );
    }

    #[test]
    fn test_markdown_nl_short_content() {
        let chunk = make_section_chunk("Short section content here.", "Guide > Intro", "Intro");
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("Guide > Intro"));
        assert!(nl.contains("Intro"));
        assert!(nl.contains("Short section content here."));
    }

    #[test]
    fn test_method_nl_includes_parent_type() {
        let chunk = Chunk {
            id: "test.rs:1:abcd1234".to_string(),
            file: PathBuf::from("test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Method,
            name: "should_allow".to_string(),
            signature: "fn should_allow(&self) -> bool".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: Some("/// Check if calls should be allowed".to_string()),
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: Some("CircuitBreaker".to_string()),
        };
        let nl = generate_nl_description(&chunk);
        assert!(
            nl.contains("circuit breaker method"),
            "NL should contain tokenized parent type: {}",
            nl
        );
        assert!(nl.contains("Check if calls should be allowed"));
    }

    #[test]
    fn test_method_nl_without_parent_type() {
        let chunk = Chunk {
            id: "test.rs:1:abcd1234".to_string(),
            file: PathBuf::from("test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Method,
            name: "process".to_string(),
            signature: "fn process(&self)".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: None,
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        };
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("process"));
        assert!(
            !nl.starts_with("method"),
            "Should not start with orphan 'method' prefix: {}",
            nl
        );
    }

    #[test]
    fn test_function_ignores_parent_type() {
        let chunk = Chunk {
            id: "test.rs:1:abcd1234".to_string(),
            file: PathBuf::from("test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Function,
            name: "standalone".to_string(),
            signature: "fn standalone()".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: None,
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        };
        let nl = generate_nl_description(&chunk);
        assert!(nl.contains("standalone"));
    }

    #[test]
    fn test_docfirst_template_skips_parent_type() {
        let chunk = Chunk {
            id: "test.rs:1:abcd1234".to_string(),
            file: PathBuf::from("test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Method,
            name: "should_allow".to_string(),
            signature: "fn should_allow(&self) -> bool".to_string(),
            content: "{}".to_string(),
            line_start: 1,
            line_end: 1,
            doc: Some("/// Check if allowed".to_string()),
            content_hash: "abcd1234".to_string(),
            parent_id: None,
            window_idx: None,
            parent_type_name: Some("CircuitBreaker".to_string()),
        };
        let nl = generate_nl_with_template(&chunk, NlTemplate::DocFirst);
        assert!(
            !nl.contains("circuit breaker"),
            "DocFirst should skip parent type: {}",
            nl
        );
        assert!(nl.contains("Check if allowed"));
    }

    // Property tests: the signature parsers must never panic on arbitrary
    // printable input, for any language.
    mod fuzz {
        use super::*;
        use proptest::prelude::*;
        proptest! {
            #[test]
            fn fuzz_extract_params_no_panic(sig in "\\PC{0,200}") {
                let _ = extract_params_nl(&sig);
            }
            #[test]
            fn fuzz_extract_return_no_panic(sig in "\\PC{0,200}") {
                for lang in Language::all_variants() {
                    let _ = extract_return_nl(&sig, *lang);
                }
            }
        }
    }

    // Minimal Rust function chunk used by the call-context tests below.
    fn test_chunk(name: &str) -> Chunk {
        Chunk {
            id: name.to_string(),
            file: PathBuf::from("src/test.rs"),
            language: Language::Rust,
            chunk_type: ChunkType::Function,
            name: name.to_string(),
            signature: format!("fn {}()", name),
            content: String::new(),
            doc: None,
            line_start: 1,
            line_end: 10,
            content_hash: String::new(),
            parent_id: None,
            window_idx: None,
            parent_type_name: None,
        }
    }

    #[test]
    fn test_call_context_callers_only() {
        let chunk = test_chunk("handle_request");
        let ctx = CallContext {
            callers: vec!["main".to_string(), "serve".to_string()],
            callees: vec![],
        };
        let freq = std::collections::HashMap::new();
        let nl = generate_nl_with_call_context(&chunk, &ctx, &freq, 5, 5);
        assert!(nl.contains("Called by: main, serve"), "got: {}", nl);
        assert!(!nl.contains("Calls:"), "got: {}", nl);
    }

    #[test]
    fn test_call_context_callees_with_idf_filter() {
        let chunk = test_chunk("process");
        let ctx = CallContext {
            callers: vec![],
            callees: vec![
                "validate".to_string(),
                "log".to_string(),
                "save".to_string(),
            ],
        };
        let mut freq = std::collections::HashMap::new();
        freq.insert("log".to_string(), 0.15_f32);
        freq.insert("validate".to_string(), 0.05_f32);
        freq.insert("save".to_string(), 0.02_f32);
        let nl = generate_nl_with_call_context(&chunk, &ctx, &freq, 5, 5);
        assert!(nl.contains("Calls: validate, save"), "got: {}", nl);
        assert!(!nl.contains("log"), "log should be filtered, got: {}", nl);
    }

    #[test]
    fn test_call_context_max_callers_truncation() {
        let chunk = test_chunk("f");
        let ctx = CallContext {
            callers: vec![
                "a".to_string(),
                "b".to_string(),
                "c".to_string(),
                "d".to_string(),
            ],
            callees: vec![],
        };
        let freq = std::collections::HashMap::new();
        let nl = generate_nl_with_call_context(&chunk, &ctx, &freq, 2, 5);
        assert!(nl.contains("Called by: a, b"), "got: {}", nl);
        assert!(!nl.contains(", c"), "c should be truncated, got: {}", nl);
    }

    #[test]
    fn test_call_context_empty_returns_base() {
        let chunk = test_chunk("lonely");
        let ctx = CallContext::default();
        let freq = std::collections::HashMap::new();
        let base = generate_nl_description(&chunk);
        let enriched = generate_nl_with_call_context(&chunk, &ctx, &freq, 5, 5);
        assert_eq!(base, enriched);
    }

    #[test]
    fn test_call_context_and_summary_prepends_summary_appends_hyde() {
        let chunk = test_chunk("process_data");
        let ctx = CallContext {
            callers: vec!["main".to_string()],
            callees: vec!["validate".to_string()],
        };
        let freq = std::collections::HashMap::new();
        let summary = "Processes raw data into structured output";
        let hyde = "how to process data\ntransform raw input";
        let nl = generate_nl_with_call_context_and_summary(
            &chunk,
            &ctx,
            &freq,
            5,
            5,
            Some(summary),
            Some(hyde),
        );
        assert!(
            nl.starts_with(summary),
            "Summary should be prepended, got: {}",
            nl
        );
        assert!(
            nl.contains("Queries: how to process data, transform raw input"),
            "HyDE queries should be appended, got: {}",
            nl
        );
        assert!(nl.contains("Called by: main"), "got: {}", nl);
        assert!(nl.contains("Calls: validate"), "got: {}", nl);
    }

    #[test]
    fn test_callee_idf_filtering_above_threshold() {
        let chunk = test_chunk("my_func");
        let ctx = CallContext {
            callers: vec![],
            callees: vec!["log".to_string(), "rare_fn".to_string()],
        };
        let mut freq = std::collections::HashMap::new();
        freq.insert("log".to_string(), 0.15);
        freq.insert("rare_fn".to_string(), 0.02);
        let nl = generate_nl_with_call_context(&chunk, &ctx, &freq, 5, 5);
        assert!(
            !nl.contains("Calls: log"),
            "High-frequency callee 'log' should be filtered, got: {}",
            nl
        );
        assert!(
            nl.contains("rare fn"),
            "Low-frequency callee 'rare_fn' should be kept, got: {}",
            nl
        );
    }

    #[test]
    fn enrichment_nl_includes_callers_and_callees() {
        let chunk = test_chunk("process_data");
        let ctx = CallContext {
            callers: vec!["handle_request".to_string(), "run_pipeline".to_string()],
            callees: vec!["validate_input".to_string(), "transform_record".to_string()],
        };
        let freq = std::collections::HashMap::new();
        let nl = generate_nl_with_call_context_and_summary(&chunk, &ctx, &freq, 5, 5, None, None);
        assert!(
            nl.contains("Called by:"),
            "NL must contain 'Called by:' section, got: {nl}"
        );
        assert!(
            nl.contains("handle request"),
            "Caller 'handle_request' should appear tokenized, got: {nl}"
        );
        assert!(
            nl.contains("run pipeline"),
            "Caller 'run_pipeline' should appear tokenized, got: {nl}"
        );
        assert!(
            nl.contains("Calls:"),
            "NL must contain 'Calls:' section, got: {nl}"
        );
        assert!(
            nl.contains("validate input"),
            "Callee 'validate_input' should appear tokenized, got: {nl}"
        );
        assert!(
            nl.contains("transform record"),
            "Callee 'transform_record' should appear tokenized, got: {nl}"
        );
    }

    #[test]
    fn enrichment_nl_filters_high_freq_callees() {
        let chunk = test_chunk("my_func");
        let ctx = CallContext {
            callers: vec!["caller_a".to_string()],
            callees: vec![
                "log".to_string(),
                "rare_fn".to_string(),
                "unwrap".to_string(),
            ],
        };
        let mut freq = std::collections::HashMap::new();
        freq.insert("log".to_string(), 0.15);
        freq.insert("unwrap".to_string(), 0.12);
        freq.insert("rare_fn".to_string(), 0.02);
        let nl = generate_nl_with_call_context_and_summary(&chunk, &ctx, &freq, 5, 5, None, None);
        assert!(
            !nl.contains("log"),
            "High-freq callee 'log' (15%) must be filtered, got: {nl}"
        );
        assert!(
            !nl.contains("unwrap"),
            "High-freq callee 'unwrap' (12%) must be filtered, got: {nl}"
        );
        assert!(
            nl.contains("rare fn"),
            "Low-freq callee 'rare_fn' (2%) should be kept, got: {nl}"
        );
    }

    #[test]
    fn enrichment_nl_with_summary_and_call_context() {
        let chunk = test_chunk("search_index");
        let ctx = CallContext {
            callers: vec!["query_handler".to_string()],
            callees: vec!["embed_text".to_string()],
        };
        let freq = std::collections::HashMap::new();
        let summary = "Searches the HNSW index for nearest neighbors";
        let hyde = "find similar code\nsemantic search";
        let nl = generate_nl_with_call_context_and_summary(
            &chunk,
            &ctx,
            &freq,
            5,
            5,
            Some(summary),
            Some(hyde),
        );
        assert!(
            nl.starts_with(summary),
            "Summary must be prepended, got: {nl}"
        );
        assert!(
            nl.contains("Called by: query handler"),
            "Caller must appear, got: {nl}"
        );
        assert!(
            nl.contains("Calls: embed text"),
            "Callee must appear, got: {nl}"
        );
        assert!(
            nl.contains("Queries: find similar code, semantic search"),
            "HyDE queries must be appended, got: {nl}"
        );
    }
}