use std::fmt::Write;
use crate::plan::{choose_driving_table, execution_hints, shape_signature};
use crate::{
ComparisonOp, DrivingTable, ExpansionSlot, Predicate, QueryAst, QueryStep, ScalarValue,
TraverseDirection,
};
/// A single positional SQL bind-parameter value produced during compilation.
///
/// Index `N` (1-based) of a [`CompiledQuery::binds`] vector backs the `?N`
/// placeholder in the generated SQL.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum BindValue {
    /// UTF-8 text bind (queries, kinds, logical ids, JSON paths, ...).
    Text(String),
    /// 64-bit integer bind.
    Integer(i64),
    /// Boolean bind.
    Bool(bool),
}
/// Hash of a query's structural signature (see `shape_signature` and
/// `hash_signature`): queries that differ only in bound values share a hash.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ShapeHash(pub u64);
/// A flat (non-grouped) query compiled to a single SQL statement.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CompiledQuery {
    /// Generated SQL text containing `?N` bind placeholders.
    pub sql: String,
    /// Values for the placeholders, in 1-based `?N` order.
    pub binds: Vec<BindValue>,
    /// Structural hash of the source AST.
    pub shape_hash: ShapeHash,
    /// Which table drove candidate selection (vector, FTS, or plain nodes).
    pub driving_table: DrivingTable,
    /// Execution hints derived from the AST.
    pub hints: crate::ExecutionHints,
}
/// A grouped query: a compiled root statement plus its expansion slots.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CompiledGroupedQuery {
    /// The root query, compiled with expansions stripped.
    pub root: CompiledQuery,
    /// Expansion slots copied verbatim from the AST.
    pub expansions: Vec<ExpansionSlot>,
    /// Structural hash of the full grouped AST (expansions included).
    pub shape_hash: ShapeHash,
    /// Execution hints derived from the full AST.
    pub hints: crate::ExecutionHints,
}
#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)]
pub enum CompileError {
#[error("multiple traversal steps are not supported in v1")]
TooManyTraversals,
#[error("flat query compilation does not support expansions; use compile_grouped")]
FlatCompileDoesNotSupportExpansions,
#[error("duplicate expansion slot name: {0}")]
DuplicateExpansionSlot(String),
#[error("expansion slot name must be non-empty")]
EmptyExpansionSlotName,
#[error("too many expansion slots: max {MAX_EXPANSION_SLOTS}, got {0}")]
TooManyExpansionSlots(usize),
#[error("too many bind parameters: max 15, got {0}")]
TooManyBindParameters(usize),
#[error("traversal depth {0} exceeds maximum of {MAX_TRAVERSAL_DEPTH}")]
TraversalTooDeep(usize),
#[error("invalid JSON path: must match $(.key)+ pattern, got {0:?}")]
InvalidJsonPath(String),
}
/// Neutralize FTS5 query syntax by quoting every whitespace-separated token.
///
/// Each token is wrapped in double quotes, with embedded `"` doubled per the
/// FTS5 escaping rule, so operators such as `AND`, `OR`, `NEAR`, `*`, `:` and
/// parentheses are matched as literal text instead of being interpreted.
/// Whitespace-only input yields an empty string.
fn sanitize_fts5_query(raw: &str) -> String {
    let mut quoted_tokens: Vec<String> = Vec::new();
    for word in raw.split_whitespace() {
        let mut phrase = String::with_capacity(word.len() + 2);
        phrase.push('"');
        for ch in word.chars() {
            // Double any embedded quote, then emit the character itself.
            if ch == '"' {
                phrase.push('"');
            }
            phrase.push(ch);
        }
        phrase.push('"');
        quoted_tokens.push(phrase);
    }
    quoted_tokens.join(" ")
}
/// Validate that `path` is a safe SQLite JSON path of the form `$(.key)+`,
/// where every key is a non-empty run of ASCII alphanumerics or `_`.
///
/// The path is later passed to `json_extract` as a bind parameter; this check
/// is the guard that keeps arbitrary path syntax out of the query.
///
/// # Errors
///
/// Returns [`CompileError::InvalidJsonPath`] when the path does not match.
fn validate_json_path(path: &str) -> Result<(), CompileError> {
    // Require the `$.` prefix, then one or more dot-separated non-empty keys.
    // The previous `a || b && c` precedence accidentally accepted empty
    // segments (e.g. `$.`, `$..foo`, `$.foo.`); rejecting them keeps the
    // accepted grammar exactly `$(.key)+` as the error message promises.
    let is_valid_key = |segment: &str| {
        !segment.is_empty()
            && segment
                .chars()
                .all(|c| c.is_ascii_alphanumeric() || c == '_')
    };
    let valid = path
        .strip_prefix("$.")
        .is_some_and(|keys| keys.split('.').all(is_valid_key));
    if !valid {
        return Err(CompileError::InvalidJsonPath(path.to_owned()));
    }
    Ok(())
}
/// Hard cap on bind parameters accepted per compiled statement.
const MAX_BIND_PARAMETERS: usize = 15;
/// Maximum number of named expansion slots in a grouped query.
const MAX_EXPANSION_SLOTS: usize = 8;
/// Maximum recursion depth for traversal steps and expansion slots.
const MAX_TRAVERSAL_DEPTH: usize = 50;
/// Compile a flat (non-grouped) query AST into a single SQLite statement.
///
/// The generated statement is a `WITH RECURSIVE` query with up to three
/// parts: a `base_candidates` CTE (driven by vector search, FTS, or a plain
/// node scan, per `choose_driving_table`), an optional `traversed` recursive
/// CTE for a single edge traversal, and an outer `SELECT` that resolves
/// candidate logical ids back to live node rows and applies any remaining
/// predicates.
///
/// All user-supplied values travel through `?N` bind parameters (JSON paths
/// included, after validation); the only text spliced directly into the SQL
/// comes from fixed enums and `usize` limits, never raw input.
///
/// # Errors
///
/// Returns a [`CompileError`] when the AST has expansions, more than one
/// traversal step, an excessive traversal depth, an invalid JSON path, or
/// would need more than `MAX_BIND_PARAMETERS` binds.
#[allow(clippy::too_many_lines)]
pub fn compile_query(ast: &QueryAst) -> Result<CompiledQuery, CompileError> {
    // Expansions require the grouped pipeline (`compile_grouped_query`).
    if !ast.expansions.is_empty() {
        return Err(CompileError::FlatCompileDoesNotSupportExpansions);
    }
    // v1 supports at most one recursive traversal per statement.
    let traversals = ast
        .steps
        .iter()
        .filter(|step| matches!(step, QueryStep::Traverse { .. }))
        .count();
    if traversals > 1 {
        return Err(CompileError::TooManyTraversals);
    }
    // Reject any traversal deeper than the global cap before generating SQL.
    let excessive_depth = ast.steps.iter().find_map(|step| {
        if let QueryStep::Traverse { max_depth, .. } = step
            && *max_depth > MAX_TRAVERSAL_DEPTH
        {
            return Some(*max_depth);
        }
        None
    });
    if let Some(depth) = excessive_depth {
        return Err(CompileError::TraversalTooDeep(depth));
    }
    // Planning phase: driving table, execution hints, structural hash.
    let driving_table = choose_driving_table(ast);
    let hints = execution_hints(ast);
    let shape_hash = ShapeHash(hash_signature(&shape_signature(ast)));
    // Base CTE limit: the search step's limit when present, else the final
    // limit, else a default of 25.
    let base_limit = ast
        .steps
        .iter()
        .find_map(|step| match step {
            QueryStep::VectorSearch { limit, .. } | QueryStep::TextSearch { limit, .. } => {
                Some(*limit)
            }
            _ => None,
        })
        .or(ast.final_limit)
        .unwrap_or(25);
    let final_limit = ast.final_limit.unwrap_or(base_limit);
    // Capture the single Traverse step's parameters, if any (count checked
    // above, so `find_map` sees at most one).
    let traversal = ast.steps.iter().find_map(|step| {
        if let QueryStep::Traverse {
            direction,
            label,
            max_depth,
        } = step
        {
            Some((*direction, label.as_str(), *max_depth))
        } else {
            None
        }
    });
    // Positional binds: index N of this Vec backs placeholder ?N (1-based),
    // which is why every push is immediately followed by `binds.len()`.
    let mut binds = Vec::new();
    let base_candidates = match driving_table {
        DrivingTable::VecNodes => {
            // Vector-driven plan: KNN over chunk embeddings, resolved back to
            // the owning, non-superseded nodes of the root kind.
            let query = ast
                .steps
                .iter()
                .find_map(|step| {
                    if let QueryStep::VectorSearch { query, .. } = step {
                        Some(query.as_str())
                    } else {
                        None
                    }
                })
                .unwrap_or_else(|| unreachable!("VecNodes chosen but no VectorSearch step in AST"));
            // `binds` is empty here, so these become ?1 and ?2.
            binds.push(BindValue::Text(query.to_owned()));
            binds.push(BindValue::Text(ast.root_kind.clone()));
            format!(
                "base_candidates AS (
SELECT DISTINCT src.logical_id
FROM (
SELECT chunk_id FROM vec_nodes_active
WHERE embedding MATCH ?1
LIMIT {base_limit}
) vc
JOIN chunks c ON c.id = vc.chunk_id
JOIN nodes src ON src.logical_id = c.node_logical_id AND src.superseded_at IS NULL
WHERE src.kind = ?2
)"
            )
        }
        DrivingTable::FtsNodes => {
            // FTS-driven plan: the raw user query is sanitized into quoted
            // tokens before it is bound, neutralizing FTS5 operators.
            let raw_query = ast
                .steps
                .iter()
                .find_map(|step| {
                    if let QueryStep::TextSearch { query, .. } = step {
                        Some(query.as_str())
                    } else {
                        None
                    }
                })
                .unwrap_or_else(|| unreachable!("FtsNodes chosen but no TextSearch step in AST"));
            // `binds` is empty here, so these become ?1 and ?2.
            binds.push(BindValue::Text(sanitize_fts5_query(raw_query)));
            binds.push(BindValue::Text(ast.root_kind.clone()));
            format!(
                "base_candidates AS (
SELECT DISTINCT src.logical_id
FROM fts_nodes f
JOIN chunks c ON c.id = f.chunk_id
JOIN nodes src ON src.logical_id = c.node_logical_id AND src.superseded_at IS NULL
WHERE fts_nodes MATCH ?1
AND src.kind = ?2
LIMIT {base_limit}
)"
            )
        }
        DrivingTable::Nodes => {
            // Plain node scan: push every filter predicate (except KindEq,
            // handled in the outer SELECT) down into the base CTE so the scan
            // is narrowed before the LIMIT applies.
            binds.push(BindValue::Text(ast.root_kind.clone()));
            let mut sql = "base_candidates AS (
SELECT DISTINCT src.logical_id
FROM nodes src
WHERE src.superseded_at IS NULL
AND src.kind = ?1"
                .to_owned();
            for step in &ast.steps {
                if let QueryStep::Filter(predicate) = step {
                    match predicate {
                        Predicate::LogicalIdEq(logical_id) => {
                            binds.push(BindValue::Text(logical_id.clone()));
                            let bind_index = binds.len();
                            let _ = write!(
                                &mut sql,
                                "\n AND src.logical_id = ?{bind_index}"
                            );
                        }
                        Predicate::JsonPathEq { path, value } => {
                            // Path is validated, then bound — never spliced.
                            validate_json_path(path)?;
                            binds.push(BindValue::Text(path.clone()));
                            let path_index = binds.len();
                            binds.push(match value {
                                ScalarValue::Text(text) => BindValue::Text(text.clone()),
                                ScalarValue::Integer(integer) => BindValue::Integer(*integer),
                                ScalarValue::Bool(boolean) => BindValue::Bool(*boolean),
                            });
                            let value_index = binds.len();
                            let _ = write!(
                                &mut sql,
                                "\n AND json_extract(src.properties, ?{path_index}) = ?{value_index}"
                            );
                        }
                        Predicate::JsonPathCompare { path, op, value } => {
                            validate_json_path(path)?;
                            binds.push(BindValue::Text(path.clone()));
                            let path_index = binds.len();
                            binds.push(match value {
                                ScalarValue::Text(text) => BindValue::Text(text.clone()),
                                ScalarValue::Integer(integer) => BindValue::Integer(*integer),
                                ScalarValue::Bool(boolean) => BindValue::Bool(*boolean),
                            });
                            let value_index = binds.len();
                            // Operator text comes from a closed enum, so it is
                            // safe to splice.
                            let operator = match op {
                                ComparisonOp::Gt => ">",
                                ComparisonOp::Gte => ">=",
                                ComparisonOp::Lt => "<",
                                ComparisonOp::Lte => "<=",
                            };
                            let _ = write!(
                                &mut sql,
                                "\n AND json_extract(src.properties, ?{path_index}) {operator} ?{value_index}"
                            );
                        }
                        Predicate::SourceRefEq(source_ref) => {
                            binds.push(BindValue::Text(source_ref.clone()));
                            let bind_index = binds.len();
                            let _ = write!(
                                &mut sql,
                                "\n AND src.source_ref = ?{bind_index}"
                            );
                        }
                        Predicate::KindEq(_) => {
                            // Handled in the outer SELECT for the Nodes driver.
                        }
                    }
                }
            }
            let _ = write!(
                &mut sql,
                "\n LIMIT {base_limit}\n )"
            );
            sql
        }
    };
    let mut sql = format!("WITH RECURSIVE\n{base_candidates}");
    // The outer SELECT reads from the traversal CTE when one exists,
    // otherwise straight from base_candidates.
    let source_alias = if traversal.is_some() { "t" } else { "bc" };
    if let Some((direction, label, max_depth)) = traversal {
        binds.push(BindValue::Text(label.to_owned()));
        let label_index = binds.len();
        // Edge join orientation depends on traversal direction.
        let (join_condition, next_logical_id) = match direction {
            TraverseDirection::Out => ("e.source_logical_id = t.logical_id", "e.target_logical_id"),
            TraverseDirection::In => ("e.target_logical_id = t.logical_id", "e.source_logical_id"),
        };
        // The `visited` column is a comma-delimited id list used with instr()
        // to avoid revisiting nodes (cycle guard); recursion is bounded by
        // both max_depth and the hint's hard row limit.
        let _ = write!(
            &mut sql,
            ",
traversed(logical_id, depth, visited) AS (
SELECT bc.logical_id, 0, printf(',%s,', bc.logical_id)
FROM base_candidates bc
UNION ALL
SELECT {next_logical_id}, t.depth + 1, t.visited || {next_logical_id} || ','
FROM traversed t
JOIN edges e ON {join_condition}
AND e.kind = ?{label_index}
AND e.superseded_at IS NULL
WHERE t.depth < {max_depth}
AND instr(t.visited, printf(',%s,', {next_logical_id})) = 0
LIMIT {}
)",
            hints.hard_limit
        );
    }
    // Outer SELECT: resolve logical ids to live node rows. `WHERE 1 = 1` lets
    // the filter loop below append `AND ...` clauses unconditionally.
    let _ = write!(
        &mut sql,
        "
SELECT DISTINCT n.row_id, n.logical_id, n.kind, n.properties
FROM {} {source_alias}
JOIN nodes n ON n.logical_id = {source_alias}.logical_id
AND n.superseded_at IS NULL
WHERE 1 = 1",
        if traversal.is_some() {
            "traversed"
        } else {
            "base_candidates"
        }
    );
    for step in &ast.steps {
        if let QueryStep::Filter(predicate) = step {
            if driving_table == DrivingTable::Nodes {
                // Nodes driver already pushed every predicate except KindEq
                // into base_candidates; only KindEq remains for the outer row.
                if let Predicate::KindEq(kind) = predicate {
                    binds.push(BindValue::Text(kind.clone()));
                    let bind_index = binds.len();
                    let _ = write!(&mut sql, "\n AND n.kind = ?{bind_index}");
                }
                continue;
            }
            // Vector/FTS drivers apply all predicates in the outer WHERE.
            match predicate {
                Predicate::LogicalIdEq(logical_id) => {
                    binds.push(BindValue::Text(logical_id.clone()));
                    let bind_index = binds.len();
                    let _ = write!(&mut sql, "\n AND n.logical_id = ?{bind_index}");
                }
                Predicate::KindEq(kind) => {
                    binds.push(BindValue::Text(kind.clone()));
                    let bind_index = binds.len();
                    let _ = write!(&mut sql, "\n AND n.kind = ?{bind_index}");
                }
                Predicate::JsonPathEq { path, value } => {
                    validate_json_path(path)?;
                    binds.push(BindValue::Text(path.clone()));
                    let path_index = binds.len();
                    binds.push(match value {
                        ScalarValue::Text(text) => BindValue::Text(text.clone()),
                        ScalarValue::Integer(integer) => BindValue::Integer(*integer),
                        ScalarValue::Bool(boolean) => BindValue::Bool(*boolean),
                    });
                    let value_index = binds.len();
                    let _ = write!(
                        &mut sql,
                        "\n AND json_extract(n.properties, ?{path_index}) = ?{value_index}",
                    );
                }
                Predicate::JsonPathCompare { path, op, value } => {
                    validate_json_path(path)?;
                    binds.push(BindValue::Text(path.clone()));
                    let path_index = binds.len();
                    binds.push(match value {
                        ScalarValue::Text(text) => BindValue::Text(text.clone()),
                        ScalarValue::Integer(integer) => BindValue::Integer(*integer),
                        ScalarValue::Bool(boolean) => BindValue::Bool(*boolean),
                    });
                    let value_index = binds.len();
                    let operator = match op {
                        ComparisonOp::Gt => ">",
                        ComparisonOp::Gte => ">=",
                        ComparisonOp::Lt => "<",
                        ComparisonOp::Lte => "<=",
                    };
                    let _ = write!(
                        &mut sql,
                        "\n AND json_extract(n.properties, ?{path_index}) {operator} ?{value_index}",
                    );
                }
                Predicate::SourceRefEq(source_ref) => {
                    binds.push(BindValue::Text(source_ref.clone()));
                    let bind_index = binds.len();
                    let _ = write!(&mut sql, "\n AND n.source_ref = ?{bind_index}");
                }
            }
        }
    }
    let _ = write!(&mut sql, "\nLIMIT {final_limit}");
    // Enforce the bind cap only after the statement is fully assembled, so
    // the reported count is the true total.
    if binds.len() > MAX_BIND_PARAMETERS {
        return Err(CompileError::TooManyBindParameters(binds.len()));
    }
    Ok(CompiledQuery {
        sql,
        binds,
        shape_hash,
        driving_table,
        hints,
    })
}
/// Compile a query AST that may carry named expansion slots.
///
/// Validates the slots (count, non-empty names, depth bound, uniqueness),
/// compiles the root query with the expansions stripped, and returns the
/// compiled root alongside the original slots.
///
/// # Errors
///
/// Returns a [`CompileError`] for an invalid expansion slot or any error
/// produced while compiling the root query.
pub fn compile_grouped_query(ast: &QueryAst) -> Result<CompiledGroupedQuery, CompileError> {
    let slot_count = ast.expansions.len();
    if slot_count > MAX_EXPANSION_SLOTS {
        return Err(CompileError::TooManyExpansionSlots(slot_count));
    }
    // Validate slots in declaration order so the first offending slot
    // determines which error is reported.
    let mut seen_names = std::collections::BTreeSet::new();
    for expansion in &ast.expansions {
        if expansion.slot.trim().is_empty() {
            return Err(CompileError::EmptyExpansionSlotName);
        }
        if expansion.max_depth > MAX_TRAVERSAL_DEPTH {
            return Err(CompileError::TraversalTooDeep(expansion.max_depth));
        }
        let newly_inserted = seen_names.insert(expansion.slot.clone());
        if !newly_inserted {
            return Err(CompileError::DuplicateExpansionSlot(expansion.slot.clone()));
        }
    }
    // The root statement is the same query with expansions stripped out.
    let root = {
        let mut flat_ast = ast.clone();
        flat_ast.expansions.clear();
        compile_query(&flat_ast)?
    };
    // Hints and shape hash cover the full AST, expansions included.
    let grouped_hints = execution_hints(ast);
    let grouped_shape = ShapeHash(hash_signature(&shape_signature(ast)));
    Ok(CompiledGroupedQuery {
        root,
        expansions: ast.expansions.clone(),
        shape_hash: grouped_shape,
        hints: grouped_hints,
    })
}
/// FNV-1a (64-bit) hash of a shape-signature string.
///
/// Used to derive a [`ShapeHash`] so that structurally identical queries map
/// to the same hash regardless of their bound values.
fn hash_signature(signature: &str) -> u64 {
    // Standard 64-bit FNV-1a offset basis and prime.
    const OFFSET: u64 = 0xcbf2_9ce4_8422_2325;
    const PRIME: u64 = 0x0000_0100_0000_01b3;
    signature
        .bytes()
        .fold(OFFSET, |hash, byte| (hash ^ u64::from(byte)).wrapping_mul(PRIME))
}
#[cfg(test)]
#[allow(clippy::expect_used, clippy::items_after_statements)]
mod tests {
    //! Unit tests for query compilation: driver selection, shape hashing,
    //! traversal bounds, predicate placement per driver, resource limits,
    //! and FTS5 query sanitization.
    use rstest::rstest;

    use crate::{
        CompileError, DrivingTable, QueryBuilder, TraverseDirection, compile_grouped_query,
        compile_query,
    };

    // Vector-driven queries must resolve chunk hits back to owning nodes.
    #[test]
    fn vector_query_compiles_to_chunk_resolution() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .vector_search("budget", 5)
                .limit(5)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::VecNodes);
        assert!(compiled.sql.contains("JOIN chunks c ON c.id = vc.chunk_id"));
        assert!(
            compiled
                .sql
                .contains("JOIN nodes src ON src.logical_id = c.node_logical_id")
        );
    }

    // Limits are structural: different limits must change the shape hash.
    #[rstest]
    #[case(5, 7)]
    #[case(3, 11)]
    fn structural_limits_change_shape_hash(#[case] left: usize, #[case] right: usize) {
        let left_compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", left)
                .limit(left)
                .into_ast(),
        )
        .expect("left query");
        let right_compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", right)
                .limit(right)
                .into_ast(),
        )
        .expect("right query");
        assert_ne!(left_compiled.shape_hash, right_compiled.shape_hash);
    }

    // Traversals emit a recursive CTE bounded by the requested depth.
    #[test]
    fn traversal_query_is_depth_bounded() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", 5)
                .traverse(TraverseDirection::Out, "HAS_TASK", 3)
                .limit(10)
                .into_ast(),
        )
        .expect("compiled traversal");
        assert!(compiled.sql.contains("WITH RECURSIVE"));
        assert!(compiled.sql.contains("WHERE t.depth < 3"));
    }

    // Nodes driver: filters appear both in base_candidates (src.*) and are
    // bound exactly once each.
    #[test]
    fn logical_id_filter_is_compiled() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_logical_id_eq("meeting-123")
                .filter_json_text_eq("$.status", "active")
                .limit(1)
                .into_ast(),
        )
        .expect("compiled query");
        assert!(compiled.sql.contains("n.logical_id ="));
        assert!(compiled.sql.contains("src.logical_id ="));
        assert!(compiled.sql.contains("json_extract"));
        use crate::BindValue;
        assert_eq!(
            compiled
                .binds
                .iter()
                .filter(|b| matches!(b, BindValue::Text(s) if s == "meeting-123"))
                .count(),
            1
        );
    }

    // Injection-shaped JSON paths must be rejected before SQL generation.
    #[test]
    fn compile_rejects_invalid_json_path() {
        use crate::{Predicate, QueryStep, ScalarValue};
        let mut ast = QueryBuilder::nodes("Meeting").into_ast();
        ast.steps.push(QueryStep::Filter(Predicate::JsonPathEq {
            path: "$') OR 1=1 --".to_owned(),
            value: ScalarValue::Text("x".to_owned()),
        }));
        use crate::CompileError;
        let result = compile_query(&ast);
        assert!(
            matches!(result, Err(CompileError::InvalidJsonPath(_))),
            "expected InvalidJsonPath, got {result:?}"
        );
    }

    // Well-formed `$(.key)+` paths must pass validation.
    #[test]
    fn compile_accepts_valid_json_paths() {
        use crate::{Predicate, QueryStep, ScalarValue};
        for valid_path in ["$.status", "$.foo.bar", "$.a_b.c2"] {
            let mut ast = QueryBuilder::nodes("Meeting").into_ast();
            ast.steps.push(QueryStep::Filter(Predicate::JsonPathEq {
                path: valid_path.to_owned(),
                value: ScalarValue::Text("v".to_owned()),
            }));
            assert!(
                compile_query(&ast).is_ok(),
                "expected valid path {valid_path:?} to compile"
            );
        }
    }

    // 8 JSON filters = 16 binds + 1 root kind = 17, over the cap of 15.
    #[test]
    fn compile_rejects_too_many_bind_parameters() {
        use crate::{Predicate, QueryStep, ScalarValue};
        let mut ast = QueryBuilder::nodes("Meeting").into_ast();
        for i in 0..8 {
            ast.steps.push(QueryStep::Filter(Predicate::JsonPathEq {
                path: format!("$.f{i}"),
                value: ScalarValue::Text("v".to_owned()),
            }));
        }
        use crate::CompileError;
        let result = compile_query(&ast);
        assert!(
            matches!(result, Err(CompileError::TooManyBindParameters(17))),
            "expected TooManyBindParameters(17), got {result:?}"
        );
    }

    // Depth 51 exceeds MAX_TRAVERSAL_DEPTH (50).
    #[test]
    fn compile_rejects_excessive_traversal_depth() {
        let result = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", 5)
                .traverse(TraverseDirection::Out, "HAS_TASK", 51)
                .limit(10)
                .into_ast(),
        );
        assert!(
            matches!(result, Err(CompileError::TraversalTooDeep(51))),
            "expected TraversalTooDeep(51), got {result:?}"
        );
    }

    // Shape hash must ignore bound values (search text differs here).
    #[test]
    fn grouped_queries_with_same_structure_share_shape_hash() {
        let left = compile_grouped_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", 5)
                .expand("tasks", TraverseDirection::Out, "HAS_TASK", 1)
                .limit(10)
                .into_ast(),
        )
        .expect("left grouped query");
        let right = compile_grouped_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("planning", 5)
                .expand("tasks", TraverseDirection::Out, "HAS_TASK", 1)
                .limit(10)
                .into_ast(),
        )
        .expect("right grouped query");
        assert_eq!(left.shape_hash, right.shape_hash);
    }

    // Slot names must be unique even when edge kinds differ.
    #[test]
    fn compile_grouped_rejects_duplicate_expansion_slot_names() {
        let result = compile_grouped_query(
            &QueryBuilder::nodes("Meeting")
                .expand("tasks", TraverseDirection::Out, "HAS_TASK", 1)
                .expand("tasks", TraverseDirection::Out, "HAS_DECISION", 1)
                .into_ast(),
        );
        assert!(
            matches!(result, Err(CompileError::DuplicateExpansionSlot(ref slot)) if slot == "tasks"),
            "expected DuplicateExpansionSlot(\"tasks\"), got {result:?}"
        );
    }

    // Flat compilation must refuse ASTs with expansion slots.
    #[test]
    fn flat_compile_rejects_queries_with_expansions() {
        let result = compile_query(
            &QueryBuilder::nodes("Meeting")
                .expand("tasks", TraverseDirection::Out, "HAS_TASK", 1)
                .into_ast(),
        );
        assert!(
            matches!(
                result,
                Err(CompileError::FlatCompileDoesNotSupportExpansions)
            ),
            "expected FlatCompileDoesNotSupportExpansions, got {result:?}"
        );
    }

    // JSON paths must be bound, never embedded as SQL string literals.
    #[test]
    fn json_path_compiled_as_bind_parameter() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_json_text_eq("$.status", "active")
                .limit(1)
                .into_ast(),
        )
        .expect("compiled query");
        assert!(
            !compiled.sql.contains("'$.status'"),
            "JSON path must not appear as a SQL string literal"
        );
        assert!(
            compiled.sql.contains("json_extract(src.properties, ?"),
            "JSON path must be a bind parameter (pushed into base_candidates for Nodes driver)"
        );
        use crate::BindValue;
        assert!(
            compiled
                .binds
                .iter()
                .any(|b| matches!(b, BindValue::Text(s) if s == "$.status"))
        );
        assert!(
            compiled
                .binds
                .iter()
                .any(|b| matches!(b, BindValue::Text(s) if s == "active"))
        );
    }

    // --- FTS5 sanitizer behavior ---

    #[test]
    fn sanitize_fts5_plain_tokens() {
        use super::sanitize_fts5_query;
        assert_eq!(
            sanitize_fts5_query("budget meeting"),
            "\"budget\" \"meeting\""
        );
    }

    #[test]
    fn sanitize_fts5_apostrophe() {
        use super::sanitize_fts5_query;
        assert_eq!(sanitize_fts5_query("User's name"), "\"User's\" \"name\"");
    }

    // Embedded double quotes are doubled per the FTS5 escaping rule.
    #[test]
    fn sanitize_fts5_embedded_double_quotes() {
        use super::sanitize_fts5_query;
        assert_eq!(
            sanitize_fts5_query(r#"say "hello" world"#),
            "\"say\" \"\"\"hello\"\"\" \"world\""
        );
    }

    // Boolean operators become quoted literals, not query syntax.
    #[test]
    fn sanitize_fts5_operators_neutralized() {
        use super::sanitize_fts5_query;
        assert_eq!(
            sanitize_fts5_query("cats AND dogs OR fish"),
            "\"cats\" \"AND\" \"dogs\" \"OR\" \"fish\""
        );
    }

    // Prefix stars, column filters, parens, NEAR — all neutralized.
    #[test]
    fn sanitize_fts5_special_chars() {
        use super::sanitize_fts5_query;
        assert_eq!(sanitize_fts5_query("prefix*"), "\"prefix*\"");
        assert_eq!(sanitize_fts5_query("col:value"), "\"col:value\"");
        assert_eq!(sanitize_fts5_query("(a OR b)"), "\"(a\" \"OR\" \"b)\"");
        assert_eq!(sanitize_fts5_query("a NEAR b"), "\"a\" \"NEAR\" \"b\"");
    }

    #[test]
    fn sanitize_fts5_empty_input() {
        use super::sanitize_fts5_query;
        assert_eq!(sanitize_fts5_query(""), "");
        assert_eq!(sanitize_fts5_query("   "), "");
    }

    // --- Predicate placement per driving table ---

    #[test]
    fn nodes_driver_pushes_json_eq_filter_into_base_candidates() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_json_text_eq("$.status", "active")
                .limit(5)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::Nodes);
        assert!(
            compiled.sql.contains("json_extract(src.properties, ?"),
            "json_extract must reference src (base_candidates), got:\n{}",
            compiled.sql,
        );
        assert!(
            !compiled.sql.contains("json_extract(n.properties, ?"),
            "json_extract must NOT appear in outer WHERE for Nodes driver, got:\n{}",
            compiled.sql,
        );
    }

    #[test]
    fn nodes_driver_pushes_json_compare_filter_into_base_candidates() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_json_integer_gte("$.priority", 5)
                .limit(10)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::Nodes);
        assert!(
            compiled.sql.contains("json_extract(src.properties, ?"),
            "comparison filter must be in base_candidates, got:\n{}",
            compiled.sql,
        );
        assert!(
            !compiled.sql.contains("json_extract(n.properties, ?"),
            "comparison filter must NOT be in outer WHERE for Nodes driver",
        );
        assert!(
            compiled.sql.contains(">= ?"),
            "expected >= operator in SQL, got:\n{}",
            compiled.sql,
        );
    }

    #[test]
    fn nodes_driver_pushes_source_ref_filter_into_base_candidates() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_source_ref_eq("ref-123")
                .limit(5)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::Nodes);
        assert!(
            compiled.sql.contains("src.source_ref = ?"),
            "source_ref filter must be in base_candidates, got:\n{}",
            compiled.sql,
        );
        assert!(
            !compiled.sql.contains("n.source_ref = ?"),
            "source_ref filter must NOT be in outer WHERE for Nodes driver",
        );
    }

    // Each filter must be pushed down once — no duplicate binds.
    #[test]
    fn nodes_driver_pushes_multiple_filters_into_base_candidates() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .filter_logical_id_eq("meeting-1")
                .filter_json_text_eq("$.status", "active")
                .filter_json_integer_gte("$.priority", 5)
                .filter_source_ref_eq("ref-abc")
                .limit(1)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::Nodes);
        assert!(
            compiled.sql.contains("src.logical_id = ?"),
            "logical_id filter must be in base_candidates",
        );
        assert!(
            compiled.sql.contains("json_extract(src.properties, ?"),
            "JSON filters must be in base_candidates",
        );
        assert!(
            compiled.sql.contains("src.source_ref = ?"),
            "source_ref filter must be in base_candidates",
        );
        use crate::BindValue;
        assert_eq!(
            compiled
                .binds
                .iter()
                .filter(|b| matches!(b, BindValue::Text(s) if s == "meeting-1"))
                .count(),
            1,
            "logical_id bind must not be duplicated"
        );
        assert_eq!(
            compiled
                .binds
                .iter()
                .filter(|b| matches!(b, BindValue::Text(s) if s == "ref-abc"))
                .count(),
            1,
            "source_ref bind must not be duplicated"
        );
    }

    // FTS driver: JSON predicates stay in the outer WHERE (n.*), not src.
    #[test]
    fn fts_driver_keeps_json_filter_in_outer_where() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("budget", 5)
                .filter_json_text_eq("$.status", "active")
                .limit(5)
                .into_ast(),
        )
        .expect("compiled query");
        assert_eq!(compiled.driving_table, DrivingTable::FtsNodes);
        assert!(
            compiled.sql.contains("json_extract(n.properties, ?"),
            "JSON filter must be in outer WHERE for FTS driver, got:\n{}",
            compiled.sql,
        );
        assert!(
            !compiled.sql.contains("json_extract(src.properties, ?"),
            "JSON filter must NOT be in base_candidates for FTS driver",
        );
    }

    // The MATCH bind must carry the sanitized (quoted) query text.
    #[test]
    fn fts5_query_bind_is_sanitized() {
        let compiled = compile_query(
            &QueryBuilder::nodes("Meeting")
                .text_search("User's name", 5)
                .limit(5)
                .into_ast(),
        )
        .expect("compiled query");
        use crate::BindValue;
        assert!(
            compiled
                .binds
                .iter()
                .any(|b| matches!(b, BindValue::Text(s) if s == "\"User's\" \"name\"")),
            "FTS5 query bind should be sanitized; got {:?}",
            compiled.binds
        );
    }
}