use crate::algebra::Algebra;
use crate::cardinality_estimator::{CardinalityEstimator, EstimatorConfig};
use anyhow::Result;
use dashmap::DashMap;
use parking_lot::RwLock;
use scirs2_core::metrics::MetricsRegistry;
use scirs2_core::profiling::Profiler;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tracing::{debug, info, warn};
/// Tuning knobs for the JIT query compiler.
///
/// NOTE(review): several fields (`enabled`, `cache_ttl`, `adaptive_optimization`,
/// `min_executions_for_reopt`, `compilation_timeout`, `enable_profiling`,
/// `enable_specialization`) are not read anywhere in this file — confirm they
/// are enforced by callers.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JitCompilerConfig {
    /// Master switch for JIT compilation (not consulted in this file).
    pub enabled: bool,
    /// Plan optimization passes run when this is >= 1 (see `generate_execution_plan`).
    pub optimization_level: usize,
    /// Whether compiled plans are cached and reused.
    pub enable_caching: bool,
    /// Cache budget in bytes; the cache-size check assumes ~1 MiB per entry.
    pub max_cache_size: usize,
    /// Intended time-to-live for cached plans (not enforced in this file).
    pub cache_ttl: Duration,
    /// Whether plans should be re-optimized based on runtime feedback.
    pub adaptive_optimization: bool,
    /// Minimum executions before a plan becomes eligible for reoptimization.
    pub min_executions_for_reopt: usize,
    /// Upper bound intended for a single compilation (not enforced in this file).
    pub compilation_timeout: Duration,
    /// Whether per-query execution profiling is collected.
    pub enable_profiling: bool,
    /// Whether specialized operator variants may be emitted.
    pub enable_specialization: bool,
    /// Complexity score above which a warning is logged during compilation.
    pub max_plan_complexity: usize,
}
impl Default for JitCompilerConfig {
    /// Sensible production defaults: caching on, moderate optimization,
    /// a 512 MiB cache budget and a one-hour plan TTL.
    fn default() -> Self {
        const MIB: usize = 1024 * 1024;
        Self {
            enabled: true,
            optimization_level: 2,
            enable_caching: true,
            // 512 MiB plan-cache budget.
            max_cache_size: 512 * MIB,
            // One hour, expressed as 60 * 60 seconds.
            cache_ttl: Duration::from_secs(60 * 60),
            adaptive_optimization: true,
            min_executions_for_reopt: 10,
            compilation_timeout: Duration::from_secs(30),
            enable_profiling: true,
            enable_specialization: true,
            max_plan_complexity: 1000,
        }
    }
}
/// A query that has been lowered to an execution plan.
///
/// Cloning is cheap: all heavy members are behind `Arc`.
#[derive(Clone)]
pub struct CompiledQuery {
    /// Cache key derived from the algebra's structural hash.
    pub id: String,
    /// The original algebra the plan was compiled from.
    pub algebra: Arc<Algebra>,
    /// The lowered, possibly optimized execution plan.
    pub plan: Arc<ExecutionPlan>,
    /// When compilation finished (used to report plan age in `Debug`).
    pub compiled_at: Instant,
    /// Mutable per-query runtime statistics, shared across executions.
    pub stats: Arc<RwLock<ExecutionStats>>,
    /// Static analysis results gathered during compilation.
    pub metadata: QueryMetadata,
}
impl fmt::Debug for CompiledQuery {
    /// Compact debug view: `algebra` and `plan` are omitted, and
    /// `compiled_at` is rendered as the plan's age (elapsed time).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("CompiledQuery");
        dbg.field("id", &self.id);
        dbg.field("compiled_at", &self.compiled_at.elapsed());
        dbg.field("stats", &self.stats);
        dbg.field("metadata", &self.metadata);
        dbg.finish()
    }
}
/// Lowered, linearized form of a query: a flat list of operations plus
/// cost/memory estimates and optimizer annotations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionPlan {
    /// Operations in emission order; joins refer to earlier entries by index.
    pub operations: Vec<PlanOperation>,
    /// Heuristic total cost (see `calculate_plan_cost`).
    pub estimated_cost: f64,
    /// Estimated peak memory in bytes (see `estimate_memory`).
    pub estimated_memory: usize,
    /// Human-readable optimization suggestions from query analysis.
    pub optimization_hints: Vec<String>,
    /// Specializations applied during plan optimization.
    pub specializations: Vec<Specialization>,
}
/// One step of an [`ExecutionPlan`].
///
/// The `*_id` fields are indices into the plan's `operations` vector:
/// `lower_to_operations` records `ops.len()` at the time each sub-plan (or
/// the operation itself) is emitted.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PlanOperation {
    /// Scan the triple store for matches of one pattern.
    ScanTriples {
        /// Index of this operation in the plan.
        pattern_id: usize,
        /// Which positions of the pattern are bound.
        pattern_type: PatternType,
        /// Estimated number of matching triples.
        estimated_cardinality: usize,
    },
    /// Hash-based join of two previously emitted sub-plans.
    HashJoin {
        /// Index where the left sub-plan starts.
        left_id: usize,
        /// Index where the right sub-plan starts.
        right_id: usize,
        /// Variables shared by both sides (the join keys).
        join_variables: Vec<String>,
        /// Concrete join algorithm to use.
        strategy: JitJoinStrategy,
    },
    /// Fallback nested-loop join (not emitted by the current lowering).
    NestedLoopJoin {
        left_id: usize,
        right_id: usize,
        join_variables: Vec<String>,
    },
    /// Apply a filter expression to the preceding sub-plan.
    Filter {
        /// Index of this filter operation in the plan.
        expr_id: usize,
        /// Coarse classification of the filter expression.
        filter_type: FilterType,
    },
    /// Project the solution down to the listed variables.
    Project { variables: Vec<String> },
    /// Order results by the listed variables (parallel `ascending` flags).
    Sort {
        variables: Vec<String>,
        ascending: Vec<bool>,
    },
    /// Keep at most `limit` results.
    Limit { limit: usize },
    /// Skip the first `offset` results.
    Offset { offset: usize },
    /// Remove duplicate solutions.
    Distinct,
    /// Union of the sub-plans starting at the given indices.
    Union { branches: Vec<usize> },
    /// Left-outer (OPTIONAL) join of two sub-plans.
    Optional { left_id: usize, right_id: usize },
}
/// Which positions (subject/predicate/object) of a triple pattern are bound
/// to constants; drives index selection for scans.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum PatternType {
    /// `?s ?p ?o` — full scan.
    AllVariables,
    /// Subject constant, predicate/object variable.
    SubjectBound,
    /// Predicate constant only.
    PredicateBound,
    /// Object constant only.
    ObjectBound,
    /// Subject and predicate constant.
    SubjectPredicateBound,
    /// Subject and object constant.
    SubjectObjectBound,
    /// Predicate and object constant.
    PredicateObjectBound,
    /// All three positions constant — existence check.
    FullyBound,
}
/// Join algorithm chosen for a [`PlanOperation::HashJoin`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum JitJoinStrategy {
    /// Build/probe hash join (the default emitted by lowering).
    Hash,
    /// Sort both inputs, then merge.
    SortMerge,
    /// Probe an index for each left-side binding.
    IndexNestedLoop,
    /// Substitute left bindings into the right side (bind join).
    Bind,
}
/// Coarse classification of a filter expression, used to pick a specialized
/// evaluation path. Lowering currently emits only `Complex`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum FilterType {
    /// Simple `=` / `!=` comparison.
    Equality,
    /// Numeric ordering comparison (`<`, `>`, ...).
    NumericComparison,
    /// String function such as CONTAINS/STRSTARTS.
    StringOperation,
    /// Regular-expression match.
    Regex,
    /// Boolean combinators (`&&`, `||`, `!`).
    BooleanLogic,
    /// Anything not covered by the specialized cases.
    Complex,
}
/// Record of one specialization applied to a plan during optimization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Specialization {
    /// Category of the specialization.
    pub spec_type: SpecializationType,
    /// Human-readable explanation of what was specialized.
    pub description: String,
    /// Expected speedup multiplier (e.g. 1.5 = 50% faster) — heuristic.
    pub speedup_factor: f64,
}
/// Categories of plan specializations the optimizer can apply.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum SpecializationType {
    /// Specialized scan code for a particular pattern binding shape.
    PatternScanning,
    /// Join-algorithm selection based on input characteristics.
    JoinStrategy,
    /// Pushing filters below joins/scans.
    FilterPushdown,
    /// Choosing a concrete index for a scan.
    IndexUsage,
    /// SIMD-vectorized operator variants.
    SimdVectorization,
    /// Parallel operator execution.
    ParallelExecution,
}
/// Runtime statistics accumulated across executions of one compiled query.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ExecutionStats {
    /// Number of times the query has been executed.
    pub execution_count: usize,
    /// Sum of all execution durations.
    pub total_execution_time: Duration,
    /// `total_execution_time / execution_count`, refreshed on every record.
    pub avg_execution_time: Duration,
    /// Fastest observed execution, if any.
    pub min_execution_time: Option<Duration>,
    /// Slowest observed execution, if any.
    pub max_execution_time: Option<Duration>,
    /// Total number of result rows produced across executions.
    pub total_results: usize,
    /// Mean result-row count per execution.
    pub avg_results: f64,
    // Skipped during serialization: `Instant` has no serde representation.
    #[serde(skip)]
    pub last_executed: Option<Instant>,
    /// Wall-clock time spent compiling this query.
    pub compilation_time: Duration,
    /// How many times the plan has been reoptimized.
    pub reoptimization_count: usize,
}
impl ExecutionStats {
    /// Folds one execution (its wall-clock `duration` and `result_count`)
    /// into the running aggregates: counts, totals, averages, min/max and
    /// the last-executed timestamp.
    pub fn record_execution(&mut self, duration: Duration, result_count: usize) {
        let runs = self.execution_count + 1;
        self.execution_count = runs;
        self.total_execution_time += duration;
        self.avg_execution_time = self.total_execution_time / runs as u32;
        self.total_results += result_count;
        self.avg_results = self.total_results as f64 / runs as f64;
        self.last_executed = Some(Instant::now());
        // First sample seeds both extremes; later samples tighten them.
        self.min_execution_time = match self.min_execution_time {
            Some(min) if min < duration => Some(min),
            _ => Some(duration),
        };
        self.max_execution_time = match self.max_execution_time {
            Some(max) if max > duration => Some(max),
            _ => Some(duration),
        };
    }

    /// True once the query has run at least `min_executions` times AND its
    /// average latency exceeds the fixed 100 ms reoptimization threshold.
    pub fn should_reoptimize(&self, min_executions: usize) -> bool {
        if self.execution_count < min_executions {
            return false;
        }
        self.avg_execution_time > Duration::from_millis(100)
    }
}
/// Static analysis results for a query, produced by `analyze_query`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct QueryMetadata {
    /// Heuristic complexity score, capped at 1000.
    pub complexity: usize,
    /// Estimated peak memory use in bytes.
    pub estimated_memory: usize,
    /// Number of triple patterns across all BGPs.
    pub triple_pattern_count: usize,
    /// Number of join operators (including left joins).
    pub join_count: usize,
    /// Number of FILTER operators.
    pub filter_count: usize,
    /// Whether the query contains GROUP/aggregation.
    pub has_aggregation: bool,
    /// Whether the query contains OPTIONAL (left join).
    pub has_optional: bool,
    /// Whether the query contains UNION.
    pub has_union: bool,
    /// Human-readable optimization suggestions.
    pub optimization_opportunities: Vec<String>,
}
/// JIT-style query compiler: lowers SPARQL algebra into cached execution
/// plans and tracks compilation/cache statistics.
pub struct QueryJitCompiler {
    /// Compiler tuning knobs.
    config: JitCompilerConfig,
    /// Plan cache keyed by the query's structural hash id.
    query_cache: Arc<DashMap<String, Arc<CompiledQuery>>>,
    // Currently unused — kept for future instrumentation.
    #[allow(dead_code)]
    metrics: Arc<MetricsRegistry>,
    // Currently unused — kept for future instrumentation.
    #[allow(dead_code)]
    profiler: Arc<Profiler>,
    /// Estimates triple-pattern cardinalities for plan costing.
    cardinality_estimator: Arc<CardinalityEstimator>,
    /// Fixed seed placeholder; not read anywhere in this file.
    _rng_seed: u64,
    /// Aggregate compiler statistics (hits, misses, timings, evictions).
    stats: Arc<RwLock<CompilerStats>>,
}
/// Aggregate statistics for the compiler itself (not per-query).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct CompilerStats {
    /// Number of successful compilations.
    pub total_compilations: usize,
    /// Total wall-clock time spent compiling.
    pub total_compilation_time: Duration,
    /// Queries served from the plan cache.
    pub cache_hits: usize,
    /// Queries that required a fresh compilation.
    pub cache_misses: usize,
    /// Entries evicted to stay within the cache budget.
    pub cache_evictions: usize,
    /// Compilations that ended in error (not updated in this file).
    pub failed_compilations: usize,
    /// Mean compilation time, refreshed on every compilation.
    pub avg_compilation_time: Duration,
}
impl QueryJitCompiler {
    /// Builds a compiler from `config` with fresh metrics/profiler handles,
    /// a default-configured cardinality estimator and an empty plan cache.
    pub fn new(config: JitCompilerConfig) -> Result<Self> {
        let metrics = Arc::new(MetricsRegistry::new());
        let profiler = Arc::new(Profiler::new());
        let cardinality_estimator = Arc::new(CardinalityEstimator::new(EstimatorConfig::default()));
        Ok(Self {
            config,
            query_cache: Arc::new(DashMap::new()),
            metrics,
            profiler,
            cardinality_estimator,
            _rng_seed: 42,
            stats: Arc::new(RwLock::new(CompilerStats::default())),
        })
    }

    /// Compiles `algebra` into a reusable [`CompiledQuery`].
    ///
    /// With caching enabled, a structurally identical query (matched by the
    /// hash of its `Debug` representation) is served straight from the
    /// cache. Otherwise the query is analyzed, lowered to an execution plan,
    /// optimized when `optimization_level >= 1`, and cached if enabled.
    ///
    /// NOTE(review): `config.enabled` is never consulted here — compilation
    /// proceeds even when the compiler is configured as disabled. Confirm
    /// callers are expected to gate on that flag.
    ///
    /// # Errors
    /// Propagates failures from query analysis or plan generation.
    pub fn compile(&mut self, algebra: &Algebra) -> Result<Arc<CompiledQuery>> {
        let start_time = Instant::now();
        let query_id = self.generate_query_id(algebra);
        if self.config.enable_caching {
            if let Some(cached) = self.query_cache.get(&query_id) {
                self.record_cache_hit();
                debug!("JIT cache hit for query: {}", query_id);
                return Ok(cached.clone());
            }
        }
        self.record_cache_miss();
        info!("Compiling query: {}", query_id);
        let metadata = self.analyze_query(algebra)?;
        if metadata.complexity > self.config.max_plan_complexity {
            // Only a warning: no alternative "basic execution" path exists
            // here — the full plan is still generated below.
            warn!(
                "Query complexity ({}) exceeds threshold ({}), using basic execution",
                metadata.complexity, self.config.max_plan_complexity
            );
        }
        let plan = self.generate_execution_plan(algebra, &metadata)?;
        let compiled = Arc::new(CompiledQuery {
            id: query_id.clone(),
            algebra: Arc::new(algebra.clone()),
            plan: Arc::new(plan),
            compiled_at: Instant::now(),
            stats: Arc::new(RwLock::new(ExecutionStats {
                compilation_time: start_time.elapsed(),
                ..Default::default()
            })),
            metadata,
        });
        if self.config.enable_caching {
            self.insert_into_cache(query_id.clone(), compiled.clone())?;
        }
        let compilation_time = start_time.elapsed();
        self.record_compilation(compilation_time);
        info!(
            "Query compiled successfully in {:?}: {}",
            compilation_time, query_id
        );
        Ok(compiled)
    }

    /// Derives a cache key from the hash of the algebra's `Debug` output.
    ///
    /// `DefaultHasher` is not stable across Rust releases and collisions are
    /// possible; the key is only used as an in-process cache key.
    fn generate_query_id(&self, algebra: &Algebra) -> String {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let mut hasher = DefaultHasher::new();
        format!("{:?}", algebra).hash(&mut hasher);
        format!("query_{:x}", hasher.finish())
    }

    /// Walks the algebra tree and produces [`QueryMetadata`]: operator
    /// counts, feature flags, a complexity score, a memory estimate and
    /// optimization hints.
    fn analyze_query(&self, algebra: &Algebra) -> Result<QueryMetadata> {
        let mut metadata = QueryMetadata::default();
        self.analyze_algebra_recursive(algebra, &mut metadata);
        metadata.complexity = self.calculate_complexity(&metadata);
        metadata.estimated_memory = self.estimate_memory(&metadata);
        metadata.optimization_opportunities = self.identify_optimizations(&metadata);
        Ok(metadata)
    }

    /// Recursively tallies operators into `metadata`. Algebra variants not
    /// listed here are ignored (the catch-all arm).
    #[allow(clippy::only_used_in_recursion)]
    fn analyze_algebra_recursive(&self, algebra: &Algebra, metadata: &mut QueryMetadata) {
        match algebra {
            Algebra::Bgp(patterns) => {
                metadata.triple_pattern_count += patterns.len();
            }
            Algebra::Join { left, right } => {
                metadata.join_count += 1;
                self.analyze_algebra_recursive(left, metadata);
                self.analyze_algebra_recursive(right, metadata);
            }
            Algebra::Filter { pattern, .. } => {
                metadata.filter_count += 1;
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::LeftJoin { left, right, .. } => {
                // OPTIONAL counts as a join for complexity purposes.
                metadata.has_optional = true;
                metadata.join_count += 1;
                self.analyze_algebra_recursive(left, metadata);
                self.analyze_algebra_recursive(right, metadata);
            }
            Algebra::Union { left, right } => {
                metadata.has_union = true;
                self.analyze_algebra_recursive(left, metadata);
                self.analyze_algebra_recursive(right, metadata);
            }
            Algebra::Group { pattern, .. } => {
                metadata.has_aggregation = true;
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::Project { pattern, .. }
            | Algebra::Distinct { pattern }
            | Algebra::Reduced { pattern }
            | Algebra::OrderBy { pattern, .. } => {
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::Slice {
                pattern,
                offset: _,
                limit: _,
            } => {
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::Graph { graph: _, pattern } => {
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::Extend { pattern, .. } => {
                self.analyze_algebra_recursive(pattern, metadata);
            }
            Algebra::Minus { left, right } => {
                self.analyze_algebra_recursive(left, metadata);
                self.analyze_algebra_recursive(right, metadata);
            }
            _ => {}
        }
    }

    /// Heuristic complexity score, capped at 1000. Joins are weighted
    /// quadratically so join-heavy queries dominate the score.
    fn calculate_complexity(&self, metadata: &QueryMetadata) -> usize {
        let mut score = 0;
        score += metadata.triple_pattern_count * 10;
        score += metadata.join_count.pow(2) * 20;
        score += metadata.filter_count * 15;
        if metadata.has_aggregation {
            score += 50;
        }
        if metadata.has_optional {
            score += 30;
        }
        if metadata.has_union {
            score += 25;
        }
        score.min(1000)
    }

    /// Rough memory estimate in bytes: a 1 MiB base, ~100 KiB per pattern,
    /// and 50 KiB doubled per join (exponential in join count).
    fn estimate_memory(&self, metadata: &QueryMetadata) -> usize {
        let base_memory = 1024 * 1024;
        let pattern_memory = metadata.triple_pattern_count * 100 * 1024;
        let join_memory = if metadata.join_count > 0 {
            2_usize.pow(metadata.join_count as u32) * 50 * 1024
        } else {
            0
        };
        base_memory + pattern_memory + join_memory
    }

    /// Produces human-readable optimization suggestions from the metadata.
    fn identify_optimizations(&self, metadata: &QueryMetadata) -> Vec<String> {
        let mut opportunities = Vec::new();
        if metadata.join_count > 2 {
            opportunities.push("Consider join reordering".to_string());
        }
        if metadata.filter_count > 0 {
            opportunities.push("Filter pushdown optimization".to_string());
        }
        if metadata.triple_pattern_count > 5 {
            opportunities.push("Pattern specialization".to_string());
        }
        if metadata.has_aggregation {
            opportunities.push("Streaming aggregation".to_string());
        }
        opportunities
    }

    /// Lowers the algebra into a flat operation list, runs plan optimization
    /// when `optimization_level >= 1`, and attaches cost/memory estimates.
    fn generate_execution_plan(
        &self,
        algebra: &Algebra,
        metadata: &QueryMetadata,
    ) -> Result<ExecutionPlan> {
        let mut operations = Vec::new();
        let mut specializations = Vec::new();
        self.lower_to_operations(algebra, &mut operations)?;
        if self.config.optimization_level >= 1 {
            self.optimize_plan(&mut operations, &mut specializations)?;
        }
        let estimated_cost = self.calculate_plan_cost(&operations);
        Ok(ExecutionPlan {
            operations,
            estimated_cost,
            estimated_memory: metadata.estimated_memory,
            optimization_hints: metadata.optimization_opportunities.clone(),
            specializations,
        })
    }

    /// Recursively appends operations for `algebra` to `ops`.
    ///
    /// Index convention: `ops.len()` captured before a push is the index the
    /// pushed operation will occupy; for joins, the indices captured before
    /// lowering each side mark where that sub-plan starts.
    #[allow(clippy::ptr_arg)]
    fn lower_to_operations(&self, algebra: &Algebra, ops: &mut Vec<PlanOperation>) -> Result<()> {
        match algebra {
            Algebra::Bgp(patterns) => {
                let pattern_type = self.determine_pattern_type(algebra);
                // Estimate from the first pattern only; fall back to a fixed
                // default when the BGP is empty or estimation fails.
                let estimated_cardinality = if !patterns.is_empty() {
                    match self
                        .cardinality_estimator
                        .estimate_triple_pattern(&patterns[0])
                    {
                        Ok(cardinality) => cardinality,
                        Err(e) => {
                            warn!("Cardinality estimation failed: {}, using default", e);
                            10_000
                        }
                    }
                } else {
                    10_000
                };
                debug!(
                    "BGP cardinality estimate: {} for {} patterns",
                    estimated_cardinality,
                    patterns.len()
                );
                ops.push(PlanOperation::ScanTriples {
                    pattern_id: ops.len(),
                    pattern_type,
                    estimated_cardinality: estimated_cardinality.try_into().unwrap_or(10_000),
                });
            }
            Algebra::Join { left, right } => {
                let left_start = ops.len();
                self.lower_to_operations(left, ops)?;
                let right_start = ops.len();
                self.lower_to_operations(right, ops)?;
                // Join keys are the variables visible on both sides.
                let left_vars = left.variables();
                let right_vars = right.variables();
                let join_variables: Vec<String> = left_vars
                    .iter()
                    .filter(|v| right_vars.contains(v))
                    .map(|v| v.name().to_string())
                    .collect();
                ops.push(PlanOperation::HashJoin {
                    left_id: left_start,
                    right_id: right_start,
                    join_variables,
                    strategy: JitJoinStrategy::Hash,
                });
            }
            Algebra::Filter { pattern, .. } => {
                self.lower_to_operations(pattern, ops)?;
                // ops.len() is evaluated before the push, so expr_id is the
                // index of this Filter operation itself.
                ops.push(PlanOperation::Filter {
                    expr_id: ops.len(),
                    filter_type: FilterType::Complex,
                });
            }
            Algebra::Project { pattern, variables } => {
                self.lower_to_operations(pattern, ops)?;
                ops.push(PlanOperation::Project {
                    variables: variables.iter().map(|v| v.name().to_string()).collect(),
                });
            }
            Algebra::Distinct { pattern } => {
                self.lower_to_operations(pattern, ops)?;
                ops.push(PlanOperation::Distinct);
            }
            Algebra::Slice {
                pattern,
                offset,
                limit,
            } => {
                self.lower_to_operations(pattern, ops)?;
                if let Some(off) = offset {
                    ops.push(PlanOperation::Offset { offset: *off });
                }
                if let Some(lim) = limit {
                    ops.push(PlanOperation::Limit { limit: *lim });
                }
            }
            _ => {
                // Fallback for algebra variants without dedicated lowering:
                // emit a generic scan with a fixed cardinality guess.
                ops.push(PlanOperation::ScanTriples {
                    pattern_id: ops.len(),
                    pattern_type: PatternType::AllVariables,
                    estimated_cardinality: 1000,
                });
            }
        }
        Ok(())
    }

    /// Classifies a BGP's binding shape.
    ///
    /// TODO(review): placeholder — always returns `AllVariables`; the bound
    /// positions of the pattern are not inspected yet.
    fn determine_pattern_type(&self, _algebra: &Algebra) -> PatternType {
        PatternType::AllVariables
    }

    /// Plan-level optimization pass: currently records a pattern-scanning
    /// specialization for every scan operation.
    #[allow(clippy::ptr_arg)]
    fn optimize_plan(
        &self,
        operations: &mut Vec<PlanOperation>,
        specializations: &mut Vec<Specialization>,
    ) -> Result<()> {
        for op in operations.iter_mut() {
            if let PlanOperation::ScanTriples { pattern_type, .. } = op {
                specializations.push(Specialization {
                    spec_type: SpecializationType::PatternScanning,
                    description: format!("Specialized scan for pattern type: {:?}", pattern_type),
                    speedup_factor: 1.5,
                });
            }
        }
        Ok(())
    }

    /// Sums fixed per-operation cost heuristics; scans cost 0.1 per
    /// estimated row.
    fn calculate_plan_cost(&self, operations: &[PlanOperation]) -> f64 {
        let mut total_cost = 0.0;
        for op in operations {
            total_cost += match op {
                PlanOperation::ScanTriples {
                    estimated_cardinality,
                    ..
                } => *estimated_cardinality as f64 * 0.1,
                PlanOperation::HashJoin { .. } => 100.0,
                PlanOperation::NestedLoopJoin { .. } => 500.0,
                PlanOperation::Filter { .. } => 10.0,
                PlanOperation::Project { .. } => 5.0,
                PlanOperation::Sort { .. } => 200.0,
                PlanOperation::Limit { .. } => 1.0,
                PlanOperation::Offset { .. } => 1.0,
                PlanOperation::Distinct => 150.0,
                PlanOperation::Union { .. } => 50.0,
                PlanOperation::Optional { .. } => 120.0,
            };
        }
        total_cost
    }

    /// Inserts a compiled query, first evicting entries until the cache is
    /// within its configured budget.
    ///
    /// Size accounting is approximate: each cached entry is assumed to
    /// occupy ~1 MiB. Fix over the previous version: eviction now loops —
    /// a single eviction could leave the cache permanently over budget.
    fn insert_into_cache(&self, query_id: String, compiled: Arc<CompiledQuery>) -> Result<()> {
        while !self.query_cache.is_empty()
            && self.query_cache.len() * 1024 * 1024 > self.config.max_cache_size
        {
            self.evict_cache_entry()?;
        }
        self.query_cache.insert(query_id, compiled);
        Ok(())
    }

    /// Removes one arbitrary cache entry (DashMap iteration order is
    /// unspecified — this is not LRU) and counts the eviction.
    fn evict_cache_entry(&self) -> Result<()> {
        if let Some(key) = self.query_cache.iter().next().map(|e| e.key().clone()) {
            self.query_cache.remove(&key);
            self.stats.write().cache_evictions += 1;
            debug!("Evicted cached query: {}", key);
        }
        Ok(())
    }

    /// Counts a plan-cache hit.
    fn record_cache_hit(&self) {
        self.stats.write().cache_hits += 1;
    }

    /// Counts a plan-cache miss.
    fn record_cache_miss(&self) {
        self.stats.write().cache_misses += 1;
    }

    /// Records one compilation and refreshes the running average.
    fn record_compilation(&self, duration: Duration) {
        let mut stats = self.stats.write();
        stats.total_compilations += 1;
        stats.total_compilation_time += duration;
        // total_compilations was incremented just above, so it is always
        // >= 1 here; the previous `if > 0 { .. } else { Duration::ZERO }`
        // guard was dead code.
        stats.avg_compilation_time = stats.total_compilation_time / stats.total_compilations as u32;
    }

    /// Returns a snapshot of the compiler-wide statistics.
    pub fn stats(&self) -> CompilerStats {
        self.stats.read().clone()
    }

    /// Drops every cached plan.
    pub fn clear_cache(&self) {
        self.query_cache.clear();
        info!("JIT query cache cleared");
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_jit_compiler_creation() {
        // Construction from the default config must succeed.
        assert!(QueryJitCompiler::new(JitCompilerConfig::default()).is_ok());
    }

    #[test]
    fn test_complexity_calculation() {
        let jit = QueryJitCompiler::new(JitCompilerConfig::default()).unwrap();
        let meta = QueryMetadata {
            triple_pattern_count: 5,
            join_count: 2,
            filter_count: 3,
            has_aggregation: true,
            has_optional: true,
            has_union: false,
            ..Default::default()
        };
        // Score must be positive and respect the 1000 cap.
        let score = jit.calculate_complexity(&meta);
        assert!(score > 0);
        assert!(score <= 1000);
    }

    #[test]
    fn test_memory_estimation() {
        let jit = QueryJitCompiler::new(JitCompilerConfig::default()).unwrap();
        let meta = QueryMetadata {
            triple_pattern_count: 3,
            join_count: 2,
            ..Default::default()
        };
        assert!(jit.estimate_memory(&meta) > 0);
    }

    #[test]
    fn test_cache_operations() {
        let jit = QueryJitCompiler::new(JitCompilerConfig {
            enable_caching: true,
            ..Default::default()
        })
        .unwrap();
        // Cache starts empty and clearing an empty cache is a no-op.
        assert_eq!(jit.query_cache.len(), 0);
        jit.clear_cache();
        assert_eq!(jit.query_cache.len(), 0);
    }

    #[test]
    fn test_execution_stats() {
        let mut s = ExecutionStats::default();
        s.record_execution(Duration::from_millis(100), 50);
        assert_eq!(s.execution_count, 1);
        assert_eq!(s.total_results, 50);
        assert_eq!(s.avg_results, 50.0);
        s.record_execution(Duration::from_millis(200), 30);
        assert_eq!(s.execution_count, 2);
        assert_eq!(s.total_results, 80);
        assert_eq!(s.avg_results, 40.0);
    }

    #[test]
    fn test_should_reoptimize() {
        let mut s = ExecutionStats::default();
        assert!(!s.should_reoptimize(10));
        // Fifteen slow (150 ms) runs exceed both the execution-count and
        // the 100 ms average-latency thresholds.
        (0..15).for_each(|_| s.record_execution(Duration::from_millis(150), 10));
        assert!(s.should_reoptimize(10));
    }

    #[test]
    fn test_pattern_type_variants() {
        let all = [
            PatternType::AllVariables,
            PatternType::SubjectBound,
            PatternType::PredicateBound,
            PatternType::ObjectBound,
            PatternType::SubjectPredicateBound,
            PatternType::SubjectObjectBound,
            PatternType::PredicateObjectBound,
            PatternType::FullyBound,
        ];
        assert_eq!(all.len(), 8);
    }

    #[test]
    fn test_join_strategy_variants() {
        let all = [
            JitJoinStrategy::Hash,
            JitJoinStrategy::SortMerge,
            JitJoinStrategy::IndexNestedLoop,
            JitJoinStrategy::Bind,
        ];
        assert_eq!(all.len(), 4);
    }
}
#[cfg(test)]
mod extended_tests {
    use super::*;
    use std::time::Duration;

    /// Convenience constructor shared by the compiler-level tests below.
    fn default_compiler() -> QueryJitCompiler {
        QueryJitCompiler::new(JitCompilerConfig::default()).unwrap()
    }

    #[test]
    fn test_default_config_has_reasonable_values() {
        let cfg = JitCompilerConfig::default();
        assert!(cfg.enabled, "Compiler should be enabled by default");
        assert!(cfg.enable_caching, "Caching should be enabled by default");
        assert!(cfg.max_cache_size > 0, "Cache size should be positive");
        assert!(
            cfg.optimization_level <= 3,
            "Optimization level should be 0-3"
        );
        assert!(
            cfg.max_plan_complexity > 0,
            "Max plan complexity should be positive"
        );
    }

    #[test]
    fn test_config_with_disabled_caching() {
        let cfg = JitCompilerConfig {
            enable_caching: false,
            ..Default::default()
        };
        assert!(
            QueryJitCompiler::new(cfg).is_ok(),
            "Compiler should initialize with caching disabled"
        );
    }

    #[test]
    fn test_config_with_disabled_compiler() {
        let cfg = JitCompilerConfig {
            enabled: false,
            ..Default::default()
        };
        assert!(
            QueryJitCompiler::new(cfg).is_ok(),
            "Compiler should initialize even when disabled"
        );
    }

    #[test]
    fn test_execution_stats_initial_state() {
        let s = ExecutionStats::default();
        assert_eq!(s.execution_count, 0);
        assert_eq!(s.total_results, 0);
        assert!(s.min_execution_time.is_none());
        assert!(s.max_execution_time.is_none());
    }

    #[test]
    fn test_execution_stats_min_max_tracking() {
        let mut s = ExecutionStats::default();
        for (ms, rows) in [(50, 10), (200, 20), (100, 15)] {
            s.record_execution(Duration::from_millis(ms), rows);
        }
        assert_eq!(s.min_execution_time, Some(Duration::from_millis(50)));
        assert_eq!(s.max_execution_time, Some(Duration::from_millis(200)));
    }

    #[test]
    fn test_execution_stats_average_results() {
        let mut s = ExecutionStats::default();
        for rows in [10, 20, 30] {
            s.record_execution(Duration::from_millis(10), rows);
        }
        assert!(
            (s.avg_results - 20.0).abs() < 0.001,
            "Average results should be 20.0"
        );
    }

    #[test]
    fn test_execution_stats_total_time_accumulates() {
        let mut s = ExecutionStats::default();
        s.record_execution(Duration::from_millis(100), 5);
        s.record_execution(Duration::from_millis(200), 5);
        assert_eq!(s.total_execution_time, Duration::from_millis(300));
    }

    #[test]
    fn test_should_reoptimize_below_min_executions() {
        let mut s = ExecutionStats::default();
        (0..5).for_each(|_| s.record_execution(Duration::from_millis(200), 10));
        assert!(
            !s.should_reoptimize(10),
            "Should not reoptimize below min_executions threshold"
        );
    }

    #[test]
    fn test_should_not_reoptimize_fast_queries() {
        let mut s = ExecutionStats::default();
        (0..20).for_each(|_| s.record_execution(Duration::from_millis(1), 10));
        assert!(
            !s.should_reoptimize(10),
            "Fast queries should not trigger reoptimization"
        );
    }

    #[test]
    fn test_query_metadata_default() {
        let meta = QueryMetadata::default();
        assert_eq!(meta.triple_pattern_count, 0);
        assert!(!meta.has_aggregation);
        assert!(!meta.has_optional);
        assert!(!meta.has_union);
    }

    #[test]
    fn test_query_metadata_with_aggregation() {
        let meta = QueryMetadata {
            has_aggregation: true,
            join_count: 3,
            triple_pattern_count: 4,
            ..Default::default()
        };
        assert!(meta.has_aggregation);
        assert_eq!(meta.join_count, 3);
    }

    #[test]
    fn test_compiler_stats_initial_values() {
        let snapshot = default_compiler().stats();
        assert_eq!(snapshot.total_compilations, 0);
        assert_eq!(snapshot.cache_hits, 0);
        assert_eq!(snapshot.cache_misses, 0);
        assert_eq!(snapshot.cache_evictions, 0);
    }

    #[test]
    fn test_clear_cache_makes_it_empty() {
        let jit = default_compiler();
        jit.clear_cache();
        assert_eq!(jit.query_cache.len(), 0);
    }

    #[test]
    fn test_jit_join_strategy_all_variants() {
        let _: Vec<JitJoinStrategy> = vec![
            JitJoinStrategy::Hash,
            JitJoinStrategy::SortMerge,
            JitJoinStrategy::IndexNestedLoop,
            JitJoinStrategy::Bind,
        ];
    }

    #[test]
    fn test_filter_type_all_variants() {
        let _: Vec<FilterType> = vec![
            FilterType::Equality,
            FilterType::NumericComparison,
            FilterType::StringOperation,
            FilterType::Regex,
            FilterType::BooleanLogic,
            FilterType::Complex,
        ];
    }

    #[test]
    fn test_pattern_type_all_8_variants_coverage() {
        let variants = [
            PatternType::AllVariables,
            PatternType::SubjectBound,
            PatternType::PredicateBound,
            PatternType::ObjectBound,
            PatternType::SubjectPredicateBound,
            PatternType::SubjectObjectBound,
            PatternType::PredicateObjectBound,
            PatternType::FullyBound,
        ];
        assert_eq!(variants.len(), 8);
    }
}