pub mod eval;
pub mod operators;
use crate::parser::ast::*;
use crate::planner::{LogicalPlan, TemporalFilterPlan};
use cypherlite_core::{EdgeId, LabelRegistry, NodeId, PropertyValue};
use cypherlite_storage::StorageEngine;
use std::collections::HashMap;
/// Lookup for user- or plugin-provided scalar functions callable from queries.
pub trait ScalarFnLookup {
    /// Invokes the scalar function `name` with `args`.
    ///
    /// Returns `None` when no function with that name is registered,
    /// otherwise the function's result (or an execution error).
    fn call_scalar(&self, name: &str, args: &[Value]) -> Option<Result<Value, ExecutionError>>;
}
/// No-op lookup: no scalar functions are available.
impl ScalarFnLookup for () {
    fn call_scalar(&self, _name: &str, _args: &[Value]) -> Option<Result<Value, ExecutionError>> {
        None
    }
}
/// Hooks fired around create/update/delete mutations.
///
/// Each `fire_*` method may abort the surrounding operation by returning
/// an error (see the `before_*` call sites in the mutation operators).
pub trait TriggerLookup {
    /// Fired before a create mutation is applied.
    fn fire_before_create(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
    /// Fired after a create mutation has been applied.
    fn fire_after_create(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
    /// Fired before an update mutation is applied.
    fn fire_before_update(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
    /// Fired after an update mutation has been applied.
    fn fire_after_update(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
    /// Fired before a delete mutation is applied.
    fn fire_before_delete(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
    /// Fired after a delete mutation has been applied.
    fn fire_after_delete(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError>;
}
/// No-op trigger implementation: every hook succeeds without side effects.
impl TriggerLookup for () {
    fn fire_before_create(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
    fn fire_after_create(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
    fn fire_before_update(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
    fn fire_after_update(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
    fn fire_before_delete(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
    fn fire_after_delete(
        &self,
        _ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        Ok(())
    }
}
#[cfg(feature = "plugin")]
/// Fires every registered plugin trigger; the first trigger error aborts
/// the sequence and is surfaced as an `ExecutionError`.
///
/// NOTE(review): triggers run in whatever order `self.list()` yields —
/// whether that order is stable is not visible from here; confirm in
/// `PluginRegistry`.
impl TriggerLookup
    for cypherlite_core::plugin::PluginRegistry<dyn cypherlite_core::plugin::Trigger>
{
    fn fire_before_create(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_before_create(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
    fn fire_after_create(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_after_create(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
    fn fire_before_update(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_before_update(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
    fn fire_after_update(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_after_update(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
    fn fire_before_delete(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_before_delete(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
    fn fire_after_delete(
        &self,
        ctx: &cypherlite_core::TriggerContext,
    ) -> Result<(), ExecutionError> {
        for name in self.list() {
            if let Some(trigger) = self.get(name) {
                trigger.on_after_delete(ctx).map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            }
        }
        Ok(())
    }
}
#[cfg(feature = "plugin")]
/// Resolves scalar calls against registered plugin functions.
///
/// Arguments are converted to `PropertyValue`s before the call and the
/// result is converted back afterwards; graph-entity arguments therefore
/// fail the conversion and the call reports an error.
impl ScalarFnLookup
    for cypherlite_core::plugin::PluginRegistry<dyn cypherlite_core::plugin::ScalarFunction>
{
    fn call_scalar(&self, name: &str, args: &[Value]) -> Option<Result<Value, ExecutionError>> {
        // Unknown function name -> None, letting the caller fall back.
        let func = self.get(name)?;
        let pv_args: Result<Vec<PropertyValue>, String> =
            args.iter().cloned().map(PropertyValue::try_from).collect();
        let pv_args = match pv_args {
            Ok(a) => a,
            Err(e) => {
                return Some(Err(ExecutionError {
                    message: format!("plugin function argument conversion: {}", e),
                }))
            }
        };
        match func.call(&pv_args) {
            Ok(result) => Some(Ok(Value::from(result))),
            Err(e) => Some(Err(ExecutionError {
                message: e.to_string(),
            })),
        }
    }
}
/// Runtime value produced during query execution: either a scalar
/// (round-trippable with `PropertyValue`) or a reference to a graph
/// entity that cannot be stored as a property.
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    /// Absent value (Cypher NULL).
    Null,
    Bool(bool),
    Int64(i64),
    Float64(f64),
    String(String),
    Bytes(Vec<u8>),
    /// Ordered list; maps to `PropertyValue::Array`.
    List(Vec<Value>),
    /// Reference to a stored node.
    Node(NodeId),
    /// Reference to a stored edge.
    Edge(EdgeId),
    /// Timestamp in milliseconds (see `extract_timestamp`).
    DateTime(i64),
    #[cfg(feature = "subgraph")]
    Subgraph(cypherlite_core::SubgraphId),
    #[cfg(feature = "hypergraph")]
    Hyperedge(cypherlite_core::HyperEdgeId),
    /// Node pinned to a point in time (ms timestamp) — used by
    /// hyperedge temporal references.
    #[cfg(feature = "hypergraph")]
    TemporalNode(NodeId, i64),
}
impl From<PropertyValue> for Value {
    /// Lifts a stored property into its runtime `Value` form.
    /// This conversion is total: every property kind has a value kind.
    fn from(pv: PropertyValue) -> Self {
        match pv {
            PropertyValue::Null => Self::Null,
            PropertyValue::Bool(flag) => Self::Bool(flag),
            PropertyValue::Int64(num) => Self::Int64(num),
            PropertyValue::Float64(num) => Self::Float64(num),
            PropertyValue::String(text) => Self::String(text),
            PropertyValue::Bytes(raw) => Self::Bytes(raw),
            // Arrays convert element-wise into a runtime list.
            PropertyValue::Array(items) => {
                Self::List(items.into_iter().map(Self::from).collect())
            }
            PropertyValue::DateTime(millis) => Self::DateTime(millis),
        }
    }
}
impl TryFrom<Value> for PropertyValue {
    type Error = String;
    /// Converts a runtime value back into a storable property.
    ///
    /// Graph entities (nodes, edges, subgraphs, hyperedges, temporal
    /// references) have no property representation and are rejected.
    fn try_from(v: Value) -> Result<Self, Self::Error> {
        // Shared message for every graph-entity rejection.
        const ENTITY_ERR: &str = "cannot convert graph entity to property";
        match v {
            Value::Null => Ok(Self::Null),
            Value::Bool(flag) => Ok(Self::Bool(flag)),
            Value::Int64(num) => Ok(Self::Int64(num)),
            Value::Float64(num) => Ok(Self::Float64(num)),
            Value::String(text) => Ok(Self::String(text)),
            Value::Bytes(raw) => Ok(Self::Bytes(raw)),
            // Element-wise conversion; the first failing element aborts.
            Value::List(items) => items
                .into_iter()
                .map(Self::try_from)
                .collect::<Result<Vec<_>, _>>()
                .map(Self::Array),
            Value::DateTime(millis) => Ok(Self::DateTime(millis)),
            Value::Node(_) | Value::Edge(_) => Err(ENTITY_ERR.into()),
            #[cfg(feature = "subgraph")]
            Value::Subgraph(_) => Err(ENTITY_ERR.into()),
            #[cfg(feature = "hypergraph")]
            Value::Hyperedge(_) => Err(ENTITY_ERR.into()),
            #[cfg(feature = "hypergraph")]
            Value::TemporalNode(_, _) => Err(ENTITY_ERR.into()),
        }
    }
}
/// One result row: variable name -> bound value.
pub type Record = HashMap<String, Value>;
/// Caller-supplied query parameters: parameter name -> value.
pub type Params = HashMap<String, Value>;
/// Error raised while executing a logical plan. Carries a free-form
/// human-readable message; `Display` prefixes it with "Execution error: ".
#[derive(Debug, Clone, PartialEq)]
pub struct ExecutionError {
    pub message: String,
}
impl std::fmt::Display for ExecutionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("Execution error: ")?;
        f.write_str(&self.message)
    }
}
impl std::error::Error for ExecutionError {}
/// Recursively evaluates a logical plan tree against the storage engine
/// and returns the resulting records.
///
/// Most operators first execute their `source` child, then transform the
/// produced records. `params` carries caller-supplied query parameters,
/// `scalar_fns` resolves user/plugin scalar functions, and `trigger_fns`
/// fires hooks around data mutations.
pub fn execute(
    plan: &LogicalPlan,
    engine: &mut StorageEngine,
    params: &Params,
    scalar_fns: &dyn ScalarFnLookup,
    trigger_fns: &dyn TriggerLookup,
) -> Result<Vec<Record>, ExecutionError> {
    match plan {
        // A single empty record seeds pipelines with no MATCH source
        // (e.g. a bare RETURN).
        LogicalPlan::EmptySource => Ok(vec![Record::new()]),
        LogicalPlan::NodeScan {
            variable,
            label_id,
            limit,
            ..
        } => {
            let mut records = operators::node_scan::execute_node_scan(variable, *label_id, engine);
            // Limit pushed down into the scan: truncate the scan output.
            if let Some(lim) = limit {
                records.truncate(*lim);
            }
            Ok(records)
        }
        LogicalPlan::Expand {
            source,
            src_var,
            rel_var,
            target_var,
            rel_type_id,
            direction,
            temporal_filter,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            // Temporal filter expressions are evaluated once per plan
            // node, not once per record.
            let tf = resolve_temporal_filter(temporal_filter, engine, params, scalar_fns)?;
            Ok(operators::expand::execute_expand(
                source_records,
                src_var,
                rel_var.as_deref(),
                target_var,
                *rel_type_id,
                direction,
                engine,
                tf.as_ref(),
            ))
        }
        LogicalPlan::Filter { source, predicate } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::filter::execute_filter(source_records, predicate, engine, params, scalar_fns)
        }
        LogicalPlan::Project {
            source,
            items,
            distinct,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            let mut result = operators::project::execute_project(
                source_records,
                items,
                engine,
                params,
                scalar_fns,
            )?;
            // DISTINCT dedups the projected rows, keeping first occurrences.
            if *distinct {
                deduplicate_records(&mut result);
            }
            Ok(result)
        }
        LogicalPlan::Sort { source, items } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            Ok(operators::sort::execute_sort(
                source_records,
                items,
                engine,
                params,
                scalar_fns,
            ))
        }
        LogicalPlan::Skip { source, count } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            // SKIP/LIMIT counts must be non-negative literal integers.
            let n = eval_count_expr(count)?;
            Ok(operators::limit::execute_skip(source_records, n))
        }
        LogicalPlan::Limit { source, count } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            let n = eval_count_expr(count)?;
            Ok(operators::limit::execute_limit(source_records, n))
        }
        LogicalPlan::Aggregate {
            source,
            group_keys,
            aggregates,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::aggregate::execute_aggregate(
                source_records,
                group_keys,
                aggregates,
                engine,
                params,
                scalar_fns,
            )
        }
        LogicalPlan::CreateOp { source, pattern } => {
            // A standalone CREATE (no preceding MATCH) runs once against
            // a single empty record.
            let source_records = match source {
                Some(s) => execute(s, engine, params, scalar_fns, trigger_fns)?,
                None => vec![Record::new()],
            };
            operators::create::execute_create(
                source_records,
                pattern,
                engine,
                params,
                scalar_fns,
                trigger_fns,
            )
        }
        LogicalPlan::DeleteOp {
            source,
            exprs,
            detach,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::delete::execute_delete(
                source_records,
                exprs,
                *detach,
                engine,
                params,
                scalar_fns,
                trigger_fns,
            )
        }
        LogicalPlan::SetOp { source, items } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::set_props::execute_set(
                source_records,
                items,
                engine,
                params,
                scalar_fns,
                trigger_fns,
            )
        }
        LogicalPlan::RemoveOp { source, items } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::set_props::execute_remove(
                source_records,
                items,
                engine,
                params,
                scalar_fns,
                trigger_fns,
            )
        }
        LogicalPlan::Unwind {
            source,
            expr,
            variable,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::unwind::execute_unwind(
                source_records,
                expr,
                variable,
                engine,
                params,
                scalar_fns,
            )
        }
        LogicalPlan::With {
            source,
            items,
            where_clause,
            distinct,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            let mut result =
                operators::with::execute_with(source_records, items, engine, params, scalar_fns)?;
            // Order matters: DISTINCT applies before the WHERE filter.
            if *distinct {
                deduplicate_records(&mut result);
            }
            if let Some(ref predicate) = where_clause {
                result = operators::filter::execute_filter(
                    result, predicate, engine, params, scalar_fns,
                )?;
            }
            Ok(result)
        }
        LogicalPlan::MergeOp {
            source,
            pattern,
            on_match,
            on_create,
        } => {
            // Like CREATE: a sourceless MERGE runs once with an empty record.
            let source_records = match source {
                Some(s) => execute(s, engine, params, scalar_fns, trigger_fns)?,
                None => vec![Record::new()],
            };
            operators::merge::execute_merge(
                source_records,
                pattern,
                on_match,
                on_create,
                engine,
                params,
                scalar_fns,
                trigger_fns,
            )
        }
        LogicalPlan::CreateIndex {
            name,
            label,
            property,
        } => {
            let label_id = engine.get_or_create_label(label);
            let prop_key_id = engine.get_or_create_prop_key(property);
            // Default name when the statement didn't provide one.
            let index_name = match name {
                Some(n) => n.clone(),
                None => format!("idx_{}_{}", label, property),
            };
            engine
                .index_manager_mut()
                .create_index(index_name.clone(), label_id, prop_key_id)
                .map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            engine
                .catalog_mut()
                .add_index_definition(cypherlite_storage::index::IndexDefinition {
                    name: index_name,
                    label_id,
                    prop_key_id,
                });
            // Backfill: copy out matching nodes first so the immutable
            // scan borrow ends before the mutable index borrow below.
            let nodes: Vec<(
                cypherlite_core::NodeId,
                Vec<(u32, cypherlite_core::PropertyValue)>,
            )> = engine
                .scan_nodes_by_label(label_id)
                .iter()
                .map(|n| (n.node_id, n.properties.clone()))
                .collect();
            for (nid, props) in &nodes {
                for (pk, v) in props {
                    if *pk == prop_key_id {
                        if let Some(idx) = engine
                            .index_manager_mut()
                            .find_index_mut(label_id, prop_key_id)
                        {
                            idx.insert(v, *nid);
                        }
                    }
                }
            }
            Ok(vec![])
        }
        LogicalPlan::CreateEdgeIndex {
            name,
            rel_type,
            property,
        } => {
            let rel_type_id = engine.get_or_create_rel_type(rel_type);
            let prop_key_id = engine.get_or_create_prop_key(property);
            let index_name = match name {
                Some(n) => n.clone(),
                None => format!("eidx_{}_{}", rel_type, property),
            };
            engine
                .edge_index_manager_mut()
                .create_index(index_name.clone(), rel_type_id, prop_key_id)
                .map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            // Backfill existing edges, mirroring the node-index path above.
            let edges: Vec<(
                cypherlite_core::EdgeId,
                Vec<(u32, cypherlite_core::PropertyValue)>,
            )> = engine
                .scan_edges_by_type(rel_type_id)
                .iter()
                .map(|e| (e.edge_id, e.properties.clone()))
                .collect();
            for (eid, props) in &edges {
                for (pk, v) in props {
                    if *pk == prop_key_id {
                        if let Some(idx) = engine
                            .edge_index_manager_mut()
                            .find_index_mut(rel_type_id, prop_key_id)
                        {
                            idx.insert(v, *eid);
                        }
                    }
                }
            }
            Ok(vec![])
        }
        LogicalPlan::DropIndex { name } => {
            // Try node indexes first; fall back to edge indexes. Only the
            // edge-side error is surfaced when both fail.
            let removed_node = engine.index_manager_mut().drop_index(name);
            if removed_node.is_ok() {
                engine.catalog_mut().remove_index_definition(name);
                return Ok(vec![]);
            }
            engine
                .edge_index_manager_mut()
                .drop_index(name)
                .map_err(|e| ExecutionError {
                    message: e.to_string(),
                })?;
            Ok(vec![])
        }
        LogicalPlan::VarLengthExpand {
            source,
            src_var,
            rel_var,
            target_var,
            rel_type_id,
            direction,
            min_hops,
            max_hops,
            temporal_filter,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            let tf = resolve_temporal_filter(temporal_filter, engine, params, scalar_fns)?;
            Ok(operators::var_length_expand::execute_var_length_expand(
                source_records,
                src_var,
                rel_var.as_deref(),
                target_var,
                *rel_type_id,
                direction,
                *min_hops,
                *max_hops,
                engine,
                tf.as_ref(),
            ))
        }
        LogicalPlan::OptionalExpand {
            source,
            src_var,
            rel_var,
            target_var,
            rel_type_id,
            direction,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            Ok(operators::optional_expand::execute_optional_expand(
                source_records,
                src_var,
                rel_var.as_deref(),
                target_var,
                *rel_type_id,
                direction,
                engine,
            ))
        }
        LogicalPlan::IndexScan {
            variable,
            label_id,
            prop_key,
            lookup_value,
        } => operators::index_scan::execute_index_scan(
            variable,
            *label_id,
            prop_key,
            lookup_value,
            engine,
            params,
            scalar_fns,
        ),
        #[cfg(feature = "subgraph")]
        LogicalPlan::SubgraphScan { variable } => Ok(
            operators::subgraph_scan::execute_subgraph_scan(variable, engine),
        ),
        #[cfg(feature = "subgraph")]
        LogicalPlan::CreateSnapshotOp {
            variable,
            labels: _,
            properties,
            temporal_anchor,
            sub_plan,
            return_vars,
        } => execute_create_snapshot(
            variable.as_deref(),
            properties,
            temporal_anchor.as_ref(),
            sub_plan,
            return_vars,
            engine,
            params,
        ),
        #[cfg(feature = "hypergraph")]
        LogicalPlan::HyperEdgeScan { variable } => Ok(
            operators::hyperedge_scan::execute_hyperedge_scan(variable, engine),
        ),
        #[cfg(feature = "hypergraph")]
        LogicalPlan::CreateHyperedgeOp {
            source,
            variable,
            labels,
            sources,
            targets,
        } => {
            let source_records = match source {
                Some(s) => execute(s, engine, params, scalar_fns, trigger_fns)?,
                None => vec![Record::new()],
            };
            execute_create_hyperedge(
                variable.as_deref(),
                labels,
                sources,
                targets,
                &source_records,
                engine,
                params,
            )
        }
        LogicalPlan::AsOfScan {
            source,
            timestamp_expr,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::temporal_scan::execute_as_of_scan(
                source_records,
                timestamp_expr,
                engine,
                params,
                scalar_fns,
            )
        }
        LogicalPlan::TemporalRangeScan {
            source,
            start_expr,
            end_expr,
        } => {
            let source_records = execute(source, engine, params, scalar_fns, trigger_fns)?;
            operators::temporal_scan::execute_temporal_range_scan(
                source_records,
                start_expr,
                end_expr,
                engine,
                params,
                scalar_fns,
            )
        }
    }
}
/// Evaluates a planned temporal filter into concrete millisecond bounds.
/// Returns `Ok(None)` when the plan carries no temporal filter.
fn resolve_temporal_filter(
    plan: &Option<TemporalFilterPlan>,
    engine: &mut StorageEngine,
    params: &Params,
    scalar_fns: &dyn ScalarFnLookup,
) -> Result<Option<operators::temporal_filter::TemporalFilter>, ExecutionError> {
    let filter_plan = if let Some(p) = plan { p } else { return Ok(None) };
    // Temporal expressions may not reference row variables, so they are
    // evaluated against an empty record.
    let scratch = Record::new();
    let resolved = match filter_plan {
        TemporalFilterPlan::AsOf(expr) => {
            let at =
                extract_timestamp(eval::eval(expr, &scratch, engine, params, scalar_fns)?)?;
            operators::temporal_filter::TemporalFilter::AsOf(at)
        }
        TemporalFilterPlan::Between(start_expr, end_expr) => {
            let start =
                extract_timestamp(eval::eval(start_expr, &scratch, engine, params, scalar_fns)?)?;
            let end =
                extract_timestamp(eval::eval(end_expr, &scratch, engine, params, scalar_fns)?)?;
            operators::temporal_filter::TemporalFilter::Between(start, end)
        }
    };
    Ok(Some(resolved))
}
/// Pulls a millisecond timestamp out of a value; accepts either a
/// `DateTime` or a plain integer, and rejects everything else.
fn extract_timestamp(val: Value) -> Result<i64, ExecutionError> {
    if let Value::DateTime(ms) | Value::Int64(ms) = val {
        return Ok(ms);
    }
    Err(ExecutionError {
        message: "temporal filter expression must evaluate to DateTime or integer".to_string(),
    })
}
/// Resolves a SKIP/LIMIT count. Only non-negative literal integers are
/// accepted; any other expression form is an error.
fn eval_count_expr(expr: &Expression) -> Result<usize, ExecutionError> {
    // First require a literal integer...
    let raw = match expr {
        Expression::Literal(Literal::Integer(n)) => *n,
        _ => {
            return Err(ExecutionError {
                message: "SKIP/LIMIT count must be a literal integer".to_string(),
            })
        }
    };
    // ...then validate its range before the usize cast.
    if raw < 0 {
        Err(ExecutionError {
            message: "SKIP/LIMIT count must be non-negative".to_string(),
        })
    } else {
        Ok(raw as usize)
    }
}
#[cfg(feature = "subgraph")]
#[allow(clippy::too_many_arguments)]
/// Materializes a subgraph snapshot: runs `sub_plan`, collects the node
/// ids bound to `return_vars`, and records them as members of a newly
/// created subgraph carrying the evaluated `properties` plus a
/// `_created_at` timestamp.
fn execute_create_snapshot(
    variable: Option<&str>,
    properties: &Option<crate::parser::ast::MapLiteral>,
    temporal_anchor_expr: Option<&crate::parser::ast::Expression>,
    sub_plan: &LogicalPlan,
    return_vars: &[String],
    engine: &mut StorageEngine,
    params: &Params,
) -> Result<Vec<Record>, ExecutionError> {
    use cypherlite_core::{LabelRegistry, PropertyValue, SubgraphId};
    // The sub-plan runs without plugin scalar functions or triggers.
    let sub_records = execute(sub_plan, engine, params, &(), &())?;
    // Collect distinct node ids bound to the requested variables,
    // preserving first-seen order; non-node bindings are ignored.
    let mut node_ids = Vec::new();
    for record in &sub_records {
        for var in return_vars {
            if let Some(Value::Node(nid)) = record.get(var) {
                if !node_ids.contains(nid) {
                    node_ids.push(*nid);
                }
            }
        }
    }
    let empty_record = Record::new();
    // Evaluate the property map against an empty record (snapshot
    // properties cannot reference row variables).
    let sg_props = match properties {
        Some(map) => {
            let mut result = Vec::new();
            for (key, expr) in map {
                let value = eval::eval(expr, &empty_record, engine, params, &())?;
                let pv = PropertyValue::try_from(value).map_err(|e| ExecutionError {
                    message: format!("invalid property value for '{}': {}", key, e),
                })?;
                let key_id = engine.get_or_create_prop_key(key);
                result.push((key_id, pv));
            }
            result
        }
        None => vec![],
    };
    // Anchor: an explicit expression wins; otherwise fall back to
    // "__query_start_ms__" (presumably injected by the query entry point
    // as the statement's start time — confirm at caller).
    let temporal_anchor = match temporal_anchor_expr {
        Some(expr) => {
            let val = eval::eval(expr, &empty_record, engine, params, &())?;
            let ms = extract_timestamp(val)?;
            Some(ms)
        }
        None => {
            match params.get("__query_start_ms__") {
                Some(Value::Int64(ms)) => Some(*ms),
                _ => None,
            }
        }
    };
    let mut final_props = sg_props;
    // Creation timestamp; falls back to 0 when no query start time is
    // available.
    let now = match params.get("__query_start_ms__") {
        Some(Value::Int64(ms)) => *ms,
        _ => 0,
    };
    let created_key = engine.get_or_create_prop_key("_created_at");
    final_props.push((created_key, PropertyValue::DateTime(now)));
    let sg_id: SubgraphId = engine.create_subgraph(final_props, temporal_anchor);
    for nid in &node_ids {
        engine.add_member(sg_id, *nid).map_err(|e| ExecutionError {
            message: format!("failed to add member to subgraph: {}", e),
        })?;
    }
    // The snapshot yields exactly one record, optionally binding the
    // new subgraph to `variable`.
    let mut result_record = Record::new();
    if let Some(var) = variable {
        result_record.insert(var.to_string(), Value::Subgraph(sg_id));
    }
    Ok(vec![result_record])
}
#[cfg(feature = "hypergraph")]
#[allow(clippy::too_many_arguments)]
/// Creates a hyperedge connecting the resolved `sources` and `targets`
/// participants, optionally binding it to `variable` in the output record.
fn execute_create_hyperedge(
    variable: Option<&str>,
    labels: &[String],
    sources: &[Expression],
    targets: &[Expression],
    source_records: &[Record],
    engine: &mut StorageEngine,
    params: &Params,
) -> Result<Vec<Record>, ExecutionError> {
    use cypherlite_core::{HyperEdgeId, LabelRegistry};
    // Participant expressions are evaluated against the first incoming
    // record only (or an empty record when there is none).
    let record = source_records.first().cloned().unwrap_or_default();
    // Only the first label is used; id 0 when no label was given
    // (presumably a reserved "untyped" rel-type — confirm in core).
    let rel_type_id = if let Some(label) = labels.first() {
        engine.get_or_create_rel_type(label)
    } else {
        0
    };
    let resolved_sources = resolve_hyperedge_participants(sources, &record, engine, params)?;
    let resolved_targets = resolve_hyperedge_participants(targets, &record, engine, params)?;
    let he_id: HyperEdgeId =
        engine.create_hyperedge(rel_type_id, resolved_sources, resolved_targets, vec![]);
    // Emit the incoming bindings plus (optionally) the new hyperedge.
    let mut result_record = record;
    if let Some(var) = variable {
        result_record.insert(var.to_string(), Value::Hyperedge(he_id));
    }
    Ok(vec![result_record])
}
#[cfg(feature = "hypergraph")]
/// Evaluates participant expressions into graph entities.
///
/// `TemporalRef` expressions resolve to a node pinned at a timestamp;
/// every other expression must evaluate to a node, subgraph, or
/// hyperedge value.
fn resolve_hyperedge_participants(
    expressions: &[Expression],
    record: &Record,
    engine: &mut StorageEngine,
    params: &Params,
) -> Result<Vec<cypherlite_core::GraphEntity>, ExecutionError> {
    use cypherlite_core::GraphEntity;
    let mut entities = Vec::new();
    for expr in expressions {
        match expr {
            // node@timestamp form: both parts are evaluated separately.
            Expression::TemporalRef { node, timestamp } => {
                let node_val = eval::eval(node, record, engine, params, &())?;
                let ts_val = eval::eval(timestamp, record, engine, params, &())?;
                let node_id = match node_val {
                    Value::Node(nid) => nid,
                    _ => {
                        return Err(ExecutionError {
                            message: "temporal reference node must resolve to a Node".to_string(),
                        })
                    }
                };
                let ts_ms = extract_timestamp(ts_val)?;
                entities.push(GraphEntity::TemporalRef(node_id, ts_ms));
            }
            _ => {
                let val = eval::eval(expr, record, engine, params, &())?;
                match val {
                    Value::Node(nid) => entities.push(GraphEntity::Node(nid)),
                    // Subgraph participants only exist when both features
                    // are enabled.
                    #[cfg(feature = "subgraph")]
                    Value::Subgraph(sid) => entities.push(GraphEntity::Subgraph(sid)),
                    Value::Hyperedge(hid) => {
                        entities.push(GraphEntity::HyperEdge(hid));
                    }
                    _ => {
                        return Err(ExecutionError {
                            message: "hyperedge participant must resolve to a graph entity"
                                .to_string(),
                        })
                    }
                }
            }
        }
    }
    Ok(entities)
}
/// Removes duplicate records in place, keeping the first occurrence of
/// each and preserving order. Quadratic by necessity: `Value` contains
/// floats and so records are `PartialEq` but not `Hash`/`Eq`.
fn deduplicate_records(records: &mut Vec<Record>) {
    let mut unique: Vec<Record> = Vec::with_capacity(records.len());
    for record in records.drain(..) {
        if !unique.contains(&record) {
            unique.push(record);
        }
    }
    *records = unique;
}
#[cfg(test)]
mod tests {
    //! Unit tests for the `Value`/`PropertyValue` conversions, error
    //! formatting, SKIP/LIMIT count evaluation, and record helpers.
    use super::*;
    // --- PropertyValue -> Value conversions ---
    #[test]
    fn test_value_from_property_value_null() {
        assert_eq!(Value::from(PropertyValue::Null), Value::Null);
    }
    #[test]
    fn test_value_from_property_value_bool() {
        assert_eq!(Value::from(PropertyValue::Bool(true)), Value::Bool(true));
    }
    #[test]
    fn test_value_from_property_value_int64() {
        assert_eq!(Value::from(PropertyValue::Int64(42)), Value::Int64(42));
    }
    #[test]
    fn test_value_from_property_value_float64() {
        assert_eq!(
            Value::from(PropertyValue::Float64(3.15)),
            Value::Float64(3.15)
        );
    }
    #[test]
    fn test_value_from_property_value_string() {
        assert_eq!(
            Value::from(PropertyValue::String("hello".into())),
            Value::String("hello".into())
        );
    }
    #[test]
    fn test_value_from_property_value_bytes() {
        assert_eq!(
            Value::from(PropertyValue::Bytes(vec![1, 2, 3])),
            Value::Bytes(vec![1, 2, 3])
        );
    }
    #[test]
    fn test_value_from_property_value_array() {
        let pv = PropertyValue::Array(vec![PropertyValue::Int64(1), PropertyValue::Null]);
        assert_eq!(
            Value::from(pv),
            Value::List(vec![Value::Int64(1), Value::Null])
        );
    }
    // --- Value -> PropertyValue conversions ---
    #[test]
    fn test_value_try_into_property_value_success() {
        assert_eq!(
            PropertyValue::try_from(Value::Null),
            Ok(PropertyValue::Null)
        );
        assert_eq!(
            PropertyValue::try_from(Value::Bool(false)),
            Ok(PropertyValue::Bool(false))
        );
        assert_eq!(
            PropertyValue::try_from(Value::Int64(10)),
            Ok(PropertyValue::Int64(10))
        );
        assert_eq!(
            PropertyValue::try_from(Value::Float64(1.5)),
            Ok(PropertyValue::Float64(1.5))
        );
        assert_eq!(
            PropertyValue::try_from(Value::String("x".into())),
            Ok(PropertyValue::String("x".into()))
        );
        assert_eq!(
            PropertyValue::try_from(Value::Bytes(vec![0xAB])),
            Ok(PropertyValue::Bytes(vec![0xAB]))
        );
    }
    #[test]
    fn test_value_try_into_property_value_list() {
        let v = Value::List(vec![Value::Int64(1), Value::Bool(true)]);
        let pv = PropertyValue::try_from(v);
        assert_eq!(
            pv,
            Ok(PropertyValue::Array(vec![
                PropertyValue::Int64(1),
                PropertyValue::Bool(true)
            ]))
        );
    }
    // Graph entities must not convert into properties.
    #[test]
    fn test_value_try_into_property_value_node_fails() {
        let result = PropertyValue::try_from(Value::Node(NodeId(1)));
        assert!(result.is_err());
        assert!(result
            .expect_err("should error")
            .contains("cannot convert graph entity"));
    }
    #[test]
    fn test_value_try_into_property_value_edge_fails() {
        let result = PropertyValue::try_from(Value::Edge(EdgeId(1)));
        assert!(result.is_err());
    }
    // --- ExecutionError ---
    #[test]
    fn test_execution_error_display() {
        let err = ExecutionError {
            message: "test error".to_string(),
        };
        assert_eq!(err.to_string(), "Execution error: test error");
    }
    #[test]
    fn test_execution_error_is_error_trait() {
        let err = ExecutionError {
            message: "test".to_string(),
        };
        // Compile-time check that ExecutionError is a std::error::Error.
        let _: &dyn std::error::Error = &err;
    }
    // --- Record / Params aliases ---
    #[test]
    fn test_record_type_is_hashmap() {
        let mut record: Record = Record::new();
        record.insert("n".to_string(), Value::Node(NodeId(1)));
        assert_eq!(record.get("n"), Some(&Value::Node(NodeId(1))));
    }
    #[test]
    fn test_params_type_is_hashmap() {
        let mut params: Params = Params::new();
        params.insert("name".to_string(), Value::String("Alice".into()));
        assert_eq!(
            params.get("name"),
            Some(&Value::String("Alice".to_string()))
        );
    }
    // --- SKIP/LIMIT count evaluation ---
    #[test]
    fn test_eval_count_expr_positive() {
        let expr = Expression::Literal(Literal::Integer(10));
        assert_eq!(eval_count_expr(&expr), Ok(10));
    }
    #[test]
    fn test_eval_count_expr_zero() {
        let expr = Expression::Literal(Literal::Integer(0));
        assert_eq!(eval_count_expr(&expr), Ok(0));
    }
    #[test]
    fn test_eval_count_expr_negative_fails() {
        let expr = Expression::Literal(Literal::Integer(-5));
        assert!(eval_count_expr(&expr).is_err());
    }
    #[test]
    fn test_eval_count_expr_non_integer_fails() {
        let expr = Expression::Variable("n".to_string());
        assert!(eval_count_expr(&expr).is_err());
    }
    #[test]
    fn test_deduplicate_records() {
        let mut r1 = Record::new();
        r1.insert("x".to_string(), Value::Int64(1));
        let mut r2 = Record::new();
        r2.insert("x".to_string(), Value::Int64(2));
        let r3 = r1.clone();
        let mut records = vec![r1, r2, r3];
        deduplicate_records(&mut records);
        assert_eq!(records.len(), 2);
    }
    #[test]
    fn test_value_from_property_value_datetime() {
        assert_eq!(
            Value::from(PropertyValue::DateTime(1_700_000_000_000)),
            Value::DateTime(1_700_000_000_000)
        );
    }
    #[test]
    fn test_value_try_into_property_value_datetime() {
        assert_eq!(
            PropertyValue::try_from(Value::DateTime(1_700_000_000_000)),
            Ok(PropertyValue::DateTime(1_700_000_000_000))
        );
    }
    // Feature-gated Value variants behave like the core graph entities.
    #[cfg(feature = "hypergraph")]
    mod hyperedge_value_tests {
        use super::*;
        use cypherlite_core::HyperEdgeId;
        #[test]
        fn test_value_hyperedge_creation() {
            let val = Value::Hyperedge(HyperEdgeId(42));
            assert_eq!(val, Value::Hyperedge(HyperEdgeId(42)));
        }
        #[test]
        fn test_value_hyperedge_try_into_property_value_fails() {
            let result = PropertyValue::try_from(Value::Hyperedge(HyperEdgeId(1)));
            assert!(result.is_err());
            assert!(result
                .expect_err("should error")
                .contains("cannot convert graph entity"));
        }
        #[test]
        fn test_value_hyperedge_ne_node() {
            let node_val = Value::Node(NodeId(1));
            let hyperedge_val = Value::Hyperedge(HyperEdgeId(1));
            assert_ne!(node_val, hyperedge_val);
        }
        #[test]
        fn test_value_hyperedge_clone() {
            let val = Value::Hyperedge(HyperEdgeId(7));
            let cloned = val.clone();
            assert_eq!(val, cloned);
        }
        #[test]
        fn test_value_hyperedge_debug() {
            let val = Value::Hyperedge(HyperEdgeId(99));
            let debug = format!("{:?}", val);
            assert!(debug.contains("Hyperedge"));
            assert!(debug.contains("99"));
        }
    }
    #[cfg(feature = "subgraph")]
    mod subgraph_value_tests {
        use super::*;
        use cypherlite_core::SubgraphId;
        #[test]
        fn test_value_subgraph_creation() {
            let val = Value::Subgraph(SubgraphId(42));
            assert_eq!(val, Value::Subgraph(SubgraphId(42)));
        }
        #[test]
        fn test_value_subgraph_try_into_property_value_fails() {
            let result = PropertyValue::try_from(Value::Subgraph(SubgraphId(1)));
            assert!(result.is_err());
            assert!(result
                .expect_err("should error")
                .contains("cannot convert graph entity"));
        }
        #[test]
        fn test_value_subgraph_ne_node() {
            let node_val = Value::Node(NodeId(1));
            let subgraph_val = Value::Subgraph(SubgraphId(1));
            assert_ne!(node_val, subgraph_val);
        }
        #[test]
        fn test_value_subgraph_clone() {
            let val = Value::Subgraph(SubgraphId(7));
            let cloned = val.clone();
            assert_eq!(val, cloned);
        }
        #[test]
        fn test_value_subgraph_debug() {
            let val = Value::Subgraph(SubgraphId(99));
            let debug = format!("{:?}", val);
            assert!(debug.contains("Subgraph"));
            assert!(debug.contains("99"));
        }
    }
}