#![allow(unused_variables)]
use std::collections::HashMap;
use interstellar::p;
use interstellar::storage::Graph;
use interstellar::traversal::__;
use interstellar::value::{Value, VertexId};
use crate::common::graphs::create_large_graph;
/// Builds a graph of `count` vertices labeled "node".
///
/// Each vertex carries an `index` (0..count), a `name` ("vertex_{i}"),
/// and a `category` ("cat_{i % 10}", ten distinct categories).
fn create_large_vertex_graph(count: usize) -> Graph {
    let graph = Graph::new();
    for idx in 0..count {
        let props = HashMap::from([
            ("index".to_string(), Value::Int(idx as i64)),
            ("name".to_string(), Value::String(format!("vertex_{}", idx))),
            (
                "category".to_string(),
                Value::String(format!("cat_{}", idx % 10)),
            ),
        ]);
        graph.add_vertex("node", props);
    }
    graph
}
/// Builds a linear chain of `length` vertices linked by "next" edges.
///
/// Each vertex gets matching `index` and `depth` properties. Returns the
/// graph plus the ids of the first and last vertices; panics if `length`
/// is zero (there are no endpoints to return).
fn create_chain_graph(length: usize) -> (Graph, VertexId, VertexId) {
    let graph = Graph::new();
    // (first, last) ids seen so far; None until the first vertex exists.
    let mut endpoints: Option<(VertexId, VertexId)> = None;
    let mut previous: Option<VertexId> = None;
    for i in 0..length {
        let mut props = HashMap::new();
        props.insert("index".to_string(), Value::Int(i as i64));
        props.insert("depth".to_string(), Value::Int(i as i64));
        let id = graph.add_vertex("node", props);
        endpoints = match endpoints {
            None => Some((id, id)),
            Some((first, _)) => Some((first, id)),
        };
        if let Some(prev) = previous {
            graph.add_edge(prev, id, "next", HashMap::new()).unwrap();
        }
        previous = Some(id);
    }
    let (first, last) = endpoints.unwrap();
    (graph, first, last)
}
fn create_dense_graph(vertex_count: usize, edges_per_vertex: usize) -> Graph {
let graph = Graph::new();
let mut ids = Vec::with_capacity(vertex_count);
for i in 0..vertex_count {
let mut props = HashMap::new();
props.insert("index".to_string(), Value::Int(i as i64));
let id = graph.add_vertex("node", props);
ids.push(id);
}
for i in 0..vertex_count {
for j in 1..=edges_per_vertex {
let target = (i + j) % vertex_count;
if target != i {
let _ = graph.add_edge(ids[i], ids[target], "connects", HashMap::new());
}
}
}
graph
}
/// Builds `count` "item" vertices for aggregation tests.
///
/// Per-vertex properties: `index` (0..count), `value` ((i * 7) % 100),
/// `group` ("group_{i % 5}", five distinct groups) and `priority`
/// (i % 3, three distinct levels).
fn create_aggregation_graph(count: usize) -> Graph {
    let graph = Graph::new();
    for idx in 0..count {
        let props = HashMap::from([
            ("index".to_string(), Value::Int(idx as i64)),
            ("value".to_string(), Value::Int((idx * 7 % 100) as i64)),
            (
                "group".to_string(),
                Value::String(format!("group_{}", idx % 5)),
            ),
            ("priority".to_string(), Value::Int((idx % 3) as i64)),
        ]);
        graph.add_vertex("item", props);
    }
    graph
}
#[test]
fn handles_thousand_vertices() {
    // count() and full materialization must agree on 1000 vertices.
    let graph = create_large_vertex_graph(1000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    assert_eq!(g.v().count(), 1000);
    assert_eq!(g.v().to_list().len(), 1000);
}
#[test]
fn handles_ten_thousand_vertices() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    assert_eq!(g.v().count(), 10_000);
    // Exactly the vertices with index 0..5000 satisfy the predicate.
    assert_eq!(g.v().has_where("index", p::lt(5000)).count(), 5000);
}
#[test]
fn limit_on_large_result_set() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // limit(n) must truncate the stream to exactly n results at each size.
    let first_ten = g.v().limit(10).to_list();
    assert_eq!(first_ten.len(), 10);
    let first_hundred = g.v().limit(100).to_list();
    assert_eq!(first_hundred.len(), 100);
    let first_thousand = g.v().limit(1000).to_list();
    assert_eq!(first_thousand.len(), 1000);
}
#[test]
fn pagination_on_large_result_set() {
    let graph = create_large_vertex_graph(1000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Three consecutive 100-row windows over the vertex stream.
    let page1 = g.v().range(0, 100).to_list();
    let page2 = g.v().range(100, 200).to_list();
    let page3 = g.v().range(200, 300).to_list();
    assert_eq!(page1.len(), 100);
    assert_eq!(page2.len(), 100);
    assert_eq!(page3.len(), 100);
    // Adjacent pages must contain distinct vertex ids.
    let page1_ids = page1
        .iter()
        .filter_map(|item| item.as_vertex_id())
        .collect::<std::collections::HashSet<_>>();
    let page2_ids = page2
        .iter()
        .filter_map(|item| item.as_vertex_id())
        .collect::<std::collections::HashSet<_>>();
    assert!(
        page1_ids.is_disjoint(&page2_ids),
        "Pages should not overlap"
    );
}
#[test]
fn dedup_on_large_result_set_with_duplicates() {
    // Two hops in a dense graph revisit vertices, producing duplicates.
    let graph = create_dense_graph(100, 10);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let raw = g.v().out().out().to_list();
    let unique = g.v().out().out().dedup().to_list();
    assert!(
        unique.len() <= raw.len(),
        "Dedup should reduce or maintain count"
    );
    // The graph only has 100 vertices, so dedup can never exceed that.
    assert!(
        unique.len() <= 100,
        "Should have at most 100 unique vertices"
    );
}
#[test]
fn count_on_large_result_set() {
    let graph = create_large_vertex_graph(50_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Full count, then a predicate covering the upper half of the index range.
    assert_eq!(g.v().count(), 50_000);
    assert_eq!(g.v().has_where("index", p::gte(25_000)).count(), 25_000);
}
#[test]
fn order_barrier_with_large_data() {
    let graph = create_large_vertex_graph(5000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Sort all 5000 index values ascending, then keep only the first ten.
    let smallest = g
        .v()
        .values("index")
        .order()
        .by_asc()
        .build()
        .limit(10)
        .to_list();
    assert_eq!(smallest.len(), 10);
    let as_ints: Vec<i64> = smallest.iter().filter_map(|v| v.as_i64()).collect();
    assert_eq!(as_ints, (0..10).collect::<Vec<i64>>());
}
#[test]
fn order_desc_barrier_with_large_data() {
    let graph = create_large_vertex_graph(5000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Sort all 5000 index values descending, then keep only the top ten.
    let largest = g
        .v()
        .values("index")
        .order()
        .by_desc()
        .build()
        .limit(10)
        .to_list();
    assert_eq!(largest.len(), 10);
    let as_ints: Vec<i64> = largest.iter().filter_map(|v| v.as_i64()).collect();
    assert_eq!(as_ints, (4990..5000).rev().collect::<Vec<i64>>());
}
#[test]
fn group_barrier_with_large_data() {
    // 10_000 items spread evenly over 5 groups => 2000 elements per group.
    let graph = create_aggregation_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let grouped = g.v().group().by_key("group").by_value().build().to_list();
    assert_eq!(grouped.len(), 1);
    // Previously a bare `if let` silently skipped every assertion when the
    // result was not a Map; fail loudly on an unexpected shape instead.
    let map = match &grouped[0] {
        Value::Map(map) => map,
        other => panic!("group() should produce a Value::Map, got {:?}", other),
    };
    assert_eq!(map.len(), 5);
    for (key, value) in map.iter() {
        // Likewise, a non-List group value is a failure, not a skip.
        let list = match value {
            Value::List(list) => list,
            other => panic!("group value for key {:?} should be a Value::List, got {:?}", key, other),
        };
        assert_eq!(
            list.len(),
            2000,
            "Each group should have 2000 elements, got {} for key {:?}",
            list.len(),
            key
        );
    }
}
#[test]
fn group_count_barrier_with_large_data() {
    // 10_000 items over 3 priority levels; per-level counts must total 10_000.
    let graph = create_aggregation_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let counts = g.v().group_count().by_key("priority").build().to_list();
    assert_eq!(counts.len(), 1);
    // Previously a bare `if let` silently skipped the assertions when the
    // result was not a Map; fail loudly on an unexpected shape instead.
    let map = match &counts[0] {
        Value::Map(map) => map,
        other => panic!("group_count() should produce a Value::Map, got {:?}", other),
    };
    assert_eq!(map.len(), 3);
    let total: i64 = map.values().filter_map(|v| v.as_i64()).sum();
    assert_eq!(total, 10_000);
}
#[test]
fn sum_barrier_with_large_data() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Gauss: sum of 0..10_000 = 9_999 * 10_000 / 2 = 49_995_000.
    let total = g.v().values("index").sum();
    assert_eq!(total, Value::Int(49_995_000));
}
#[test]
fn min_max_barrier_with_large_data() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Index values span 0..=9999 inclusive.
    assert_eq!(g.v().values("index").min(), Some(Value::Int(0)));
    assert_eq!(g.v().values("index").max(), Some(Value::Int(9999)));
}
#[test]
fn filter_streams_without_buffering() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let results = g.v().has_where("index", p::lt(100)).limit(5).to_list();
    assert_eq!(results.len(), 5);
    // The original loop bound each result's vertex id into an empty `if let`
    // body and asserted nothing (dead code). Make the evident intent explicit:
    // every filtered result must be a vertex.
    for r in &results {
        assert!(
            r.as_vertex_id().is_some(),
            "Expected a vertex result, got {:?}",
            r
        );
    }
}
#[test]
fn navigation_streams_without_buffering() {
    let (graph, start, _end) = create_chain_graph(1000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // emit() yields every intermediate hop; limit(10) stops the walk early
    // even though the repeat is allowed up to 1000 iterations.
    let first_hops = g
        .v_ids([start])
        .repeat(__.out())
        .times(1000)
        .emit()
        .limit(10)
        .to_list();
    assert_eq!(first_hops.len(), 10);
}
#[test]
fn non_barrier_chain_streams() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // A chain of non-barrier steps (label filter, predicate, projection)
    // capped by limit(5).
    let names = g
        .v()
        .has_label("node")
        .has_where("index", p::lt(1000))
        .values("name")
        .limit(5)
        .to_list();
    assert_eq!(names.len(), 5);
}
#[test]
fn barrier_then_stream() {
    let graph = create_aggregation_graph(1000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // order() is a barrier; limit(10) then streams from the sorted output.
    let results = g
        .v()
        .values("value")
        .order()
        .by_asc()
        .build()
        .limit(10)
        .to_list();
    assert_eq!(results.len(), 10);
    let values: Vec<i64> = results.iter().filter_map(|v| v.as_i64()).collect();
    // Every adjacent pair must be non-decreasing.
    assert!(
        values.windows(2).all(|pair| pair[0] <= pair[1]),
        "Should be in ascending order"
    );
}
#[test]
fn count_optimized_execution() {
    let graph = create_large_vertex_graph(100_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    assert_eq!(g.v().count(), 100_000);
    // Exactly half of the vertices satisfy index < 50_000.
    assert_eq!(g.v().has_where("index", p::lt(50_000)).count(), 50_000);
}
#[test]
fn empty_result_from_large_graph() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // No index exceeds 9999, so the predicate matches nothing — both the
    // materialized list and the count must agree on that.
    let matches = g.v().has_where("index", p::gt(100_000)).to_list();
    assert!(matches.is_empty());
    assert_eq!(g.v().has_where("index", p::gt(100_000)).count(), 0);
}
#[test]
fn single_result_from_large_graph() {
    let graph = create_large_vertex_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Exactly one vertex has index == 5000, so one() must succeed.
    assert!(g.v().has_where("index", p::eq(5000i64)).one().is_ok());
}
#[test]
fn deep_traversal_in_chain() {
    // Walking 99 "next" hops from the head of a 100-vertex chain must land
    // on exactly the tail vertex.
    let (graph, start, end) = create_chain_graph(100);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let reached = g.v_ids([start]).repeat(__.out()).times(99).to_list();
    assert_eq!(reached.len(), 1);
    assert_eq!(reached[0].as_vertex_id(), Some(end));
}
#[test]
fn path_tracking_large_traversal() {
    let (graph, start, _end) = create_chain_graph(50);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let paths = g
        .v_ids([start])
        .with_path()
        .repeat(__.out())
        .times(10)
        .path()
        .to_list();
    assert_eq!(paths.len(), 1);
    // Previously a bare `if let` silently skipped the length check when the
    // path was not a List; fail loudly on an unexpected shape instead.
    match &paths[0] {
        // start vertex + 10 hops => 11 path elements.
        Value::List(path) => assert_eq!(path.len(), 11),
        other => panic!("path() should produce a Value::List, got {:?}", other),
    }
}
#[test]
fn aggregation_accuracy_large_data() {
    let graph = create_aggregation_graph(10_000);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Recompute the fixture's value formula ((i * 7) % 100) independently
    // and compare against the traversal's sum barrier.
    let expected: i64 = (0..10_000i64).map(|i| (i * 7) % 100).sum();
    assert_eq!(g.v().values("value").sum(), Value::Int(expected));
}
#[test]
fn large_graph_fixture_structure() {
    let graph = create_large_graph(1000, 5);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    assert_eq!(g.v().count(), 1000);
    // 1000 vertices split across three labels: each gets 333 or 334.
    for label in ["type_a", "type_b", "type_c"] {
        let labeled = g.v().has_label(label).count();
        assert!(labeled >= 333 && labeled <= 334);
    }
}
#[test]
fn large_graph_fixture_properties() {
    let graph = create_large_graph(500, 3);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Every vertex carries an "index" property.
    assert_eq!(g.v().has("index").count(), 500);
    // The fixture spreads vertices over 10 groups and 5 priority levels.
    assert_eq!(g.v().values("group").dedup().to_list().len(), 10);
    assert_eq!(g.v().values("priority").dedup().to_list().len(), 5);
    // "active" splits the 500 vertices into two even halves.
    assert_eq!(g.v().has_value("active", true).count(), 250);
    assert_eq!(g.v().has_value("active", false).count(), 250);
}
#[test]
fn large_graph_fixture_edge_labels() {
    let graph = create_large_graph(100, 6);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Tally edges across the three labels; the fixture must produce some.
    let mut labeled_total = 0;
    for label in ["edge_a", "edge_b", "edge_c"] {
        labeled_total += g.e().has_label(label).count();
    }
    assert!(labeled_total > 0);
    // Every labeled edge carries a "weight" property.
    assert_eq!(g.e().has("weight").count(), labeled_total);
}
#[test]
fn large_graph_traversal_mixed_labels() {
    // Smoke test: a multi-step traversal mixing vertex- and edge-label
    // filters must complete without panicking; the result size depends on
    // how the fixture wires its edges, so no count is asserted.
    let graph = create_large_graph(200, 4);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let _results = g
        .v()
        .has_label("type_a")
        .limit(10)
        .out_labels(&["edge_b"])
        .has_label("type_c")
        .dedup()
        .to_list();
}
#[test]
fn large_graph_aggregation() {
    let graph = create_large_graph(1000, 3);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Index values are 0..1000, so the sum is the triangular number 499_500.
    let expected_sum: i64 = (0..1000i64).sum();
    assert_eq!(g.v().values("index").sum(), Value::Int(expected_sum));
    let grouped = g.v().group_count().by_key("priority").build().to_list();
    // Previously an `if let Some(Value::Map(..))` silently skipped every
    // assertion when the result was empty or the wrong shape; fail loudly.
    let map = match grouped.first() {
        Some(Value::Map(map)) => map,
        other => panic!("group_count() should yield one Value::Map, got {:?}", other),
    };
    assert_eq!(map.len(), 5);
    let total: i64 = map.values().filter_map(|v| v.as_i64()).sum();
    assert_eq!(total, 1000);
}
#[test]
fn large_graph_float_filtering() {
    let graph = create_large_graph(500, 2);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Exactly 249 vertices in the 500-vertex fixture have score > 25.0.
    let above_threshold = g.v().has_where("score", p::gt(25.0f64)).count();
    assert_eq!(above_threshold, 249);
}
#[test]
fn large_graph_deep_traversal_scaling() {
    let graph = create_large_graph(500, 4);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    // Previously a bare `if let Some(id)` silently skipped the traversal
    // check when no start vertex was found; the fixture always creates a
    // vertex with index 0, so a missing start is a failure — panic instead.
    let start_id = g
        .v()
        .has_where("index", p::eq(0i64))
        .to_list()
        .first()
        .and_then(|v| v.as_vertex_id())
        .expect("fixture should contain a vertex with index 0");
    let reachable = g
        .v_ids([start_id])
        .repeat(__.out())
        .times(5)
        .emit()
        .dedup()
        .count();
    assert!(
        reachable >= 10,
        "Expected at least 10 reachable vertices, got {}",
        reachable
    );
}
#[test]
fn large_graph_order_by_score() {
    let graph = create_large_graph(1000, 2);
    let snapshot = graph.snapshot();
    let g = snapshot.gremlin();
    let top_scores = g
        .v()
        .values("score")
        .order()
        .by_desc()
        .build()
        .limit(10)
        .to_list();
    assert_eq!(top_scores.len(), 10);
    // Previously an `if let Some(Value::Float(..))` silently skipped the
    // check when the top value was missing or not a float; fail loudly.
    match top_scores.first() {
        Some(Value::Float(top)) => assert!(
            (*top - 99.9).abs() < 0.01,
            "Expected top score near 99.9, got {}",
            top
        ),
        other => panic!("Expected a Value::Float at the top, got {:?}", other),
    }
}