use crate::backend::native::adjacency::{AdjacencyHelpers, Direction};
use crate::backend::native::graph_file::GraphFile;
use crate::backend::native::types::{NativeNodeId, NativeResult};
use crate::backend::native::v2::edge_cluster::EdgeCluster;
use ahash::AHashMap;
use crate::backend::native::adjacency::SequentialClusterReader;
use crate::backend::native::graph_ops::traversal_context::TraversalContext;
/// Cache key for memoized neighbor lookups: a node paired with the
/// traversal direction (outgoing vs. incoming edges are cached separately).
type CacheKey = (NativeNodeId, Direction);
/// Maps a `(node, direction)` pair to that node's resolved neighbor list.
pub type TraversalCache = AHashMap<CacheKey, Vec<NativeNodeId>>;
/// Hit/miss counters for the traversal neighbor cache.
#[derive(Debug, Default, Clone, Copy)]
pub struct TraversalCacheStats {
    /// Lookups answered straight from the cache.
    pub hits: u64,
    /// Lookups that had to load neighbors from storage.
    pub misses: u64,
}

impl TraversalCacheStats {
    /// Creates a zeroed counter pair.
    #[inline]
    pub fn new() -> Self {
        Self::default()
    }

    /// Fraction of lookups served from the cache, in `[0.0, 1.0]`.
    ///
    /// Returns `0.0` when no lookups have been recorded yet, avoiding a
    /// division by zero.
    #[inline]
    pub fn hit_rate(&self) -> f64 {
        match self.hits + self.misses {
            0 => 0.0,
            total => self.hits as f64 / total as f64,
        }
    }

    /// Tallies one cache hit.
    #[inline]
    pub fn record_hit(&mut self) {
        self.hits += 1;
    }

    /// Tallies one cache miss.
    #[inline]
    pub fn record_miss(&mut self) {
        self.misses += 1;
    }
}
/// Returns the neighbors of `node_id` in `direction`, consulting `cache`
/// before touching the adjacency structures in `graph_file`.
///
/// Hits and misses are tallied in `stats`. On a miss the freshly loaded
/// neighbor list is memoized in `cache` before being returned.
///
/// # Errors
/// Propagates any error from the underlying adjacency lookup.
pub fn get_neighbors_cached(
    graph_file: &mut GraphFile,
    node_id: NativeNodeId,
    direction: Direction,
    cache: &mut TraversalCache,
    stats: &mut TraversalCacheStats,
) -> NativeResult<Vec<NativeNodeId>> {
    let key = (node_id, direction);
    match cache.get(&key) {
        // Fast path: previously computed neighbor list.
        Some(hit) => {
            stats.record_hit();
            Ok(hit.clone())
        }
        // Slow path: load via the adjacency helpers, then memoize.
        None => {
            stats.record_miss();
            let loaded = match direction {
                Direction::Outgoing => {
                    AdjacencyHelpers::get_outgoing_neighbors(graph_file, node_id)?
                }
                Direction::Incoming => {
                    AdjacencyHelpers::get_incoming_neighbors(graph_file, node_id)?
                }
            };
            cache.insert(key, loaded.clone());
            Ok(loaded)
        }
    }
}
/// Neighbor lookup that layers several fast paths on top of the plain
/// per-`(node, direction)` cache:
///
/// 1. When the detector has confirmed a linear traversal, consult the L1
///    node-record buffer for this node's edge-cluster location and decode
///    the cluster — from buffered bytes first, else directly from disk.
/// 2. When the detector recommends it, bulk-read the whole chain of
///    clusters sequentially and answer from that buffer.
/// 3. Otherwise fall back to the memoizing cache over `AdjacencyHelpers`,
///    mirroring `get_neighbors_cached`.
///
/// Successful fast-path answers are also inserted into `ctx.cache`.
/// NOTE(review): the fast paths bypass `ctx.stats` — only the final
/// cache/loader path records hits and misses. Confirm this is intended.
pub fn get_neighbors_optimized(
    graph_file: &mut GraphFile,
    node_id: NativeNodeId,
    direction: Direction,
    ctx: &mut TraversalContext,
) -> NativeResult<Vec<NativeNodeId>> {
    let cache_key = (node_id, direction);
    // --- Fast path 1: confirmed-linear traversal, L1 node buffer. ---
    if ctx.detector.is_linear_confirmed() {
        // Extract the cluster location for the requested direction from the
        // buffered node record, if this node is resident in the buffer.
        let l1_result = ctx.buffer.get(node_id).map(|node_record| {
            let (cluster_offset, cluster_size) = match direction {
                Direction::Outgoing => (
                    node_record.outgoing_cluster_offset,
                    node_record.outgoing_cluster_size,
                ),
                Direction::Incoming => (
                    node_record.incoming_cluster_offset,
                    node_record.incoming_cluster_size,
                ),
            };
            (cluster_offset, cluster_size)
        });
        if let Some((cluster_offset, cluster_size)) = l1_result {
            ctx.record_buffer_hit();
            if cluster_offset > 0 && cluster_size > 0 {
                // Prefer cluster bytes the buffer already holds.
                if let Some(cached_cluster_bytes) = ctx.buffer.get_cluster(cluster_offset) {
                    if let Ok(cluster) = EdgeCluster::deserialize(cached_cluster_bytes) {
                        let neighbors: Vec<NativeNodeId> = cluster
                            .iter_neighbors()
                            .map(|id| id as NativeNodeId)
                            .collect();
                        // Memoize so later lookups hit the plain cache.
                        ctx.cache.insert(cache_key, neighbors.clone());
                        return Ok(neighbors);
                    }
                }
                // Buffer had no usable bytes — read the cluster from the
                // file. Read/deserialize failures deliberately fall through
                // to the later strategies instead of erroring out.
                let mut cluster_data = vec![0u8; cluster_size as usize];
                if graph_file
                    .read_bytes(cluster_offset, &mut cluster_data)
                    .is_ok()
                {
                    if let Ok(cluster) = EdgeCluster::deserialize(&cluster_data) {
                        let neighbors: Vec<NativeNodeId> = cluster
                            .iter_neighbors()
                            .map(|id| id as NativeNodeId)
                            .collect();
                        ctx.cache.insert(cache_key, neighbors.clone());
                        return Ok(neighbors);
                    }
                }
            } else {
                // A zero offset/size marks "no cluster": the node has no
                // neighbors in this direction, cached as an empty list.
                ctx.cache.insert(cache_key, Vec::new());
                return Ok(Vec::new());
            }
        } else {
            ctx.record_buffer_miss();
        }
    }
    // --- Fast path 2: lazily build the sequential cluster buffer when the
    // detector judges a bulk read worthwhile. Failure is non-fatal: we just
    // keep using the per-node paths below. ---
    if ctx.cluster_buffer.is_none() && ctx.detector.should_use_sequential_read() {
        let mut reader = SequentialClusterReader::new();
        match reader.read_chain_clusters(graph_file, ctx.detector.cluster_offsets()) {
            Ok(buffer) => {
                ctx.cluster_buffer = Some(buffer);
                ctx.cluster_buffer_offsets = ctx.detector.cluster_offsets().to_vec();
            }
            Err(_) => {
                // Best effort only — ignore and fall back.
            }
        }
    }
    // Answer from the sequential buffer when this node's cluster index is
    // known; extraction errors likewise fall through to the cache path.
    if let Some(buffer) = &ctx.cluster_buffer {
        if let Some(&cluster_index) = ctx.node_cluster_index.get(&node_id) {
            let mut reader = SequentialClusterReader::new();
            match reader.extract_neighbors(buffer, cluster_index, &ctx.cluster_buffer_offsets) {
                Ok(neighbors) => {
                    ctx.cache.insert(cache_key, neighbors.clone());
                    return Ok(neighbors);
                }
                Err(_) => {
                    // Fall through to the memoizing cache below.
                }
            }
        }
    }
    // --- Fallback: memoizing cache over AdjacencyHelpers, with stats. ---
    if let Some(cached) = ctx.cache.get(&cache_key) {
        ctx.stats.record_hit();
        return Ok(cached.clone());
    }
    ctx.stats.record_miss();
    let neighbors = match direction {
        Direction::Outgoing => AdjacencyHelpers::get_outgoing_neighbors(graph_file, node_id)?,
        Direction::Incoming => AdjacencyHelpers::get_incoming_neighbors(graph_file, node_id)?,
    };
    ctx.cache.insert(cache_key, neighbors.clone());
    Ok(neighbors)
}
/// Feeds `node_id`'s degree and cluster location into the linearity
/// detector, records the node's cluster position, and then resolves its
/// neighbors via [`get_neighbors_optimized`].
///
/// A `Branching` verdict from the detector invalidates any sequential
/// cluster buffer, since the linear-chain assumption no longer holds.
///
/// # Errors
/// Propagates errors from the degree lookup or the neighbor resolution.
pub fn traverse_with_detection(
    graph_file: &mut GraphFile,
    node_id: NativeNodeId,
    direction: Direction,
    cluster_offset: u64,
    cluster_size: u32,
    ctx: &mut TraversalContext,
) -> NativeResult<Vec<NativeNodeId>> {
    use crate::backend::native::adjacency::TraversalPattern;
    // The node's degree drives the detector's linearity heuristic.
    let degree = if matches!(direction, Direction::Outgoing) {
        AdjacencyHelpers::outgoing_degree(graph_file, node_id)?
    } else {
        AdjacencyHelpers::incoming_degree(graph_file, node_id)?
    };
    let pattern = ctx
        .detector
        .observe_with_cluster(node_id, degree, cluster_offset, cluster_size);
    // Map the node to the most recently recorded cluster offset.
    // NOTE(review): assumes observe_with_cluster appended this node's
    // offset as the last entry — confirm against the detector.
    let last_index = ctx.detector.cluster_offsets().len().saturating_sub(1);
    ctx.node_cluster_index.insert(node_id, last_index);
    // Branching breaks the chain assumption; drop the sequential buffer.
    if matches!(pattern, TraversalPattern::Branching) {
        ctx.clear_cluster_buffer();
    }
    get_neighbors_optimized(graph_file, node_id, direction, ctx)
}
#[cfg(test)]
mod tests {
    // Removed unused imports (`NativeBackendError`, per-test re-imports of
    // `Direction`, `LinearDetector`, `TraversalPattern`, `NativeNodeId`) and
    // an unneeded `mut` — all were dead and triggered compiler warnings.
    // Everything needed is in scope via `super::*`.
    use super::*;

    /// `new` starts with zeroed counters and a 0.0 hit rate.
    #[test]
    fn test_cache_stats_new() {
        let stats = TraversalCacheStats::new();
        assert_eq!(stats.hits, 0);
        assert_eq!(stats.misses, 0);
        assert_eq!(stats.hit_rate(), 0.0);
    }

    /// `Default` yields the same zeroed state as `new`.
    #[test]
    fn test_cache_stats_default() {
        let stats = TraversalCacheStats::default();
        assert_eq!(stats.hits, 0);
        assert_eq!(stats.misses, 0);
    }

    /// Hit rate is hits / (hits + misses), with 0.0 for an empty tally.
    #[test]
    fn test_cache_stats_hit_rate() {
        let mut stats = TraversalCacheStats::new();
        assert_eq!(stats.hit_rate(), 0.0);
        stats.record_hit();
        stats.record_hit();
        stats.record_miss();
        stats.record_miss();
        assert!((stats.hit_rate() - 0.5).abs() < f64::EPSILON);
        stats.hits = 10;
        stats.misses = 0;
        assert_eq!(stats.hit_rate(), 1.0);
        stats.hits = 0;
        stats.misses = 10;
        assert_eq!(stats.hit_rate(), 0.0);
    }

    /// Cache keys must distinguish both the node id and the direction.
    #[test]
    fn test_cache_key_combines_node_and_direction() {
        let key1: CacheKey = (42, Direction::Outgoing);
        let key2: CacheKey = (42, Direction::Incoming);
        assert_ne!(key1, key2, "Keys should differ by direction");
        let key3: CacheKey = (42, Direction::Outgoing);
        let key4: CacheKey = (99, Direction::Outgoing);
        assert_ne!(key3, key4, "Keys should differ by node_id");
        let key5: CacheKey = (42, Direction::Outgoing);
        let key6: CacheKey = (42, Direction::Outgoing);
        assert_eq!(key5, key6, "Keys should be identical");
    }

    /// The `TraversalCache` alias behaves as a normal `AHashMap`.
    #[test]
    fn test_traversal_cache_type_alias() {
        let cache: TraversalCache = TraversalCache::new();
        assert_eq!(cache.len(), 0);
        let mut cache: TraversalCache = AHashMap::new();
        cache.insert((1, Direction::Outgoing), vec![2, 3, 4]);
        cache.insert((1, Direction::Incoming), vec![0]);
        assert_eq!(cache.len(), 2);
        assert_eq!(cache.get(&(1, Direction::Outgoing)), Some(&vec![2, 3, 4]));
        assert_eq!(cache.get(&(1, Direction::Incoming)), Some(&vec![0]));
    }

    /// `SequentialClusterReader` is constructible via the module import.
    #[test]
    fn test_get_neighbors_optimized_has_sequential_cluster_reader_import() {
        let _ = SequentialClusterReader::new();
    }

    /// A fresh context starts with an empty node -> cluster-index map.
    #[test]
    fn test_traverse_with_detection_populates_mapping() {
        let ctx = TraversalContext::new();
        assert!(ctx.node_cluster_index.is_empty());
    }

    /// A fresh context has no sequential cluster buffer.
    #[test]
    fn test_traverse_with_detection_clears_on_branching() {
        let ctx = TraversalContext::new();
        assert!(ctx.cluster_buffer.is_none());
    }
}