use crate::backend::native::NativeBackendError;
use crate::backend::native::graph_file::GraphFile;
use crate::backend::native::optimizations;
use crate::backend::native::types::*;
use super::{TraversalContext, get_neighbors_optimized};
use crate::backend::native::adjacency::AdjacencyHelpers;
/// Breadth-first traversal from `start` following outgoing edges, limited to
/// `depth` hops, using the scalar (non-pointer-table) neighbor path.
///
/// Returns the nodes discovered during the traversal in BFS order. For
/// `depth == 0` the result is just `[start]`; for deeper traversals the
/// result contains only the newly discovered neighbors (NOTE(review): the
/// start node itself is not included in that case — presumably intentional,
/// verify against callers).
pub fn bfs_generic_scalar(
    graph_file: &mut GraphFile,
    start: NativeNodeId,
    depth: u32,
) -> Result<Vec<NativeNodeId>, NativeBackendError> {
    use std::collections::{HashSet, VecDeque};

    // Trivial case: no expansion requested.
    if depth == 0 {
        return Ok(vec![start]);
    }

    let mut seen: HashSet<NativeNodeId> = HashSet::new();
    let mut frontier: VecDeque<(NativeNodeId, u32)> = VecDeque::new();
    let mut discovered = Vec::new();
    let mut ctx = TraversalContext::new();

    seen.insert(start);
    frontier.push_back((start, 0));

    while let Some((node, level)) = frontier.pop_front() {
        // Nodes that have reached the depth limit are not expanded.
        if level >= depth {
            continue;
        }

        // Feed the access-pattern detector: degree plus on-disk cluster
        // placement, so it can recognize linearly laid-out regions.
        let degree = AdjacencyHelpers::outgoing_degree(graph_file, node)?;
        let (cluster_offset, cluster_size) = graph_file
            .read_node_at(node)
            .map(|rec| (rec.outgoing_cluster_offset, rec.outgoing_cluster_size))
            .unwrap_or((0, 0));
        let _pattern = ctx
            .detector
            .observe_with_cluster(node, degree, cluster_offset, cluster_size);
        let cluster_index = ctx.detector.cluster_offsets().len().saturating_sub(1);
        ctx.node_cluster_index.insert(node, cluster_index);

        // Once a linear layout is confirmed, pull upcoming clusters into the
        // read-ahead buffer before resolving neighbors.
        if ctx.detector.is_linear_confirmed() && !ctx.buffer.contains(node) {
            ctx.buffer.prefetch_clusters_from(graph_file, node)?;
        }

        let neighbors = get_neighbors_optimized(
            graph_file,
            node,
            crate::backend::native::adjacency::Direction::Outgoing,
            &mut ctx,
        )?;

        for neighbor in neighbors {
            // `insert` returns true only for a first sighting.
            if seen.insert(neighbor) {
                discovered.push(neighbor);
                frontier.push_back((neighbor, level + 1));
            }
        }
    }

    // Debug builds report buffer/cache effectiveness for tuning.
    #[cfg(debug_assertions)]
    {
        let total_lookups = ctx.buffer_hits + ctx.buffer_misses + ctx.stats.hits + ctx.stats.misses;
        if total_lookups > 0 {
            log::debug!(
                "BFS optimized stats: buffer_hits={}, buffer_misses={}, cache_hits={}, cache_misses={}, combined_hit_rate={:.2}%",
                ctx.buffer_hits,
                ctx.buffer_misses,
                ctx.stats.hits,
                ctx.stats.misses,
                ctx.combined_hit_rate() * 100.0
            );
        }
    }
    Ok(discovered)
}
/// Breadth-first traversal from `start` limited to `depth` hops, preferring
/// the precomputed outgoing edge-offset table when one exists for a node and
/// falling back to the generic optimized neighbor lookup otherwise.
///
/// Returns discovered nodes in BFS order. For `depth == 0` the result is
/// `[start]`; otherwise only discovered neighbors are returned
/// (NOTE(review): `start` itself is excluded in that case — verify callers
/// expect this).
pub fn bfs_pointer_table_optimized(
    graph_file: &mut GraphFile,
    start: NativeNodeId,
    depth: u32,
) -> Result<Vec<NativeNodeId>, NativeBackendError> {
    use std::collections::{HashSet, VecDeque};

    // No expansion requested.
    if depth == 0 {
        return Ok(vec![start]);
    }

    let mut seen: HashSet<NativeNodeId> = HashSet::new();
    let mut frontier: VecDeque<(NativeNodeId, u32)> = VecDeque::new();
    let mut discovered = Vec::new();
    let mut ctx = TraversalContext::new();

    seen.insert(start);
    frontier.push_back((start, 0));

    while let Some((node, level)) = frontier.pop_front() {
        // Depth-limited: do not expand nodes at the boundary.
        if level >= depth {
            continue;
        }

        // Observe degree + cluster placement so the pattern detector can
        // confirm linear layouts and drive prefetching below.
        let degree = AdjacencyHelpers::outgoing_degree(graph_file, node)?;
        let (cluster_offset, cluster_size) = graph_file
            .read_node_at(node)
            .map(|rec| (rec.outgoing_cluster_offset, rec.outgoing_cluster_size))
            .unwrap_or((0, 0));
        let _pattern = ctx
            .detector
            .observe_with_cluster(node, degree, cluster_offset, cluster_size);
        let cluster_index = ctx.detector.cluster_offsets().len().saturating_sub(1);
        ctx.node_cluster_index.insert(node, cluster_index);

        if ctx.detector.is_linear_confirmed() && !ctx.buffer.contains(node) {
            ctx.buffer.prefetch_from(graph_file, node)?;
        }

        // Fast path: resolve neighbors directly through the pointer table's
        // edge offsets; unreadable edges are silently skipped (best effort).
        let neighbors = match optimizations::get_outgoing_edge_offsets(node) {
            Some(offsets) => {
                let mut ids = Vec::with_capacity(offsets.len());
                for &offset in offsets.iter() {
                    if let Ok(edge) = graph_file.read_edge_at_offset(offset) {
                        ids.push(edge.to_id);
                    }
                }
                ids
            }
            None => get_neighbors_optimized(
                graph_file,
                node,
                crate::backend::native::adjacency::Direction::Outgoing,
                &mut ctx,
            )?,
        };

        for neighbor in neighbors {
            // First sighting: record and enqueue one level deeper.
            if seen.insert(neighbor) {
                discovered.push(neighbor);
                frontier.push_back((neighbor, level + 1));
            }
        }
    }

    // Debug builds report buffer/cache effectiveness for tuning.
    #[cfg(debug_assertions)]
    {
        let total_lookups = ctx.buffer_hits + ctx.buffer_misses + ctx.stats.hits + ctx.stats.misses;
        if total_lookups > 0 {
            log::debug!(
                "BFS pointer table optimized stats: buffer_hits={}, buffer_misses={}, cache_hits={}, cache_misses={}, combined_hit_rate={:.2}%",
                ctx.buffer_hits,
                ctx.buffer_misses,
                ctx.stats.hits,
                ctx.stats.misses,
                ctx.combined_hit_rate() * 100.0
            );
        }
    }
    Ok(discovered)
}
/// Breadth-first traversal from `start` limited to `depth` hops, combining
/// the pointer-table fast path with a hot-metadata cache: the first visit to
/// a pointer-table node also populates its hot metadata so later visits hit
/// the cache.
///
/// Returns discovered nodes in BFS order. For `depth == 0` the result is
/// `[start]`; otherwise only discovered neighbors are returned
/// (NOTE(review): `start` itself is excluded in that case — presumably
/// intentional, verify against callers).
pub fn bfs_fully_optimized(
    graph_file: &mut GraphFile,
    start: NativeNodeId,
    depth: u32,
) -> Result<Vec<NativeNodeId>, NativeBackendError> {
    if depth == 0 {
        return Ok(vec![start]);
    }
    let mut visited = std::collections::HashSet::new();
    let mut queue = std::collections::VecDeque::new();
    let mut result = Vec::new();
    let mut ctx = TraversalContext::new();
    visited.insert(start);
    queue.push_back((start, 0));
    while let Some((current_node, current_depth)) = queue.pop_front() {
        // Nodes at the depth limit are not expanded further.
        if current_depth >= depth {
            continue;
        }
        // Observe degree + on-disk cluster placement so the detector can
        // confirm linear layouts and drive prefetching below.
        let degree = AdjacencyHelpers::outgoing_degree(graph_file, current_node)?;
        let (cluster_offset, cluster_size) = match graph_file.read_node_at(current_node) {
            Ok(node_record) => (
                node_record.outgoing_cluster_offset,
                node_record.outgoing_cluster_size,
            ),
            Err(_) => (0, 0),
        };
        let _pattern =
            ctx.detector
                .observe_with_cluster(current_node, degree, cluster_offset, cluster_size);
        let cluster_index = ctx.detector.cluster_offsets().len().saturating_sub(1);
        ctx.node_cluster_index.insert(current_node, cluster_index);
        if ctx.detector.is_linear_confirmed() && !ctx.buffer.contains(current_node) {
            ctx.buffer.prefetch_from(graph_file, current_node)?;
        }
        let neighbors =
            if let Some(offsets) = optimizations::get_outgoing_edge_offsets(current_node) {
                // Fast path: resolve neighbors straight from the edge-offset
                // table; unreadable edges are skipped (best effort). The edge
                // read loop is identical whether or not hot metadata is
                // cached, so it is done once here instead of per branch.
                let hot_cached = optimizations::get_node_hot(current_node).is_some();
                let mut neighbor_ids = Vec::with_capacity(offsets.len());
                for &offset in &offsets {
                    if let Ok(edge_record) = graph_file.read_edge_at_offset(offset) {
                        neighbor_ids.push(edge_record.to_id);
                    }
                }
                // First touch of this node: populate the hot-metadata cache
                // so subsequent visits skip the extra node read.
                if !hot_cached {
                    if let Ok(node_record) = graph_file.read_node_at(current_node) {
                        let hot_metadata = optimizations::extract_node_hot(&node_record);
                        optimizations::put_node_hot(current_node, hot_metadata);
                    }
                }
                neighbor_ids
            } else {
                get_neighbors_optimized(
                    graph_file,
                    current_node,
                    crate::backend::native::adjacency::Direction::Outgoing,
                    &mut ctx,
                )?
            };
        for neighbor in neighbors {
            if !visited.contains(&neighbor) {
                visited.insert(neighbor);
                result.push(neighbor);
                queue.push_back((neighbor, current_depth + 1));
            }
        }
    }
    // Debug builds report buffer/cache effectiveness for tuning.
    #[cfg(debug_assertions)]
    {
        let total_lookups = ctx.buffer_hits + ctx.buffer_misses + ctx.stats.hits + ctx.stats.misses;
        if total_lookups > 0 {
            log::debug!(
                "BFS fully optimized stats: buffer_hits={}, buffer_misses={}, cache_hits={}, cache_misses={}, combined_hit_rate={:.2}%",
                ctx.buffer_hits,
                ctx.buffer_misses,
                ctx.stats.hits,
                ctx.stats.misses,
                ctx.combined_hit_rate() * 100.0
            );
        }
    }
    Ok(result)
}