use crate::backend::native::{
constants::HEADER_SIZE,
persistent_header::{PERSISTENT_HEADER_SIZE, PersistentHeaderV2},
types::NativeResult,
};
/// Serializes a [`PersistentHeaderV2`] into its on-disk big-endian layout.
///
/// Layout (80 bytes total):
///   magic(8) | version(4) | flags(4) | node_count(8) | edge_count(8) |
///   schema_version/reserved(8) | node_data_offset(8) | edge_data_offset(8) |
///   outgoing_cluster_offset(8) | incoming_cluster_offset(8) | free_space_offset(8)
///
/// For `version == 2` headers, the schema/reserved pair is written as one
/// packed big-endian u64 (`reserved` in the high 32 bits, `schema_version`
/// in the low 32 bits) so that `decode_persistent_header`'s v2 branch
/// round-trips the fields. Later versions store two independent u32 fields.
///
/// # Errors
/// Currently always returns `Ok`; the `NativeResult` wrapper keeps the
/// signature uniform with the rest of the codec API.
///
/// # Panics
/// Asserts that the encoded size equals both `PERSISTENT_HEADER_SIZE` and
/// `constants::HEADER_SIZE` — a mismatch indicates a layout bug.
pub fn encode_persistent_header(header: &PersistentHeaderV2) -> NativeResult<Vec<u8>> {
    let mut buffer = Vec::with_capacity(PERSISTENT_HEADER_SIZE);
    buffer.extend_from_slice(&header.magic);
    buffer.extend_from_slice(&header.version.to_be_bytes());
    buffer.extend_from_slice(&header.flags.to_be_bytes());
    buffer.extend_from_slice(&header.node_count.to_be_bytes());
    buffer.extend_from_slice(&header.edge_count.to_be_bytes());
    if header.version == 2 {
        // Bug fix: the v2 on-disk format packs schema_version/reserved into a
        // single big-endian u64 (reserved high, schema_version low), which is
        // how decode's v2 branch reads it back. Previously the two u32s were
        // written independently here, so a v2 header round-tripped with
        // schema_version and reserved swapped.
        let packed = ((header.reserved as u64) << 32) | header.schema_version as u64;
        buffer.extend_from_slice(&packed.to_be_bytes());
    } else {
        // v3+ layout: two independent big-endian u32 fields (byte-identical
        // to the previous encoder output for these versions).
        buffer.extend_from_slice(&header.schema_version.to_be_bytes());
        buffer.extend_from_slice(&header.reserved.to_be_bytes());
    }
    buffer.extend_from_slice(&header.node_data_offset.to_be_bytes());
    buffer.extend_from_slice(&header.edge_data_offset.to_be_bytes());
    buffer.extend_from_slice(&header.outgoing_cluster_offset.to_be_bytes());
    buffer.extend_from_slice(&header.incoming_cluster_offset.to_be_bytes());
    buffer.extend_from_slice(&header.free_space_offset.to_be_bytes());
    assert_eq!(
        buffer.len(),
        PERSISTENT_HEADER_SIZE,
        "Persistent header encoding size mismatch"
    );
    assert_eq!(
        buffer.len(),
        HEADER_SIZE as usize,
        "Header must match constants::HEADER_SIZE"
    );
    Ok(buffer)
}
/// Deserializes a [`PersistentHeaderV2`] from its on-disk big-endian layout.
///
/// Two layouts are supported, distinguished by the `version` field:
/// - `version == 2`: schema_version/reserved are a single packed u64
///   (schema_version in the low 32 bits, reserved in the high 32 bits);
/// - otherwise: two independent big-endian u32 fields.
///
/// The trailing cluster/free-space offsets are read only when the buffer is
/// at least `HEADER_SIZE` bytes long; shorter buffers leave them at 0.
///
/// # Errors
/// Returns `FileTooSmall` when fewer than `PERSISTENT_HEADER_SIZE` bytes are
/// supplied, and `InvalidHeader` if any field read would run past the buffer.
pub fn decode_persistent_header(bytes: &[u8]) -> NativeResult<PersistentHeaderV2> {
    // Reads a bounds-checked big-endian u32 at `offset`.
    fn read_u32(bytes: &[u8], offset: usize) -> NativeResult<u32> {
        let s = get_slice_safe(bytes, offset, 4)?;
        Ok(u32::from_be_bytes([s[0], s[1], s[2], s[3]]))
    }
    // Reads a bounds-checked big-endian u64 at `offset`.
    fn read_u64(bytes: &[u8], offset: usize) -> NativeResult<u64> {
        let s = get_slice_safe(bytes, offset, 8)?;
        let mut raw = [0u8; 8];
        raw.copy_from_slice(s);
        Ok(u64::from_be_bytes(raw))
    }

    if bytes.len() < PERSISTENT_HEADER_SIZE {
        return Err(
            crate::backend::native::types::NativeBackendError::FileTooSmall {
                size: bytes.len() as u64,
                min_size: PERSISTENT_HEADER_SIZE as u64,
            },
        );
    }

    let mut offset = 0;

    let mut magic = [0u8; 8];
    magic.copy_from_slice(get_slice_safe(bytes, offset, 8)?);
    offset += 8;

    let version = read_u32(bytes, offset)?;
    offset += 4;
    let flags = read_u32(bytes, offset)?;
    offset += 4;
    let node_count = read_u64(bytes, offset)?;
    offset += 8;
    let edge_count = read_u64(bytes, offset)?;
    offset += 8;

    // Version-dependent schema/reserved layout; see the doc comment.
    let (schema_version, reserved) = if version == 2 {
        let packed = read_u64(bytes, offset)?;
        offset += 8;
        // Low 32 bits -> schema_version, high 32 bits -> reserved.
        (packed as u32, (packed >> 32) as u32)
    } else {
        let sv = read_u32(bytes, offset)?;
        offset += 4;
        let res = read_u32(bytes, offset)?;
        offset += 4;
        (sv, res)
    };

    let node_data_offset = read_u64(bytes, offset)?;
    offset += 8;
    let edge_data_offset = read_u64(bytes, offset)?;
    offset += 8;

    // Trailing offsets default to 0 for buffers shorter than HEADER_SIZE.
    // NOTE(review): given the FileTooSmall guard above, this branch always
    // runs when PERSISTENT_HEADER_SIZE == HEADER_SIZE (asserted by the unit
    // tests); the guard is kept for any legacy short-header callers.
    let mut outgoing_cluster_offset = 0u64;
    let mut incoming_cluster_offset = 0u64;
    let mut free_space_offset = 0u64;
    if bytes.len() >= HEADER_SIZE as usize {
        // Hoisted: the original consulted the env var before each field read.
        let debug = std::env::var("HEADER_VALIDATE_DEBUG").is_ok();
        if debug {
            println!(
                "[HEADER_READ_DEBUG] Reading outgoing_cluster_offset at offset {} (should be 56)",
                offset
            );
            println!(
                "[HEADER_READ_DEBUG] Raw outgoing bytes: {:02x?}",
                get_slice_safe(bytes, offset, 8)?
            );
        }
        outgoing_cluster_offset = read_u64(bytes, offset)?;
        offset += 8;
        if debug {
            println!(
                "[HEADER_READ_DEBUG] Reading incoming_cluster_offset at offset {} (should be 64)",
                offset
            );
            println!(
                "[HEADER_READ_DEBUG] Raw incoming bytes: {:02x?}",
                get_slice_safe(bytes, offset, 8)?
            );
        }
        incoming_cluster_offset = read_u64(bytes, offset)?;
        offset += 8;
        free_space_offset = read_u64(bytes, offset)?;
    }

    Ok(PersistentHeaderV2 {
        magic,
        version,
        flags,
        node_count,
        edge_count,
        schema_version,
        reserved,
        node_data_offset,
        edge_data_offset,
        outgoing_cluster_offset,
        incoming_cluster_offset,
        free_space_offset,
    })
}
/// Returns the subslice `data[start..start + len]`, or an `InvalidHeader`
/// error when `start + len` overflows `usize` or extends past `data`'s end.
pub fn get_slice_safe(data: &[u8], start: usize, len: usize) -> NativeResult<&[u8]> {
    match start.checked_add(len) {
        // In-bounds: the overflow check above makes the range valid.
        Some(end) if end <= data.len() => Ok(&data[start..end]),
        // Overflow or out of bounds — report the attempted access.
        _ => Err(
            crate::backend::native::types::NativeBackendError::InvalidHeader {
                field: "header_data".to_string(),
                reason: format!(
                    "slice access out of bounds: start={}, len={}, data_len={}",
                    start,
                    len,
                    data.len()
                ),
            },
        ),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Encoding then decoding must preserve every header field.
    #[test]
    fn test_persistent_header_encode_decode_roundtrip() {
        use crate::backend::native::v2::V2_MAGIC;
        let original = PersistentHeaderV2 {
            magic: V2_MAGIC,
            version: 3,
            flags: crate::backend::native::constants::DEFAULT_FEATURE_FLAGS,
            node_count: 100,
            edge_count: 500,
            schema_version: 1,
            reserved: 0,
            node_data_offset: 1024,
            edge_data_offset: 8192,
            outgoing_cluster_offset: 16384,
            incoming_cluster_offset: 24576,
            free_space_offset: 32768,
        };
        let roundtripped =
            decode_persistent_header(&encode_persistent_header(&original).unwrap()).unwrap();
        assert_eq!(original.magic, roundtripped.magic);
        assert_eq!(original.version, roundtripped.version);
        assert_eq!(original.flags, roundtripped.flags);
        assert_eq!(original.node_count, roundtripped.node_count);
        assert_eq!(original.edge_count, roundtripped.edge_count);
        assert_eq!(original.schema_version, roundtripped.schema_version);
        assert_eq!(original.reserved, roundtripped.reserved);
        assert_eq!(original.node_data_offset, roundtripped.node_data_offset);
        assert_eq!(original.edge_data_offset, roundtripped.edge_data_offset);
    }

    /// The encoded size must match both size constants.
    #[test]
    fn test_persistent_header_encode_size() {
        let encoded = encode_persistent_header(&PersistentHeaderV2::new_v2()).unwrap();
        assert_eq!(encoded.len(), PERSISTENT_HEADER_SIZE);
        assert_eq!(encoded.len(), HEADER_SIZE as usize);
    }

    /// Buffers shorter than the minimum header size are rejected.
    #[test]
    fn test_decode_header_too_small() {
        let truncated = vec![0u8; 10];
        assert!(decode_persistent_header(&truncated).is_err());
    }

    /// An in-bounds request returns the expected subslice.
    #[test]
    fn test_get_slice_safe_valid() {
        let bytes = vec![1, 2, 3, 4, 5];
        assert_eq!(get_slice_safe(&bytes, 1, 3).unwrap(), &[2, 3, 4]);
    }

    /// A request past the end of the buffer is an error.
    #[test]
    fn test_get_slice_safe_out_of_bounds() {
        let bytes = vec![1, 2, 3, 4, 5];
        assert!(get_slice_safe(&bytes, 3, 5).is_err());
    }

    /// `start + len` overflowing usize must be caught, not panic.
    #[test]
    fn test_get_slice_safe_overflow() {
        let bytes = vec![1, 2, 3, 4, 5];
        assert!(get_slice_safe(&bytes, usize::MAX, 1).is_err());
    }

    /// The two header-size constants must agree.
    #[test]
    fn test_header_constants_consistency() {
        assert_eq!(PERSISTENT_HEADER_SIZE, HEADER_SIZE as usize);
    }

    /// The decoder must still understand the legacy v2 layout, where
    /// schema_version/reserved occupy a single packed u64.
    #[test]
    fn test_decode_v2_format_backward_compatibility() {
        use crate::backend::native::v2::V2_MAGIC;
        let mut raw = Vec::with_capacity(80);
        raw.extend_from_slice(&V2_MAGIC);
        raw.extend_from_slice(&2u32.to_be_bytes());
        raw.extend_from_slice(
            &crate::backend::native::constants::DEFAULT_FEATURE_FLAGS.to_be_bytes(),
        );
        raw.extend_from_slice(&100u64.to_be_bytes());
        raw.extend_from_slice(&500u64.to_be_bytes());
        raw.extend_from_slice(&1u64.to_be_bytes()); // packed schema_version/reserved
        raw.extend_from_slice(&1024u64.to_be_bytes());
        raw.extend_from_slice(&8192u64.to_be_bytes());
        raw.extend_from_slice(&16384u64.to_be_bytes());
        raw.extend_from_slice(&24576u64.to_be_bytes());
        raw.extend_from_slice(&32768u64.to_be_bytes());
        assert_eq!(raw.len(), 80);
        let decoded = decode_persistent_header(&raw).unwrap();
        assert_eq!(decoded.version, 2);
        assert_eq!(decoded.schema_version, 1);
        assert_eq!(decoded.reserved, 0);
        assert_eq!(decoded.node_count, 100);
        assert_eq!(decoded.edge_count, 500);
    }
}