use crate::{
metadata::tables::{
file::FileRaw,
types::{RowWritable, TableInfoRef},
},
utils::{write_le_at, write_le_at_dyn},
Result,
};
impl RowWritable for FileRaw {
    /// Serializes one `File` table row into `data` at `*offset`, advancing
    /// the offset past the written bytes.
    ///
    /// Column layout (ECMA-335 `File`, 0x26):
    /// 1. `Flags` — fixed 4-byte little-endian value
    /// 2. `Name` — string-heap index, 2 or 4 bytes depending on heap size
    /// 3. `HashValue` — blob-heap index, 2 or 4 bytes depending on heap size
    ///
    /// The row id is unused because no column derives from it.
    ///
    /// # Errors
    /// Propagates any failure from the underlying write helpers (e.g. the
    /// buffer being too small for the row).
    fn row_write(
        &self,
        data: &mut [u8],
        offset: &mut usize,
        _rid: u32,
        sizes: &TableInfoRef,
    ) -> Result<()> {
        // Index widths are decided once by the heap-size flags in `sizes`.
        let wide_strings = sizes.is_large_str();
        let wide_blobs = sizes.is_large_blob();

        write_le_at(data, offset, self.flags)?;
        write_le_at_dyn(data, offset, self.name, wide_strings)?;
        write_le_at_dyn(data, offset, self.hash_value, wide_blobs)?;

        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use crate::metadata::tables::{
        file::FileRaw,
        types::{RowReadable, RowWritable, TableInfo, TableRow},
    };
    use crate::metadata::token::Token;

    /// Builds a `FileRaw` with the given column values. The bookkeeping
    /// fields (`rid`, `token`, `offset`) are fixed because `row_write`
    /// never reads them.
    fn make_file(flags: u32, name: u32, hash_value: u32) -> FileRaw {
        FileRaw {
            rid: 1,
            token: Token::new(0x26000001),
            offset: 0,
            flags,
            name,
            hash_value,
        }
    }

    #[test]
    fn test_file_row_size() {
        // Small heaps: 4-byte flags + 2-byte string index + 2-byte blob index.
        let small = Arc::new(TableInfo::new_test(&[], false, false, false));
        assert_eq!(<FileRaw as TableRow>::row_size(&small), 4 + 2 + 2);

        // Large string + blob heaps widen both indexes to 4 bytes.
        let large = Arc::new(TableInfo::new_test(&[], true, true, false));
        assert_eq!(<FileRaw as TableRow>::row_size(&large), 4 + 4 + 4);
    }

    #[test]
    fn test_file_row_write_small() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));
        let row = make_file(0x01010101, 0x0202, 0x0303);

        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut pos = 0;
        row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

        let expected = vec![
            0x01, 0x01, 0x01, 0x01, // flags
            0x02, 0x02, // name (2-byte string index)
            0x03, 0x03, // hash_value (2-byte blob index)
        ];
        assert_eq!(buf, expected);
        assert_eq!(pos, expected.len());
    }

    #[test]
    fn test_file_row_write_large() {
        let sizes = Arc::new(TableInfo::new_test(&[], true, true, false));
        let row = make_file(0x01010101, 0x02020202, 0x03030303);

        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut pos = 0;
        row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

        let expected = vec![
            0x01, 0x01, 0x01, 0x01, // flags
            0x02, 0x02, 0x02, 0x02, // name (4-byte string index)
            0x03, 0x03, 0x03, 0x03, // hash_value (4-byte blob index)
        ];
        assert_eq!(buf, expected);
        assert_eq!(pos, expected.len());
    }

    #[test]
    fn test_file_round_trip() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));
        let original = FileRaw {
            rid: 42,
            token: Token::new(0x2600002A),
            offset: 0,
            flags: 0x12345678,
            name: 256,
            hash_value: 512,
        };

        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut write_pos = 0;
        original
            .row_write(&mut buf, &mut write_pos, 42, &sizes)
            .unwrap();

        // Reading the bytes back must reproduce every serialized column.
        let mut read_pos = 0;
        let round_tripped = FileRaw::row_read(&buf, &mut read_pos, 42, &sizes).unwrap();

        assert_eq!(original.rid, round_tripped.rid);
        assert_eq!(original.token, round_tripped.token);
        assert_eq!(original.flags, round_tripped.flags);
        assert_eq!(original.name, round_tripped.name);
        assert_eq!(original.hash_value, round_tripped.hash_value);
    }

    #[test]
    fn test_file_different_attributes() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));

        // (flags, name index, hash index, description)
        let test_cases = vec![
            (0x00000000, 100, 200, "File contains metadata"),
            (0x00000001, 101, 201, "File contains no metadata"),
            (0x00000002, 102, 202, "Reserved flag"),
            (0x12345678, 103, 203, "Custom flags combination"),
        ];

        for (flags, name_index, hash_index, _description) in test_cases {
            let row = make_file(flags, name_index, hash_index);

            let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
            let mut pos = 0;
            row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

            let mut read_pos = 0;
            let round_tripped = FileRaw::row_read(&buf, &mut read_pos, 1, &sizes).unwrap();

            assert_eq!(row.flags, round_tripped.flags);
            assert_eq!(row.name, round_tripped.name);
            assert_eq!(row.hash_value, round_tripped.hash_value);
        }
    }

    #[test]
    fn test_file_edge_cases() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));

        // All-zero columns serialize to an all-zero row.
        let zero_row = make_file(0, 0, 0);
        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut pos = 0;
        zero_row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();
        assert_eq!(buf, vec![0x00; 8]);

        // Maximum values for the small-heap layout still fit in 8 bytes.
        let max_row = make_file(0xFFFFFFFF, 0xFFFF, 0xFFFF);
        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut pos = 0;
        max_row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();
        assert_eq!(buf.len(), 8);
    }

    #[test]
    fn test_file_heap_sizes() {
        // (large strings?, large blobs?, string index width, blob index width)
        let configurations = vec![
            (false, false, 2, 2),
            (true, false, 4, 2),
            (false, true, 2, 4),
            (true, true, 4, 4),
        ];

        for (large_str, large_blob, str_width, blob_width) in configurations {
            let sizes = Arc::new(TableInfo::new_test(&[], large_str, large_blob, false));
            let row = make_file(0x12345678, 0x12345678, 0x12345678);

            let expected_total = 4 + str_width + blob_width;
            assert_eq!(
                <FileRaw as TableRow>::row_size(&sizes) as usize,
                expected_total
            );

            let mut buf = vec![0u8; expected_total];
            let mut pos = 0;
            row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

            assert_eq!(buf.len(), expected_total);
            assert_eq!(pos, expected_total);
        }
    }

    #[test]
    fn test_file_common_scenarios() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));

        let file_scenarios = vec![
            (0x00000000, 100, 200, "Module file with metadata"),
            (0x00000001, 101, 201, "Resource file without metadata"),
            (0x00000000, 102, 202, "Native library file"),
            (0x00000001, 103, 203, "Documentation XML file"),
            (0x00000000, 104, 204, "Configuration data file"),
            (0x00000001, 105, 205, "Satellite assembly resource"),
        ];

        for (flags, name_index, hash_index, _description) in file_scenarios {
            let row = make_file(flags, name_index, hash_index);

            let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
            let mut pos = 0;
            row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

            let mut read_pos = 0;
            let round_tripped = FileRaw::row_read(&buf, &mut read_pos, 1, &sizes).unwrap();

            assert_eq!(row.flags, round_tripped.flags);
            assert_eq!(row.name, round_tripped.name);
            assert_eq!(row.hash_value, round_tripped.hash_value);
        }
    }

    #[test]
    fn test_file_security_hashes() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));

        let hash_scenarios = vec![
            (1, "SHA-1 hash (20 bytes)"),
            (100, "SHA-256 hash (32 bytes)"),
            (200, "MD5 hash (16 bytes)"),
            (300, "Custom hash algorithm"),
            (400, "Multiple hash values"),
            (500, "Empty hash (no verification)"),
            (1000, "Large hash blob"),
            (65535, "Maximum hash index for 2-byte"),
        ];

        for (hash_index, _description) in hash_scenarios {
            let row = make_file(0x00000000, 50, hash_index);

            let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
            let mut pos = 0;
            row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

            // hash_value occupies bytes 6..8 (after 4-byte flags + 2-byte name).
            let written_hash = u16::from_le_bytes([buf[6], buf[7]]);
            assert_eq!(written_hash as u32, hash_index);
        }
    }

    #[test]
    fn test_file_known_binary_format() {
        let sizes = Arc::new(TableInfo::new_test(&[], false, false, false));
        let row = make_file(0x01010101, 0x0202, 0x0303);

        let mut buf = vec![0u8; <FileRaw as TableRow>::row_size(&sizes) as usize];
        let mut pos = 0;
        row.row_write(&mut buf, &mut pos, 1, &sizes).unwrap();

        let expected = vec![
            0x01, 0x01, 0x01, 0x01, // flags
            0x02, 0x02, // name
            0x03, 0x03, // hash_value
        ];
        assert_eq!(buf, expected);
    }
}