use crate::{
metadata::tables::{
encmap::EncMapRaw,
types::{RowWritable, TableInfoRef},
},
utils::write_le_at,
Result,
};
impl RowWritable for EncMapRaw {
    /// Serializes one `EncMap` row into `data` at `*offset`.
    ///
    /// The `EncMap` table has a single fixed-width column — the original
    /// metadata token — so the row id and table-size context are unused
    /// here. `offset` is advanced past the bytes written by `write_le_at`.
    ///
    /// # Errors
    ///
    /// Returns an error when `write_le_at` fails (e.g. the buffer is too
    /// small to hold the value at `*offset`).
    fn row_write(
        &self,
        data: &mut [u8],
        offset: &mut usize,
        _rid: u32,
        _sizes: &TableInfoRef,
    ) -> Result<()> {
        // Single column: the original token, little-endian.
        let token_value = self.original_token.value();
        write_le_at(data, offset, token_value)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        metadata::tables::types::{RowReadable, TableInfo, TableRow},
        metadata::token::Token,
    };

    /// Builds the table layout shared by every test: a single `EncMap`
    /// table with 100 rows and small (2-byte) index sizes.
    ///
    /// Hoisted into a helper so the (loop-invariant) construction is not
    /// repeated inline — and, in the loop-based tests below, not rebuilt
    /// on every iteration.
    fn make_table_info() -> std::sync::Arc<TableInfo> {
        std::sync::Arc::new(TableInfo::new_test(
            &[(crate::metadata::tables::TableId::EncMap, 100)],
            false,
            false,
            false,
        ))
    }

    /// Constructs an `EncMapRaw` row for the given rid whose only
    /// semantically-checked field is `original_token`; `token` and
    /// `offset` follow the same rid-derived pattern the tests used.
    fn make_entry(rid: u32, original: u32) -> EncMapRaw {
        EncMapRaw {
            rid,
            token: Token::new(0x1F00_0000 | rid),
            offset: ((rid - 1) * 4) as usize,
            original_token: Token::new(original),
        }
    }

    /// Writing a row and reading it back must preserve the original token
    /// and consume exactly one row's worth of bytes in each direction.
    #[test]
    fn test_round_trip_serialization() {
        let original_row = make_entry(1, 0x0602_0001);
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        let mut buffer = vec![0u8; row_size];
        let mut offset = 0;
        original_row
            .row_write(&mut buffer, &mut offset, 1, &table_info)
            .expect("Serialization should succeed");
        let mut read_offset = 0;
        let deserialized_row = EncMapRaw::row_read(&buffer, &mut read_offset, 1, &table_info)
            .expect("Deserialization should succeed");
        assert_eq!(
            original_row.original_token.value(),
            deserialized_row.original_token.value()
        );
        assert_eq!(offset, row_size, "Offset should match expected row size");
        assert_eq!(
            read_offset, row_size,
            "Read offset should match expected row size"
        );
    }

    /// Pins the on-disk layout: a 4-byte little-endian original token.
    #[test]
    fn test_known_binary_format() {
        let encmap_entry = make_entry(1, 0x0602_0001);
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        let mut buffer = vec![0u8; row_size];
        let mut offset = 0;
        encmap_entry
            .row_write(&mut buffer, &mut offset, 1, &table_info)
            .expect("Serialization should succeed");
        assert_eq!(row_size, 4, "Row size should be 4 bytes");
        // 0x0602_0001 little-endian: 01 00 02 06.
        assert_eq!(buffer[0], 0x01);
        assert_eq!(buffer[1], 0x00);
        assert_eq!(buffer[2], 0x02);
        assert_eq!(buffer[3], 0x06);
    }

    /// Round-trips tokens referencing a variety of metadata tables.
    #[test]
    fn test_various_token_types() {
        let test_cases = vec![
            ("TypeDef", 0x0200_0001),
            ("MethodDef", 0x0600_0010),
            ("Field", 0x0400_0025),
            ("Property", 0x1700_0003),
            ("Event", 0x1400_0007),
            ("Assembly", 0x2000_0001),
            ("Module", 0x0000_0001),
        ];
        // Table layout and row size are loop-invariant — build them once.
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        for (token_type, token_value) in test_cases {
            let encmap_entry = make_entry(1, token_value);
            let mut buffer = vec![0u8; row_size];
            let mut offset = 0;
            encmap_entry
                .row_write(&mut buffer, &mut offset, 1, &table_info)
                .unwrap_or_else(|_| panic!("Serialization should succeed for {token_type}"));
            let mut read_offset = 0;
            let deserialized_row = EncMapRaw::row_read(&buffer, &mut read_offset, 1, &table_info)
                .unwrap_or_else(|_| panic!("Deserialization should succeed for {token_type}"));
            assert_eq!(
                encmap_entry.original_token.value(),
                deserialized_row.original_token.value(),
                "Token value mismatch for {token_type}"
            );
        }
    }

    /// Serializes several rows back-to-back into one buffer and reads
    /// them back in order.
    #[test]
    fn test_multiple_token_mappings() {
        let entries = [
            make_entry(1, 0x0600_0001),
            make_entry(2, 0x0200_0005),
            make_entry(3, 0x0400_0010),
        ];
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        let mut buffer = vec![0u8; row_size * entries.len()];
        let mut offset = 0;
        for (i, entry) in entries.iter().enumerate() {
            entry
                .row_write(&mut buffer, &mut offset, (i + 1) as u32, &table_info)
                .expect("Serialization should succeed");
        }
        let mut read_offset = 0;
        for (i, original_entry) in entries.iter().enumerate() {
            let deserialized_row =
                EncMapRaw::row_read(&buffer, &mut read_offset, (i + 1) as u32, &table_info)
                    .expect("Deserialization should succeed");
            assert_eq!(
                original_entry.original_token.value(),
                deserialized_row.original_token.value()
            );
        }
    }

    /// Round-trips boundary token values (minimum rid, maximum row index,
    /// highest table-id byte).
    #[test]
    fn test_edge_case_tokens() {
        let test_cases = vec![
            ("Minimum token", 0x0000_0001),
            ("Maximum row", 0x00FF_FFFF),
            ("High table ID", 0xFF00_0001),
        ];
        // Loop-invariant setup hoisted out of the iteration.
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        for (description, token_value) in test_cases {
            let encmap_entry = make_entry(1, token_value);
            let mut buffer = vec![0u8; row_size];
            let mut offset = 0;
            encmap_entry
                .row_write(&mut buffer, &mut offset, 1, &table_info)
                .unwrap_or_else(|_| panic!("Serialization should succeed for {description}"));
            let mut read_offset = 0;
            let deserialized_row = EncMapRaw::row_read(&buffer, &mut read_offset, 1, &table_info)
                .unwrap_or_else(|_| panic!("Deserialization should succeed for {description}"));
            assert_eq!(
                encmap_entry.original_token.value(),
                deserialized_row.original_token.value(),
                "Token value mismatch for {description}"
            );
        }
    }

    /// Round-trips a sequence of rid-indexed mappings, one row at a time.
    #[test]
    fn test_sequential_mappings() {
        let base_tokens = [
            0x0600_0001,
            0x0600_0002,
            0x0600_0003,
            0x0200_0001,
            0x0400_0001,
        ];
        // Loop-invariant setup hoisted out of the iteration.
        let table_info = make_table_info();
        let row_size = <EncMapRaw as TableRow>::row_size(&table_info) as usize;
        for (i, &token_value) in base_tokens.iter().enumerate() {
            let rid = (i + 1) as u32;
            let encmap_entry = make_entry(rid, token_value);
            let mut buffer = vec![0u8; row_size];
            let mut offset = 0;
            encmap_entry
                .row_write(&mut buffer, &mut offset, rid, &table_info)
                .expect("Serialization should succeed");
            let mut read_offset = 0;
            let deserialized_row =
                EncMapRaw::row_read(&buffer, &mut read_offset, rid, &table_info)
                    .expect("Deserialization should succeed");
            assert_eq!(
                encmap_entry.original_token.value(),
                deserialized_row.original_token.value()
            );
        }
    }
}