use crate::{
metadata::tables::{
property::PropertyRaw,
types::{RowWritable, TableInfoRef},
},
utils::{write_le_at, write_le_at_dyn},
Error, Result,
};
impl RowWritable for PropertyRaw {
fn row_write(
&self,
data: &mut [u8],
offset: &mut usize,
_rid: u32,
sizes: &TableInfoRef,
) -> Result<()> {
let flags_u16 = u16::try_from(self.flags).map_err(|_| {
Error::LayoutFailed("Property flags value exceeds u16 range".to_string())
})?;
write_le_at(data, offset, flags_u16)?;
write_le_at_dyn(data, offset, self.name, sizes.is_large_str())?;
write_le_at_dyn(data, offset, self.signature, sizes.is_large_blob())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        metadata::tables::types::{RowReadable, TableInfo, TableRow},
        metadata::token::Token,
    };
    use std::sync::Arc;

    /// Builds a test `TableInfo` with no table rows and the given heap-size flags.
    fn make_info(large_str: bool, large_blob: bool) -> Arc<TableInfo> {
        Arc::new(TableInfo::new_test(&[], large_str, large_blob, false))
    }

    /// Parses `raw` as a Property row, re-serializes it, and asserts the
    /// output bytes are identical to the input (byte-format stability).
    fn assert_byte_stable(raw: &[u8], info: &Arc<TableInfo>) {
        let mut read_pos = 0;
        let parsed = PropertyRaw::row_read(raw, &mut read_pos, 1, info)
            .expect("Reading reference data should succeed");
        let mut rewritten = vec![0u8; raw.len()];
        let mut write_pos = 0;
        parsed
            .row_write(&mut rewritten, &mut write_pos, 1, info)
            .expect("Serialization should succeed");
        assert_eq!(
            rewritten, raw,
            "Serialized data should match original binary format"
        );
    }

    #[test]
    fn test_row_size() {
        // Small heaps: 2 (flags) + 2 (name) + 2 (signature).
        let small = make_info(false, false);
        assert_eq!(<PropertyRaw as TableRow>::row_size(&small), 6);
        // Large heaps: 2 (flags) + 4 (name) + 4 (signature).
        let large = make_info(true, true);
        assert_eq!(<PropertyRaw as TableRow>::row_size(&large), 10);
    }

    #[test]
    fn test_round_trip_serialization() {
        let source = PropertyRaw {
            rid: 1,
            token: Token::new(0x17000001),
            offset: 0,
            flags: 0x0101,
            name: 0x0202,
            signature: 0x0303,
        };
        let info = make_info(false, false);
        let expected_len = <PropertyRaw as TableRow>::row_size(&info) as usize;

        let mut bytes = vec![0u8; expected_len];
        let mut write_pos = 0;
        source
            .row_write(&mut bytes, &mut write_pos, 1, &info)
            .expect("Serialization should succeed");

        let mut read_pos = 0;
        let round_tripped = PropertyRaw::row_read(&bytes, &mut read_pos, 1, &info)
            .expect("Deserialization should succeed");

        assert_eq!(round_tripped.rid, source.rid);
        assert_eq!(round_tripped.flags, source.flags);
        assert_eq!(round_tripped.name, source.name);
        assert_eq!(round_tripped.signature, source.signature);
        assert_eq!(
            write_pos, expected_len,
            "Offset should match expected row size"
        );
    }

    #[test]
    fn test_known_binary_format_small_heap() {
        // flags=0x0101, name=0x0202, signature=0x0303 — all 2-byte LE fields.
        let raw = [0x01, 0x01, 0x02, 0x02, 0x03, 0x03];
        assert_byte_stable(&raw, &make_info(false, false));
    }

    #[test]
    fn test_known_binary_format_large_heap() {
        // flags stays 2 bytes; name and signature widen to 4-byte LE indexes.
        let raw = [
            0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03,
        ];
        assert_byte_stable(&raw, &make_info(true, true));
    }

    #[test]
    fn test_property_attributes() {
        // Every combination of SpecialName / RTSpecialName / HasDefault bits.
        let test_cases = vec![
            (0x0000, "None"),
            (0x0200, "SpecialName"),
            (0x0400, "RTSpecialName"),
            (0x0600, "SpecialName|RTSpecialName"),
            (0x1000, "HasDefault"),
            (0x1200, "SpecialName|HasDefault"),
            (0x1400, "RTSpecialName|HasDefault"),
            (0x1600, "SpecialName|RTSpecialName|HasDefault"),
        ];
        let info = make_info(false, false);
        for (flags, description) in test_cases {
            let row = PropertyRaw {
                rid: 1,
                token: Token::new(0x17000001),
                offset: 0,
                flags,
                name: 0x100,
                signature: 0x200,
            };
            let len = <PropertyRaw as TableRow>::row_size(&info) as usize;
            let mut buf = vec![0u8; len];
            let mut write_pos = 0;
            row.row_write(&mut buf, &mut write_pos, 1, &info)
                .unwrap_or_else(|_| panic!("Serialization should succeed for {description}"));

            let mut read_pos = 0;
            let back = PropertyRaw::row_read(&buf, &mut read_pos, 1, &info)
                .unwrap_or_else(|_| panic!("Deserialization should succeed for {description}"));
            assert_eq!(
                back.flags, row.flags,
                "Flags should match for {description}"
            );
        }
    }

    #[test]
    fn test_large_heap_serialization() {
        // Heap indexes above 0xFFFF require the 4-byte (large-heap) encoding.
        let source = PropertyRaw {
            rid: 1,
            token: Token::new(0x17000001),
            offset: 0,
            flags: 0x1600,
            name: 0x123456,
            signature: 0x789ABC,
        };
        let info = make_info(true, true);
        let len = <PropertyRaw as TableRow>::row_size(&info) as usize;

        let mut buf = vec![0u8; len];
        let mut write_pos = 0;
        source
            .row_write(&mut buf, &mut write_pos, 1, &info)
            .expect("Large heap serialization should succeed");

        let mut read_pos = 0;
        let back = PropertyRaw::row_read(&buf, &mut read_pos, 1, &info)
            .expect("Large heap deserialization should succeed");
        assert_eq!(back.flags, source.flags);
        assert_eq!(back.name, source.name);
        assert_eq!(back.signature, source.signature);
    }

    #[test]
    fn test_edge_cases() {
        // All-zero row: smallest representable property.
        let minimal = PropertyRaw {
            rid: 1,
            token: Token::new(0x17000001),
            offset: 0,
            flags: 0,
            name: 0,
            signature: 0,
        };
        let info = make_info(false, false);
        let len = <PropertyRaw as TableRow>::row_size(&info) as usize;

        let mut buf = vec![0u8; len];
        let mut write_pos = 0;
        minimal
            .row_write(&mut buf, &mut write_pos, 1, &info)
            .expect("Minimal property serialization should succeed");

        let mut read_pos = 0;
        let back = PropertyRaw::row_read(&buf, &mut read_pos, 1, &info)
            .expect("Minimal property deserialization should succeed");
        assert_eq!(back.flags, minimal.flags);
        assert_eq!(back.name, minimal.name);
        assert_eq!(back.signature, minimal.signature);
    }

    #[test]
    fn test_flags_range_validation() {
        // flags wider than 16 bits cannot be encoded in the 2-byte column.
        let oversized = PropertyRaw {
            rid: 1,
            token: Token::new(0x17000001),
            offset: 0,
            flags: 0x12345678,
            name: 0x100,
            signature: 0x200,
        };
        let info = make_info(false, false);
        let len = <PropertyRaw as TableRow>::row_size(&info) as usize;
        let mut buf = vec![0u8; len];
        let mut write_pos = 0;

        let result = oversized.row_write(&mut buf, &mut write_pos, 1, &info);
        assert!(result.is_err());
        let message = result.unwrap_err().to_string();
        assert!(message.contains("Property flags value exceeds u16 range"));
    }

    #[test]
    fn test_different_heap_combinations() {
        let row = PropertyRaw {
            rid: 1,
            token: Token::new(0x17000001),
            offset: 0,
            flags: 0x1200,
            name: 0x8000,
            signature: 0x9000,
        };
        // (large_str, large_blob, expected row size in bytes)
        let test_cases = vec![
            (false, false, 6),
            (true, false, 8),
            (false, true, 8),
            (true, true, 10),
        ];
        for (large_str, large_blob, expected_size) in test_cases {
            let info = make_info(large_str, large_blob);
            let size = <PropertyRaw as TableRow>::row_size(&info) as usize;
            assert_eq!(
                size, expected_size,
                "Row size should be {expected_size} for large_str={large_str}, large_blob={large_blob}"
            );

            let mut buf = vec![0u8; size];
            let mut write_pos = 0;
            row.row_write(&mut buf, &mut write_pos, 1, &info)
                .expect("Serialization should succeed");

            let mut read_pos = 0;
            let back = PropertyRaw::row_read(&buf, &mut read_pos, 1, &info)
                .expect("Deserialization should succeed");
            assert_eq!(back.flags, row.flags);
            assert_eq!(back.name, row.name);
            assert_eq!(back.signature, row.signature);
        }
    }
}