/// Compile-time-computable memory layout for `N` fields stored
/// struct-of-arrays style in one contiguous buffer.
///
/// Offsets are assigned in decreasing field-size order (the largest field
/// gets offset 0); see `Fields::from_sizes`.
pub struct Fields<const N:usize>{
/// Maps a field's original index (position in the `sizes` array passed to
/// `from_sizes`) to its per-element byte offset.
field_id_to_offset:[usize;N],
/// The same fields ordered by ascending size — equivalently, by
/// descending offset. `grow_fields` relies on this ordering.
sorted_fields:[Field;N],
}
/// Size and per-element offset of one field within a `Fields` layout.
#[derive(Clone,Copy)]
struct Field{
/// Size of one element of this field, in bytes.
size:usize,
/// Per-element byte offset of this field; in a buffer holding `capacity`
/// elements, this field's array begins at byte `capacity * offset`.
offset:usize,
}
impl<const N: usize> Fields<N> {
    /// Builds a layout from the per-element byte size of each field.
    ///
    /// Offsets are assigned in decreasing size order (ties broken
    /// deterministically by original index), so the largest field starts at
    /// offset 0 and `sorted_fields` ends up ordered by ascending size.
    ///
    /// # Panics
    /// Panics (at compile time when const-evaluated) if `N` or any size does
    /// not fit in a `u32`.
    pub const fn from_sizes(sizes: [usize; N]) -> Self {
        // Indices must fit in the low 32 bits of the packed encoding below.
        assert!(N < u32::MAX as usize);
        // Pack each (size, original index) pair into one u64 — size in the
        // high 32 bits — so a plain u64 sort orders primarily by size.
        let mut sizes_encoded = [0u64; N];
        let mut index = 0;
        while index < N {
            assert!(sizes[index] < u32::MAX as usize);
            let size = sizes[index] as u64;
            sizes_encoded[index] = (size << 32) | (index as u64);
            index += 1;
        }
        compile_time_sort::sort_u64_slice(&mut sizes_encoded);
        let mut field_id_to_offset = [0; N];
        let mut sorted_fields = [Field { size: 0, offset: 0 }; N];
        // Walk the sorted entries from largest to smallest, handing out
        // offsets as we go; store each entry back at its sorted position so
        // `sorted_fields` is ascending by size (descending by offset).
        let mut offset = 0;
        let mut i = 0;
        while i < N {
            let encoded = sizes_encoded[N - i - 1];
            let index = encoded as u32 as usize;
            let size = (encoded >> 32) as u32 as usize;
            field_id_to_offset[index] = offset;
            sorted_fields[N - i - 1] = Field { size, offset };
            offset += size;
            i += 1;
        }
        Fields {
            field_id_to_offset,
            sorted_fields,
        }
    }

    /// Total bytes occupied by one element across all fields.
    pub const fn size(&self) -> usize {
        let mut total = 0;
        let mut i = 0;
        while i < N {
            total += self.sorted_fields[i].size;
            i += 1;
        }
        total
    }

    /// Per-element byte offset of the field with the given original index.
    ///
    /// # Panics
    /// Panics if `index >= N`.
    pub const fn offset_of(&self, index: usize) -> usize {
        self.field_id_to_offset[index]
    }

    /// Redistributes field data inside `ptr`'s buffer after it has been
    /// grown in place from `old_capacity` to `new_capacity` elements, so
    /// that each field's array sits at its new start `new_capacity * offset`.
    ///
    /// Fields are processed from highest offset to lowest, so a destination
    /// region never overwrites another field's not-yet-copied source region
    /// (given `len <= old_capacity <= new_capacity`). The last entry of
    /// `sorted_fields` has offset 0 and never moves, so it is skipped.
    ///
    /// # Safety
    /// - `ptr` must be valid for reads and writes of
    ///   `new_capacity * self.size()` bytes.
    /// - `len <= old_capacity <= new_capacity` must hold.
    pub const unsafe fn grow_fields(
        &self,
        ptr: *mut u8,
        old_capacity: usize,
        new_capacity: usize,
        len: usize,
    ) {
        debug_assert!(len <= old_capacity);
        debug_assert!(old_capacity <= new_capacity);
        let mut i = 0;
        // `i + 1 < N` (not `i < N - 1`) avoids underflow when N == 0 while
        // still skipping the final, offset-0 field.
        while i + 1 < N {
            let field = self.sorted_fields[i];
            unsafe {
                let src = ptr.add(old_capacity * field.offset);
                let dst = ptr.add(new_capacity * field.offset);
                // A field's old and new regions live in the SAME buffer and
                // can overlap (e.g. when the capacity grows by only a little),
                // so this must be `copy` (memmove), not `copy_nonoverlapping`.
                core::ptr::copy(src, dst, len * field.size);
            }
            i += 1;
        }
    }

    /// Copies the first `count` elements of every field from the buffer at
    /// `src` (laid out for `src_capacity` elements) into the buffer at `dst`
    /// (laid out for `dst_capacity` elements), writing each field's data
    /// starting at element `dst_start_index`.
    ///
    /// # Safety
    /// - `src` must be valid for reads of `src_capacity * self.size()` bytes
    ///   and `dst` valid for writes of `dst_capacity * self.size()` bytes.
    /// - The source and destination buffers must not overlap
    ///   (`copy_nonoverlapping` is used).
    /// - `count <= src_capacity` and `dst_start_index + count <= dst_capacity`
    ///   must hold.
    pub const unsafe fn move_fields(
        &self,
        src: *const u8,
        dst: *mut u8,
        src_capacity: usize,
        dst_capacity: usize,
        dst_start_index: usize,
        count: usize,
    ) {
        debug_assert!(dst_start_index + count <= dst_capacity);
        let mut i = 0;
        while i < N {
            let field = self.sorted_fields[i];
            unsafe {
                let src = src.add(src_capacity * field.offset);
                let dst = dst
                    .add(dst_capacity * field.offset)
                    .add(dst_start_index * field.size);
                core::ptr::copy_nonoverlapping(src, dst, count * field.size);
            }
            i += 1;
        }
    }
}