1use super::*;
2
/// An in-memory metadata file image: the raw bytes plus the byte offsets of
/// the `#Strings` and `#Blob` heaps and the decoded layout of the 17 tables
/// this reader exposes (indexed by the `*::TABLE` constants).
pub struct File {
    // Entire file contents, read into memory up front.
    bytes: Vec<u8>,
    // Byte offset of the `#Strings` heap within `bytes` (0 until parsed).
    strings: usize,
    // Byte offset of the `#Blob` heap within `bytes` (0 until parsed).
    blobs: usize,
    // Offset / row-count / row-width / column layout for each exposed table.
    tables: [Table; 17],
}
9
impl File {
    /// Reads the file at `path` and parses it as CLR metadata.
    /// Returns `None` on any I/O failure or parse failure.
    pub fn read<P: AsRef<std::path::Path>>(path: P) -> Option<Self> {
        std::fs::read(path).ok().and_then(Self::new)
    }

    /// Parses `bytes` as a PE image carrying CLR metadata (ECMA-335 physical
    /// layout), recording the heap offsets and the size/offset layout of every
    /// table. Returns `None` whenever a header is missing, truncated,
    /// misaligned, or carries the wrong signature.
    pub fn new(bytes: Vec<u8>) -> Option<Self> {
        let mut result = File {
            bytes,
            strings: 0,
            blobs: 0,
            tables: Default::default(),
        };

        // DOS header ("MZ") must be followed by the NT signature at e_lfanew.
        let dos = result.bytes.view_as::<IMAGE_DOS_HEADER>(0)?;

        if dos.e_magic != IMAGE_DOS_SIGNATURE
            || result.bytes.copy_as::<u32>(dos.e_lfanew as usize)? != IMAGE_NT_SIGNATURE
        {
            return None;
        }

        // COFF file header sits right after the 4-byte NT signature.
        let file_offset = dos.e_lfanew as usize + std::mem::size_of::<u32>();
        let file = result.bytes.view_as::<IMAGE_FILE_HEADER>(file_offset)?;

        let optional_offset = file_offset + std::mem::size_of::<IMAGE_FILE_HEADER>();

        // The optional header's magic selects PE32 vs PE32+. Either way we
        // need the COM-descriptor (CLR) data-directory RVA and the section
        // table, which begins immediately after the optional header.
        let (com_virtual_address, sections) = match result.bytes.copy_as::<u16>(optional_offset)? {
            IMAGE_NT_OPTIONAL_HDR32_MAGIC => {
                let optional = result
                    .bytes
                    .view_as::<IMAGE_OPTIONAL_HEADER32>(optional_offset)?;
                (
                    optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize]
                        .VirtualAddress,
                    result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(
                        optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER32>(),
                        file.NumberOfSections as usize,
                    )?,
                )
            }
            IMAGE_NT_OPTIONAL_HDR64_MAGIC => {
                let optional = result
                    .bytes
                    .view_as::<IMAGE_OPTIONAL_HEADER64>(optional_offset)?;
                (
                    optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize]
                        .VirtualAddress,
                    result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(
                        optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER64>(),
                        file.NumberOfSections as usize,
                    )?,
                )
            }
            _ => return None,
        };

        // The CLR header is located by translating the COM-descriptor RVA
        // through the section that contains it.
        let clr = result.bytes.view_as::<IMAGE_COR20_HEADER>(offset_from_rva(
            section_from_rva(sections, com_virtual_address)?,
            com_virtual_address,
        ))?;

        if clr.cb != std::mem::size_of::<IMAGE_COR20_HEADER>() as u32 {
            return None;
        }

        // Metadata root ("BSJB"), again located via RVA translation.
        let metadata_offset = offset_from_rva(
            section_from_rva(sections, clr.MetaData.VirtualAddress)?,
            clr.MetaData.VirtualAddress,
        );
        let metadata = result.bytes.view_as::<METADATA_HEADER>(metadata_offset)?;

        if metadata.signature != METADATA_SIGNATURE {
            return None;
        }

        // The stream directory follows the variable-length version string:
        // the stream count is at `length + 18` and the first stream header at
        // `length + 20`, both relative to the metadata root.
        let mut view = metadata_offset + metadata.length as usize + 20;
        let mut tables_data: (usize, usize) = (0, 0);

        for _ in 0..result
            .bytes
            .copy_as::<u16>(metadata_offset + metadata.length as usize + 18)?
        {
            // Each stream header: u32 offset, u32 size, then a nul-terminated
            // name padded to a 4-byte boundary.
            let stream_offset = result.bytes.copy_as::<u32>(view)? as usize;
            let stream_len = result.bytes.copy_as::<u32>(view + 4)? as usize;
            let stream_name = result.bytes.view_as_str(view + 8)?;
            match stream_name {
                b"#Strings" => result.strings = metadata_offset + stream_offset,
                b"#Blob" => result.blobs = metadata_offset + stream_offset,
                b"#~" => tables_data = (metadata_offset + stream_offset, stream_len),
                b"#GUID" => {}
                b"#US" => {}
                rest => panic!("{rest:?}"),
            }
            // `stream_name` excludes the nul terminator, so a name whose
            // length is already a multiple of 4 still needs 4 more bytes
            // (terminator + padding). `4 - len % 4` yields 1..=4, so the
            // `== 0` branch is defensive and never taken.
            let mut padding = 4 - stream_name.len() % 4;
            if padding == 0 {
                padding = 4;
            }
            view += 8 + stream_name.len() + padding;
        }

        // "#~" stream header: byte 6 holds the heap-size flags (bit 0 =>
        // 4-byte #Strings indexes, bit 1 => #GUID, bit 2 => #Blob), and the
        // u64 at byte 8 is the bitmask of tables present in the stream.
        let heap_sizes = result.bytes.copy_as::<u8>(tables_data.0 + 6)?;
        let string_index_size = if (heap_sizes & 1) == 1 { 4 } else { 2 };
        let guid_index_size = if ((heap_sizes >> 1) & 1) == 1 { 4 } else { 2 };
        let blob_index_size = if ((heap_sizes >> 2) & 1) == 1 { 4 } else { 2 };
        let valid_bits = result.bytes.copy_as::<u64>(tables_data.0 + 8)?;
        // Row counts begin at byte 24 of the "#~" stream.
        view = tables_data.0 + 24;

        // Tables that are never queried afterwards still need their layouts
        // computed: their row counts affect coded-index widths, and their row
        // sizes affect where the tables we do care about start.
        let unused_empty = Table::default();
        let mut unused_assembly = Table::default();
        let mut unused_assembly_os = Table::default();
        let mut unused_assembly_processor = Table::default();
        let mut unused_assembly_ref_os = Table::default();
        let mut unused_assembly_ref = Table::default();
        let mut unused_assembly_ref_processor = Table::default();
        let mut unused_decl_security = Table::default();
        let mut unused_event = Table::default();
        let mut unused_event_map = Table::default();
        let mut unused_exported_type = Table::default();
        let mut unused_field_layout = Table::default();
        let mut unused_field_marshal = Table::default();
        let mut unused_field_rva = Table::default();
        let mut unused_file = Table::default();
        let mut unused_generic_param_constraint = Table::default();
        let mut unused_manifest_resource = Table::default();
        let mut unused_method_impl = Table::default();
        let mut unused_method_semantics = Table::default();
        let mut unused_method_spec = Table::default();
        let mut unused_property = Table::default();
        let mut unused_property_map = Table::default();
        let mut unused_standalone_sig = Table::default();
        let mut unused_module = Table::default();

        // For every bit set in `valid_bits` there is one u32 row count, in
        // ascending table-index order.
        for i in 0..64 {
            if ((valid_bits >> i) & 1) == 0 {
                continue;
            }

            let len = result.bytes.copy_as::<u32>(view)? as usize;
            view += 4;

            match i {
                0x00 => unused_module.len = len,
                0x01 => result.tables[TypeRef::TABLE].len = len,
                0x02 => result.tables[TypeDef::TABLE].len = len,
                0x04 => result.tables[Field::TABLE].len = len,
                0x06 => result.tables[MethodDef::TABLE].len = len,
                0x08 => result.tables[MethodParam::TABLE].len = len,
                0x09 => result.tables[InterfaceImpl::TABLE].len = len,
                0x0a => result.tables[MemberRef::TABLE].len = len,
                0x0b => result.tables[Constant::TABLE].len = len,
                0x0c => result.tables[Attribute::TABLE].len = len,
                0x0d => unused_field_marshal.len = len,
                0x0e => unused_decl_security.len = len,
                0x0f => result.tables[ClassLayout::TABLE].len = len,
                0x10 => unused_field_layout.len = len,
                0x11 => unused_standalone_sig.len = len,
                0x12 => unused_event_map.len = len,
                0x14 => unused_event.len = len,
                0x15 => unused_property_map.len = len,
                0x17 => unused_property.len = len,
                0x18 => unused_method_semantics.len = len,
                0x19 => unused_method_impl.len = len,
                0x1a => result.tables[ModuleRef::TABLE].len = len,
                0x1b => result.tables[TypeSpec::TABLE].len = len,
                0x1c => result.tables[ImplMap::TABLE].len = len,
                0x1d => unused_field_rva.len = len,
                0x20 => unused_assembly.len = len,
                0x21 => unused_assembly_processor.len = len,
                0x22 => unused_assembly_os.len = len,
                0x23 => unused_assembly_ref.len = len,
                0x24 => unused_assembly_ref_processor.len = len,
                0x25 => unused_assembly_ref_os.len = len,
                0x26 => unused_file.len = len,
                0x27 => unused_exported_type.len = len,
                0x28 => unused_manifest_resource.len = len,
                0x29 => result.tables[NestedClass::TABLE].len = len,
                0x2a => result.tables[GenericParam::TABLE].len = len,
                0x2b => unused_method_spec.len = len,
                0x2c => unused_generic_param_constraint.len = len,
                _ => unreachable!(),
            };
        }

        // Coded-index widths: a coded index is 2 bytes unless some candidate
        // table has too many rows once the tag bits are reserved (see
        // `coded_index_size`). The candidate lists below follow the order the
        // tag values are assigned in.
        let tables = &result.tables;
        let type_def_or_ref = coded_index_size(&[
            tables[TypeDef::TABLE].len,
            tables[TypeRef::TABLE].len,
            tables[TypeSpec::TABLE].len,
        ]);
        let has_constant = coded_index_size(&[
            tables[Field::TABLE].len,
            tables[MethodParam::TABLE].len,
            unused_property.len,
        ]);
        let has_field_marshal =
            coded_index_size(&[tables[Field::TABLE].len, tables[MethodParam::TABLE].len]);
        let has_decl_security = coded_index_size(&[
            tables[TypeDef::TABLE].len,
            tables[MethodDef::TABLE].len,
            unused_assembly.len,
        ]);
        let member_ref_parent = coded_index_size(&[
            tables[TypeDef::TABLE].len,
            tables[TypeRef::TABLE].len,
            tables[ModuleRef::TABLE].len,
            tables[MethodDef::TABLE].len,
            tables[TypeSpec::TABLE].len,
        ]);
        let has_semantics = coded_index_size(&[unused_event.len, unused_property.len]);
        let method_def_or_ref =
            coded_index_size(&[tables[MethodDef::TABLE].len, tables[MemberRef::TABLE].len]);
        let member_forwarded =
            coded_index_size(&[tables[Field::TABLE].len, tables[MethodDef::TABLE].len]);
        let implementation = coded_index_size(&[
            unused_file.len,
            unused_assembly_ref.len,
            unused_exported_type.len,
        ]);
        // The empty entries stand in for tag values that never occur here but
        // still widen the tag field.
        let custom_attribute_type = coded_index_size(&[
            tables[MethodDef::TABLE].len,
            tables[MemberRef::TABLE].len,
            unused_empty.len,
            unused_empty.len,
            unused_empty.len,
        ]);
        let resolution_scope = coded_index_size(&[
            unused_module.len,
            tables[ModuleRef::TABLE].len,
            unused_assembly_ref.len,
            tables[TypeRef::TABLE].len,
        ]);
        let type_or_method_def =
            coded_index_size(&[tables[TypeDef::TABLE].len, tables[MethodDef::TABLE].len]);

        let has_custom_attribute = coded_index_size(&[
            tables[MethodDef::TABLE].len,
            tables[Field::TABLE].len,
            tables[TypeRef::TABLE].len,
            tables[TypeDef::TABLE].len,
            tables[MethodParam::TABLE].len,
            tables[InterfaceImpl::TABLE].len,
            tables[MemberRef::TABLE].len,
            unused_module.len,
            unused_property.len,
            unused_event.len,
            unused_standalone_sig.len,
            tables[ModuleRef::TABLE].len,
            tables[TypeSpec::TABLE].len,
            unused_assembly.len,
            unused_assembly_ref.len,
            unused_file.len,
            unused_exported_type.len,
            unused_manifest_resource.len,
            tables[GenericParam::TABLE].len,
            unused_generic_param_constraint.len,
            unused_method_spec.len,
        ]);

        // Column layouts. Literal numbers are fixed-width fields; the computed
        // sizes are heap indexes, coded indexes, or plain table indexes.
        unused_assembly.set_columns(
            4,
            8,
            4,
            blob_index_size,
            string_index_size,
            string_index_size,
        );
        unused_assembly_os.set_columns(4, 4, 4, 0, 0, 0);
        unused_assembly_processor.set_columns(4, 0, 0, 0, 0, 0);
        unused_assembly_ref.set_columns(
            8,
            4,
            blob_index_size,
            string_index_size,
            string_index_size,
            blob_index_size,
        );
        unused_assembly_ref_os.set_columns(4, 4, 4, unused_assembly_ref.index_width(), 0, 0);
        unused_assembly_ref_processor.set_columns(4, unused_assembly_ref.index_width(), 0, 0, 0, 0);
        result.tables[ClassLayout::TABLE].set_columns(
            2,
            4,
            result.tables[TypeDef::TABLE].index_width(),
            0,
            0,
            0,
        );
        result.tables[Constant::TABLE].set_columns(2, has_constant, blob_index_size, 0, 0, 0);
        result.tables[Attribute::TABLE].set_columns(
            has_custom_attribute,
            custom_attribute_type,
            blob_index_size,
            0,
            0,
            0,
        );
        unused_decl_security.set_columns(2, has_decl_security, blob_index_size, 0, 0, 0);
        unused_event_map.set_columns(
            result.tables[TypeDef::TABLE].index_width(),
            unused_event.index_width(),
            0,
            0,
            0,
            0,
        );
        unused_event.set_columns(2, string_index_size, type_def_or_ref, 0, 0, 0);
        unused_exported_type.set_columns(
            4,
            4,
            string_index_size,
            string_index_size,
            implementation,
            0,
        );
        result.tables[Field::TABLE].set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
        unused_field_layout.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
        unused_field_marshal.set_columns(has_field_marshal, blob_index_size, 0, 0, 0, 0);
        unused_field_rva.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
        unused_file.set_columns(4, string_index_size, blob_index_size, 0, 0, 0);
        result.tables[GenericParam::TABLE].set_columns(
            2,
            2,
            type_or_method_def,
            string_index_size,
            0,
            0,
        );
        unused_generic_param_constraint.set_columns(
            result.tables[GenericParam::TABLE].index_width(),
            type_def_or_ref,
            0,
            0,
            0,
            0,
        );
        result.tables[ImplMap::TABLE].set_columns(
            2,
            member_forwarded,
            string_index_size,
            result.tables[ModuleRef::TABLE].index_width(),
            0,
            0,
        );
        result.tables[InterfaceImpl::TABLE].set_columns(
            result.tables[TypeDef::TABLE].index_width(),
            type_def_or_ref,
            0,
            0,
            0,
            0,
        );
        unused_manifest_resource.set_columns(4, 4, string_index_size, implementation, 0, 0);
        result.tables[MemberRef::TABLE].set_columns(
            member_ref_parent,
            string_index_size,
            blob_index_size,
            0,
            0,
            0,
        );
        result.tables[MethodDef::TABLE].set_columns(
            4,
            2,
            2,
            string_index_size,
            blob_index_size,
            result.tables[MethodParam::TABLE].index_width(),
        );
        unused_method_impl.set_columns(
            result.tables[TypeDef::TABLE].index_width(),
            method_def_or_ref,
            method_def_or_ref,
            0,
            0,
            0,
        );
        unused_method_semantics.set_columns(
            2,
            result.tables[MethodDef::TABLE].index_width(),
            has_semantics,
            0,
            0,
            0,
        );
        unused_method_spec.set_columns(method_def_or_ref, blob_index_size, 0, 0, 0, 0);
        unused_module.set_columns(
            2,
            string_index_size,
            guid_index_size,
            guid_index_size,
            guid_index_size,
            0,
        );
        result.tables[ModuleRef::TABLE].set_columns(string_index_size, 0, 0, 0, 0, 0);
        result.tables[NestedClass::TABLE].set_columns(
            result.tables[TypeDef::TABLE].index_width(),
            result.tables[TypeDef::TABLE].index_width(),
            0,
            0,
            0,
            0,
        );
        result.tables[MethodParam::TABLE].set_columns(2, 2, string_index_size, 0, 0, 0);
        unused_property.set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
        unused_property_map.set_columns(
            result.tables[TypeDef::TABLE].index_width(),
            unused_property.index_width(),
            0,
            0,
            0,
            0,
        );
        unused_standalone_sig.set_columns(blob_index_size, 0, 0, 0, 0, 0);
        result.tables[TypeDef::TABLE].set_columns(
            4,
            string_index_size,
            string_index_size,
            type_def_or_ref,
            result.tables[Field::TABLE].index_width(),
            result.tables[MethodDef::TABLE].index_width(),
        );
        result.tables[TypeRef::TABLE].set_columns(
            resolution_scope,
            string_index_size,
            string_index_size,
            0,
            0,
            0,
        );
        result.tables[TypeSpec::TABLE].set_columns(blob_index_size, 0, 0, 0, 0, 0);

        // Table bodies are laid out back to back after the row counts, in
        // table-index order; walking them in that same order with a running
        // cursor assigns each table its starting offset.
        unused_module.set_data(&mut view);
        result.tables[TypeRef::TABLE].set_data(&mut view);
        result.tables[TypeDef::TABLE].set_data(&mut view);
        result.tables[Field::TABLE].set_data(&mut view);
        result.tables[MethodDef::TABLE].set_data(&mut view);
        result.tables[MethodParam::TABLE].set_data(&mut view);
        result.tables[InterfaceImpl::TABLE].set_data(&mut view);
        result.tables[MemberRef::TABLE].set_data(&mut view);
        result.tables[Constant::TABLE].set_data(&mut view);
        result.tables[Attribute::TABLE].set_data(&mut view);
        unused_field_marshal.set_data(&mut view);
        unused_decl_security.set_data(&mut view);
        result.tables[ClassLayout::TABLE].set_data(&mut view);
        unused_field_layout.set_data(&mut view);
        unused_standalone_sig.set_data(&mut view);
        unused_event_map.set_data(&mut view);
        unused_event.set_data(&mut view);
        unused_property_map.set_data(&mut view);
        unused_property.set_data(&mut view);
        unused_method_semantics.set_data(&mut view);
        unused_method_impl.set_data(&mut view);
        result.tables[ModuleRef::TABLE].set_data(&mut view);
        result.tables[TypeSpec::TABLE].set_data(&mut view);
        result.tables[ImplMap::TABLE].set_data(&mut view);
        unused_field_rva.set_data(&mut view);
        unused_assembly.set_data(&mut view);
        unused_assembly_processor.set_data(&mut view);
        unused_assembly_os.set_data(&mut view);
        unused_assembly_ref.set_data(&mut view);
        unused_assembly_ref_processor.set_data(&mut view);
        unused_assembly_ref_os.set_data(&mut view);
        unused_file.set_data(&mut view);
        unused_exported_type.set_data(&mut view);
        unused_manifest_resource.set_data(&mut view);
        result.tables[NestedClass::TABLE].set_data(&mut view);
        result.tables[GenericParam::TABLE].set_data(&mut view);

        Some(result)
    }

    /// Reads the cell at (`row`, `column`) of `table`, widened to `usize`.
    /// The read width comes from the column layout; out-of-bounds reads
    /// silently yield 0.
    pub(crate) fn usize(&self, row: usize, table: usize, column: usize) -> usize {
        let table = &self.tables[table];
        let column = &table.columns[column];
        let offset = table.offset + row * table.width + column.offset;
        match column.width {
            1 => self.bytes.copy_as::<u8>(offset).map_or(0, |v| v as usize),
            2 => self.bytes.copy_as::<u16>(offset).map_or(0, |v| v as usize),
            4 => self.bytes.copy_as::<u32>(offset).map_or(0, |v| v as usize),
            _ => self.bytes.copy_as::<u64>(offset).map_or(0, |v| v as usize),
        }
    }

    /// Resolves a `#Strings`-heap index cell to the nul-terminated UTF-8
    /// string it points at.
    ///
    /// Panics (rather than returning an error) on a missing terminator or
    /// invalid UTF-8 — malformed metadata is treated as a bug here.
    pub(crate) fn str(&self, row: usize, table: usize, column: usize) -> &str {
        let offset = self.strings + self.usize(row, table, column);
        let bytes = &self.bytes[offset..];
        let nul_pos = bytes
            .iter()
            .position(|&c| c == 0)
            .expect("expected null-terminated C-string");
        std::str::from_utf8(&bytes[..nul_pos]).expect("expected valid utf-8 C-string")
    }

    /// Resolves a `#Blob`-heap index cell to the blob bytes it points at.
    ///
    /// The blob starts with a compressed length: the high bits of the first
    /// byte select a 1-, 2-, or 4-byte big-endian length prefix
    /// (`0xxxxxxx`, `10xxxxxx`, `110xxxxx`); `111…` is invalid and panics.
    pub(crate) fn blob(&self, row: usize, table: usize, column: usize) -> &[u8] {
        let offset = self.blobs + self.usize(row, table, column);
        let initial_byte = self.bytes[offset];

        let (blob_size, blob_size_bytes) = match initial_byte >> 5 {
            0..=3 => (initial_byte & 0x7f, 1),
            4..=5 => (initial_byte & 0x3f, 2),
            6 => (initial_byte & 0x1f, 4),
            rest => panic!("{rest:?}"),
        };

        let mut blob_size = blob_size as usize;

        // Fold in the remaining length bytes, most significant first.
        for byte in &self.bytes[offset + 1..offset + blob_size_bytes] {
            blob_size = blob_size.checked_shl(8).unwrap_or(0) + (*byte as usize);
        }

        let offset = offset + blob_size_bytes;
        &self.bytes[offset..offset + blob_size]
    }

    /// Resolves a one-based "list" column into a zero-based row range in
    /// `other_table`: the range runs from this row's stored index up to the
    /// next row's stored index (or to the end of `other_table` for the last
    /// row).
    ///
    /// NOTE(review): a stored index of 0 would underflow `- 1`; this assumes
    /// well-formed metadata where list columns are always >= 1 — confirm.
    pub(crate) fn list(
        &self,
        row: usize,
        table: usize,
        column: usize,
        other_table: usize,
    ) -> std::ops::Range<usize> {
        let first = self.usize(row, table, column) - 1;
        let next = row + 1;
        let last = if next < self.tables[table].len {
            self.usize(next, table, column) - 1
        } else {
            self.tables[other_table].len
        };
        first..last
    }

    /// Binary-searches `column` of `table` for rows whose value equals
    /// `value`, returning the matching row range (empty when absent).
    /// Assumes the column is sorted ascending.
    pub(crate) fn equal_range(
        &self,
        table: usize,
        column: usize,
        value: usize,
    ) -> std::ops::Range<usize> {
        let mut first = 0;
        let mut last = self.tables[table].len;
        let mut count = last;

        loop {
            if count == 0 {
                // No match: collapse to an empty range at `first`.
                last = first;
                break;
            }

            let count2 = count / 2;
            let middle = first + count2;
            let middle_value = self.usize(middle, table, column);

            match middle_value.cmp(&value) {
                Ordering::Less => {
                    first = middle + 1;
                    count -= count2 + 1;
                }
                Ordering::Greater => count = count2,
                Ordering::Equal => {
                    // Found one hit; tighten both edges around the probe.
                    // `first + count` bounds the right-hand search.
                    let first2 = self.lower_bound(table, first, middle, column, value);
                    first += count;
                    last = self.upper_bound(table, middle + 1, first, column, value);
                    first = first2;
                    break;
                }
            }
        }

        first..last
    }

    /// Finds the row in `table` that "owns" `row` of a child table: the last
    /// row whose sorted `column` value does not exceed the one-based `row + 1`.
    pub(crate) fn parent(&self, row: usize, table: usize, column: usize) -> usize {
        self.upper_bound(table, 0, self.tables[table].len, column, row + 1) - 1
    }

    /// First index in `[first, last)` whose column value is not less than
    /// `value` (classic lower-bound over a sorted column).
    fn lower_bound(
        &self,
        table: usize,
        mut first: usize,
        last: usize,
        column: usize,
        value: usize,
    ) -> usize {
        let mut count = last - first;
        while count > 0 {
            let count2 = count / 2;
            let middle = first + count2;
            if self.usize(middle, table, column) < value {
                first = middle + 1;
                count -= count2 + 1;
            } else {
                count = count2;
            }
        }
        first
    }

    /// First index in `[first, last)` whose column value is greater than
    /// `value` (classic upper-bound over a sorted column).
    fn upper_bound(
        &self,
        table: usize,
        mut first: usize,
        last: usize,
        column: usize,
        value: usize,
    ) -> usize {
        let mut count = last - first;
        while count > 0 {
            let count2 = count / 2;
            let middle = first + count2;
            if value < self.usize(middle, table, column) {
                count = count2
            } else {
                first = middle + 1;
                count -= count2 + 1;
            }
        }
        first
    }

    /// Row-index range covering every row of the TypeDef table.
    pub(crate) fn TypeDef(&self) -> std::ops::Range<usize> {
        0..self.tables[TypeDef::TABLE].len
    }

    /// Row-index range covering every row of the NestedClass table.
    pub(crate) fn NestedClass(&self) -> std::ops::Range<usize> {
        0..self.tables[NestedClass::TABLE].len
    }
}
637
638fn section_from_rva(sections: &[IMAGE_SECTION_HEADER], rva: u32) -> Option<&IMAGE_SECTION_HEADER> {
639 sections.iter().find(|&s| {
640 rva >= s.VirtualAddress && rva < s.VirtualAddress + unsafe { s.Misc.VirtualSize }
641 })
642}
643
644fn offset_from_rva(section: &IMAGE_SECTION_HEADER, rva: u32) -> usize {
645 (rva - section.VirtualAddress + section.PointerToRawData) as usize
646}
647
/// Checked accessors over a raw byte buffer: each returns `None` instead of
/// reading past the end (or from a misaligned address) of the buffer.
trait View {
    /// Borrows the bytes at `offset` as a `&T` (length- and alignment-checked).
    fn view_as<T>(&self, offset: usize) -> Option<&T>;
    /// Borrows `len` consecutive `T`s starting at `offset`.
    fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> Option<&[T]>;
    /// Copies the bytes at `offset` out as an owned `T` (length-checked only).
    fn copy_as<T: Copy>(&self, offset: usize) -> Option<T>;
    /// Returns the nul-terminated byte string at `offset`, excluding the nul.
    fn view_as_str(&self, offset: usize) -> Option<&[u8]>;
    /// `Some(())` when one `T` starting at `offset` fits inside the buffer.
    fn is_proper_length<T>(&self, offset: usize) -> Option<()>;
    /// Pointer to `count` `T`s at `offset`, validated for length and alignment.
    fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> Option<*const T>;
}
656
657impl View for [u8] {
658 fn view_as<T>(&self, offset: usize) -> Option<&T> {
659 unsafe { Some(&*self.is_proper_length_and_alignment(offset, 1)?) }
660 }
661
662 fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> Option<&[T]> {
663 unsafe {
664 Some(std::slice::from_raw_parts(
665 self.is_proper_length_and_alignment(offset, len)?,
666 len,
667 ))
668 }
669 }
670
671 fn copy_as<T>(&self, offset: usize) -> Option<T> {
672 self.is_proper_length::<T>(offset)?;
673
674 unsafe {
675 let mut data = std::mem::MaybeUninit::zeroed().assume_init();
676 core::ptr::copy_nonoverlapping(
677 self[offset..].as_ptr(),
678 &mut data as *mut T as *mut u8,
679 std::mem::size_of::<T>(),
680 );
681 Some(data)
682 }
683 }
684
685 fn view_as_str(&self, offset: usize) -> Option<&[u8]> {
686 let buffer = &self[offset..];
687 let pos = buffer.iter().position(|c| *c == b'\0')?;
688 Some(&self[offset..offset + pos])
689 }
690
691 fn is_proper_length<T>(&self, offset: usize) -> Option<()> {
692 if offset + std::mem::size_of::<T>() <= self.len() {
693 Some(())
694 } else {
695 None
696 }
697 }
698
699 fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> Option<*const T> {
700 self.is_proper_length::<T>(offset * count)?;
701 let ptr = &self[offset] as *const u8 as *const T;
702
703 if ptr.align_offset(std::mem::align_of::<T>()) == 0 {
704 Some(ptr)
705 } else {
706 None
707 }
708 }
709}
710
/// Layout of one metadata table: where its rows begin in the file, how many
/// rows it holds, the byte width of a single row, and up to six column layouts.
#[derive(Default)]
struct Table {
    offset: usize, // byte offset of the first row within the file
    len: usize,    // number of rows
    width: usize,  // size in bytes of one row (sum of the column widths)
    columns: [Column; 6],
}
718
719impl Table {
720 fn index_width(&self) -> usize {
721 if self.len < (1 << 16) {
722 2
723 } else {
724 4
725 }
726 }
727
728 fn set_columns(&mut self, a: usize, b: usize, c: usize, d: usize, e: usize, f: usize) {
729 self.width = a + b + c + d + e + f;
730 self.columns[0] = Column::new(0, a);
731 if b != 0 {
732 self.columns[1] = Column::new(a, b);
733 }
734 if c != 0 {
735 self.columns[2] = Column::new(a + b, c);
736 }
737 if d != 0 {
738 self.columns[3] = Column::new(a + b + c, d);
739 }
740 if e != 0 {
741 self.columns[4] = Column::new(a + b + c + d, e);
742 }
743 if f != 0 {
744 self.columns[5] = Column::new(a + b + c + d + e, f);
745 }
746 }
747
748 fn set_data(&mut self, offset: &mut usize) {
749 if self.len != 0 {
750 let next = *offset + self.len * self.width;
751 self.offset = *offset;
752 *offset = next;
753 }
754 }
755}
756
/// Byte offset and width of a single column within a table row.
#[derive(Default)]
struct Column {
    offset: usize, // offset of the column from the start of the row
    width: usize,  // column size in bytes (1, 2, 4, or 8)
}
762
763impl Column {
764 fn new(offset: usize, width: usize) -> Self {
765 Self { offset, width }
766 }
767}
768
/// On-disk layout of the CLR metadata root header.
///
/// NOTE(review): `version` is declared as a fixed 20-byte array, but the
/// on-disk version string is actually `length` bytes long — the parser
/// locates `flags`/`streams` via `length`, not via this struct's size.
#[repr(C)]
#[derive(Default)]
struct METADATA_HEADER {
    signature: u32, // must equal METADATA_SIGNATURE
    major_version: u16,
    minor_version: u16,
    reserved: u32,
    length: u32, // byte length of the version string that follows
    version: [u8; 20],
    flags: u16,
    streams: u16, // number of stream headers following this header
}
781
/// Magic at the start of the metadata root: "BSJB" read as a little-endian u32.
const METADATA_SIGNATURE: u32 = 0x424A_5342;
783
/// Width in bytes (2 or 4) of a coded index able to refer to any of the given
/// candidate tables, where `tables` holds each candidate's row count.
///
/// A coded index packs a table tag into its low bits, leaving `16 - tag_bits`
/// bits for the row number in the 2-byte form; if any candidate has too many
/// rows to fit there, the 4-byte form is required.
fn coded_index_size(tables: &[usize]) -> usize {
    // Bits needed to tag which of `tables.len()` candidates is referenced.
    let mut tag_bits: u8 = 1;
    let mut remaining = (tables.len() - 1) >> 1;
    while remaining != 0 {
        tag_bits += 1;
        remaining >>= 1;
    }

    // Largest row count (exclusive) that still fits the 2-byte form.
    let limit = 1u64 << (16 - tag_bits);
    if tables.iter().all(|&rows| (rows as u64) < limit) {
        2
    } else {
        4
    }
}
811}