use core::iter::FusedIterator;
use core::mem;
use core::ops::{Deref, Range};
use zerocopy::byteorder::LittleEndian;
use zerocopy::{FromBytes, Immutable, IntoBytes, KnownLayout, Ref, SplitByteSlice, Unaligned, U32};
use crate::error::{NtHiveError, Result};
use crate::helpers::byte_subrange;
use crate::hive::Hive;
use crate::key_value::KeyValue;
/// On-disk layout of a single entry of a Key Values list:
/// the 32-bit little-endian offset of a key value cell.
/// `Unaligned`/`repr(packed)` because the hive format gives no
/// alignment guarantees for list items.
#[allow(dead_code)]
#[derive(FromBytes, Immutable, IntoBytes, KnownLayout, Unaligned)]
#[repr(packed)]
struct KeyValuesListItem {
    // Passed to `Hive::cell_range_from_data_offset` to locate the
    // referenced key value cell.
    key_value_offset: U32<LittleEndian>,
}
struct KeyValuesListItemRange(Range<usize>);
impl KeyValuesListItemRange {
    /// Reads the key value offset stored in this list item.
    ///
    /// The returned offset is meant to be resolved via
    /// `Hive::cell_range_from_data_offset`.
    fn key_value_offset<B>(&self, hive: &Hive<B>) -> u32
    where
        B: SplitByteSlice,
    {
        // This range is only ever produced by `KeyValuesListItemRanges`,
        // which carves out exactly `size_of::<KeyValuesListItem>()` bytes,
        // so the parse cannot fail. State that invariant instead of a
        // bare `unwrap`.
        let item = Ref::<&[u8], KeyValuesListItem>::from_bytes(&hive.data[self.0.clone()])
            .expect("list item range must be exactly KeyValuesListItem-sized");
        item.key_value_offset.get()
    }
}
// Deref to the inner range so callers can use a `KeyValuesListItemRange`
// wherever a `Range<usize>` is expected.
impl Deref for KeyValuesListItemRange {
    type Target = Range<usize>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Iterator over the byte ranges of the items of a Key Values list.
/// Consuming an item advances `items_range.start`, so the field always
/// holds the not-yet-yielded tail of the list.
#[derive(Clone)]
struct KeyValuesListItemRanges {
    items_range: Range<usize>,
}
impl KeyValuesListItemRanges {
    /// Creates an iterator over `count` list items inside `cell_range`.
    ///
    /// Returns `NtHiveError::InvalidSizeField` (reported against
    /// `count_field_offset`) if the cell is too small to hold `count`
    /// items.
    pub(crate) fn new(
        count: u32,
        count_field_offset: usize,
        cell_range: Range<usize>,
    ) -> Result<Self> {
        // `count` comes from (potentially malicious) hive data.
        // On 32-bit targets `count as usize * size_of` can overflow and
        // wrap to a small value, so saturate to `usize::MAX` instead —
        // `byte_subrange` then fails and we report the size error below.
        let byte_count = (count as usize)
            .checked_mul(mem::size_of::<KeyValuesListItem>())
            .unwrap_or(usize::MAX);
        let items_range = byte_subrange(&cell_range, byte_count).ok_or_else(|| {
            NtHiveError::InvalidSizeField {
                offset: count_field_offset,
                expected: byte_count,
                actual: cell_range.len(),
            }
        })?;
        Ok(Self { items_range })
    }
}
impl Iterator for KeyValuesListItemRanges {
    type Item = KeyValuesListItemRange;

    fn next(&mut self) -> Option<Self::Item> {
        // Carve one item-sized subrange off the front, then advance past it.
        let item_size = mem::size_of::<KeyValuesListItem>();
        let item_range = byte_subrange(&self.items_range, item_size)?;
        self.items_range.start += item_size;
        Some(KeyValuesListItemRange(item_range))
    }

    fn count(self) -> usize {
        // `size_hint` is exact, so the remaining count is known without iterating.
        self.size_hint().0
    }

    fn last(mut self) -> Option<Self::Item> {
        match self.size_hint().0 {
            0 => None,
            remaining => self.nth(remaining - 1),
        }
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // Jump over `n` whole items in O(1), then yield the following one.
        // `next` returns `None` if the jump left fewer than one item.
        let bytes_to_skip = n.checked_mul(mem::size_of::<KeyValuesListItem>())?;
        self.items_range.start = self.items_range.start.checked_add(bytes_to_skip)?;
        self.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.items_range.len() / mem::size_of::<KeyValuesListItem>();
        (remaining, Some(remaining))
    }
}
// The length from `size_hint` is exact, and `next` keeps returning `None`
// once the range is exhausted, so both marker traits hold.
impl ExactSizeIterator for KeyValuesListItemRanges {}
impl FusedIterator for KeyValuesListItemRanges {}
/// Iterator over the Key Values of a key node, resolving each list item
/// into a [`KeyValue`] (or an error) on demand.
#[derive(Clone)]
pub struct KeyValues<'h, B: SplitByteSlice> {
    // Hive the value offsets are resolved against.
    hive: &'h Hive<B>,
    // Remaining item ranges of the underlying Key Values list.
    key_values_list_item_ranges: KeyValuesListItemRanges,
}
impl<'h, B> KeyValues<'h, B>
where
    B: SplitByteSlice,
{
    /// Creates a `KeyValues` iterator over `count` values listed in the
    /// cell at `cell_range`, propagating any size-validation error from
    /// `KeyValuesListItemRanges::new`.
    pub(crate) fn new(
        hive: &'h Hive<B>,
        count: u32,
        count_field_offset: usize,
        cell_range: Range<usize>,
    ) -> Result<Self> {
        KeyValuesListItemRanges::new(count, count_field_offset, cell_range).map(
            |key_values_list_item_ranges| Self {
                hive,
                key_values_list_item_ranges,
            },
        )
    }
}
impl<'h, B> Iterator for KeyValues<'h, B>
where
    B: SplitByteSlice,
{
    type Item = Result<KeyValue<'h, B>>;

    fn next(&mut self) -> Option<Self::Item> {
        // Resolve the next list item's offset into a key value cell.
        let key_values_list_item_range = self.key_values_list_item_ranges.next()?;
        let key_value_offset = key_values_list_item_range.key_value_offset(self.hive);
        let cell_range = iter_try!(self.hive.cell_range_from_data_offset(key_value_offset));
        let key_value = iter_try!(KeyValue::new(self.hive, cell_range));
        Some(Ok(key_value))
    }

    fn count(self) -> usize {
        self.key_values_list_item_ranges.count()
    }

    fn last(mut self) -> Option<Self::Item> {
        let (size, _) = self.size_hint();
        if size == 0 {
            return None;
        }
        self.nth(size - 1)
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        // Skip `n` items via the inner iterator, then resolve the following
        // one. Delegating keeps the overflow-checked skip arithmetic in a
        // single place instead of duplicating it (and the inner iterator's
        // field access) here.
        if n > 0 {
            self.key_values_list_item_ranges.nth(n - 1)?;
        }
        self.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.key_values_list_item_ranges.size_hint()
    }
}
// Both marker traits are inherited from the underlying
// `KeyValuesListItemRanges` iterator, whose length is exact and which is
// fused.
impl<B> ExactSizeIterator for KeyValues<'_, B> where B: SplitByteSlice {}
impl<B> FusedIterator for KeyValues<'_, B> where B: SplitByteSlice {}