use crate::alignment::Alignment;
use crate::slice::AlignedSlice;
use std::iter::FusedIterator;
use std::mem;
use std::ops::Deref;
/// A view over a single alignment-sized chunk of an [`AlignedSlice`].
///
/// `#[repr(transparent)]` guarantees that `AlignedBlock<A>` has exactly the
/// same layout as the wrapped [`AlignedSlice<A>`]; the iterator in this module
/// relies on that guarantee when it `transmute`s `&AlignedSlice` references
/// into `&AlignedBlock` references.
#[repr(transparent)]
pub struct AlignedBlock<A: Alignment> {
    // The underlying bytes of this block. The last block of an iteration may
    // be shorter than `A::size()` (see `Iterator::next` below).
    slice: AlignedSlice<A>,
}
/// Iterator yielding consecutive [`AlignedBlock`]s of an [`AlignedSlice`].
///
/// Constructed with [`AlignedBlockIterator::new`]. The `bytes` field always
/// holds the not-yet-yielded remainder of the original slice.
pub struct AlignedBlockIterator<'a, A: Alignment> {
    bytes: &'a AlignedSlice<A>,
}
impl<'a, A: Alignment> AlignedBlockIterator<'a, A> {
#[must_use]
#[inline]
pub(crate) fn new(slice: &'a AlignedSlice<A>) -> Self {
Self { bytes: slice }
}
#[inline]
pub fn offset(&mut self, offset: isize) {
self.bytes = self.bytes.offset(offset);
}
}
impl<A: Alignment> Deref for AlignedBlock<A> {
    type Target = AlignedSlice<A>;

    /// Borrows the underlying [`AlignedSlice`].
    ///
    /// The wrapped slice is an ordinary field, so a safe field borrow
    /// suffices here — the previous `unsafe { mem::transmute(self) }` was
    /// unnecessary. A transmute is only needed for the *reverse* direction
    /// (`&AlignedSlice` → `&AlignedBlock`), which the iterator performs under
    /// the `#[repr(transparent)]` layout guarantee.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.slice
    }
}
impl<A: Alignment> AlignedBlock<A> {
    /// Returns the alignment of this block in bytes, as determined by the
    /// `Alignment` type parameter `A`.
    #[must_use]
    #[inline(always)]
    pub fn alignment_size(&self) -> usize {
        A::size()
    }

    /// Returns the number of bytes in this block.
    #[must_use]
    #[inline]
    pub fn len(&self) -> usize {
        self.slice.len()
    }

    /// Returns `true` if this block contains no bytes.
    #[must_use]
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.slice.is_empty()
    }
}
impl<'a, A: Alignment> Iterator for AlignedBlockIterator<'a, A> {
    type Item = &'a AlignedBlock<A>;

    /// Yields the next block of up to `A::size()` bytes, or `None` once the
    /// remaining slice is empty.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.bytes.is_empty() {
            return None;
        }
        if self.bytes.len() < A::size() {
            // Trailing partial chunk: yield all remaining bytes as the final
            // (shorter-than-alignment) block, then leave an empty slice so
            // every later call hits the `is_empty` branch above.
            // SAFETY: `AlignedBlock` is `#[repr(transparent)]` over
            // `AlignedSlice`, so `&'a AlignedSlice<A>` and
            // `&'a AlignedBlock<A>` are layout-compatible.
            let chunk = unsafe { mem::transmute(self.bytes) };
            self.bytes = Default::default();
            return Some(chunk);
        }
        // SAFETY: same `#[repr(transparent)]` layout argument as above. The
        // transmute additionally detaches the subslice reference from the
        // short local reborrow of `self.bytes` and re-ties it to `'a`, which
        // is sound because the subslice points into the original `'a` data.
        let chunk = unsafe { mem::transmute(&self.bytes[..A::size()]) };
        // NOTE(review): `offset(1)` presumably advances by one alignment unit
        // (`A::size()` bytes), matching the chunking above — confirm against
        // `AlignedSlice::offset`.
        self.bytes = self.bytes.offset(1);
        Some(chunk)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Ceiling division: a trailing partial chunk still counts as one
        // block. Lower and upper bounds are equal, supporting the
        // `ExactSizeIterator` impl below.
        let size = (self.bytes.len() + A::size() - 1) / A::size();
        (size, Some(size))
    }
}
// Valid because `size_hint` reports identical lower and upper bounds — the
// exact number of blocks remaining.
impl<A: Alignment> ExactSizeIterator for AlignedBlockIterator<'_, A> {}
// Valid because exhaustion leaves `bytes` empty (set to `Default::default()`
// on the final partial chunk), so `next` returns `None` forever after.
impl<A: Alignment> FusedIterator for AlignedBlockIterator<'_, A> {}
#[cfg(test)]
mod tests {
    use crate::{alignment, AlignedBytes};

    /// The alignment a block reports must match the `Alignment` type
    /// parameter it was created with (`TwoTo<7>` == 128 bytes).
    #[test]
    fn alignment_size_equal_to_alignment_type() {
        let aligned: AlignedBytes<alignment::TwoTo<7>> = AlignedBytes::new_zeroed(1024);
        let first = aligned
            .iter_blocks()
            .next()
            .expect("1024 zeroed bytes must yield at least one block");
        assert_eq!(128, first.alignment_size());
    }
}