use core::{borrow, cmp, mem, ops};
use alloc::borrow::ToOwned;
use alloc::vec::Vec;
use crate::texel::{constants::MAX, MaxAligned, Texel, MAX_ALIGN};
/// An owned, growable byte buffer whose backing storage is made of
/// `MaxAligned` chunks, so the allocation carries the alignment of
/// `MaxAligned` and its byte length is always a chunk multiple.
#[derive(Clone, Default)]
pub(crate) struct Buffer {
// Backing storage in whole `MaxAligned` chunks (see `Buffer::alloc_len`).
inner: Vec<MaxAligned>,
}
/// A byte slice wrapper: the borrowed, unsized counterpart of [`Buffer`],
/// constructed from suitably aligned storage (see `buf::new`).
///
/// The lower-case name mirrors primitive slice types such as `str`, hence
/// the `non_camel_case_types` allowance. `repr(transparent)` guarantees the
/// same layout as `[u8]`, which the `from_bytes` casts rely on.
#[repr(transparent)]
#[allow(non_camel_case_types)]
pub(crate) struct buf([u8]);
/// A clone-on-grow buffer: either owned storage, or a borrowed mutable
/// slice that is copied into an owned [`Buffer`] on demand (see
/// `Cog::to_owned` / `Cog::grow_to`).
pub(crate) enum Cog<'buf> {
// Owned storage that may be grown freely.
Owned(Buffer),
// Borrowed storage with a fixed length; growing requires conversion.
Borrowed(&'buf mut buf),
}
impl Buffer {
    /// A zeroed chunk, used to fill any newly allocated storage.
    const ELEMENT: MaxAligned = MaxAligned([0; MAX_ALIGN]);

    /// Borrow the contents as an aligned byte slice.
    pub fn as_buf(&self) -> &buf {
        buf::new(self.inner.as_slice())
    }

    /// Borrow the contents as a mutable aligned byte slice.
    pub fn as_buf_mut(&mut self) -> &mut buf {
        buf::new_mut(self.inner.as_mut_slice())
    }

    /// Allocate a zero-initialized buffer covering at least `length` bytes.
    pub fn new(length: usize) -> Self {
        let chunks = Self::alloc_len(length);
        Buffer {
            inner: alloc::vec![Self::ELEMENT; chunks],
        }
    }

    /// Number of bytes that fit without reallocating.
    pub fn capacity(&self) -> usize {
        mem::size_of::<MaxAligned>() * self.inner.capacity()
    }

    /// Ensure at least `bytes` bytes are present, zero-filling new space.
    /// Never shrinks.
    pub fn grow_to(&mut self, bytes: usize) {
        let chunks = Self::alloc_len(bytes);
        if chunks > self.inner.len() {
            self.inner.resize(chunks, Self::ELEMENT);
        }
    }

    /// Resize to exactly the chunk count covering `bytes`, releasing any
    /// excess capacity back to the allocator.
    pub fn resize_to(&mut self, bytes: usize) {
        let chunks = Self::alloc_len(bytes);
        self.inner.resize(chunks, Self::ELEMENT);
        self.inner.shrink_to_fit()
    }

    /// Chunk count needed to cover `length` bytes: ceiling division written
    /// without addition before the divide, so it cannot overflow even for
    /// `length` close to `usize::MAX`.
    fn alloc_len(length: usize) -> usize {
        const CHUNK_SIZE: usize = mem::size_of::<MaxAligned>();
        assert!(CHUNK_SIZE > 1);

        let whole_chunks = length / CHUNK_SIZE;
        if length % CHUNK_SIZE == 0 {
            whole_chunks
        } else {
            whole_chunks + 1
        }
    }
}
impl Cog<'_> {
    /// Get mutable access to an owned buffer, first copying borrowed
    /// contents into a fresh allocation when necessary.
    pub(crate) fn to_owned(this: &mut Self) -> &'_ mut Buffer {
        if let Cog::Borrowed(borrowed) = this {
            let owned: Buffer = borrowed.to_owned();
            *this = Cog::Owned(owned);
        }

        match this {
            Cog::Owned(buffer) => buffer,
            // Any `Borrowed` variant was replaced just above.
            Cog::Borrowed(_) => unreachable!(),
        }
    }

    /// Consume the value, copying borrowed contents into an owned buffer.
    pub(crate) fn into_owned(this: Self) -> Buffer {
        match this {
            Cog::Borrowed(borrowed) => borrowed.to_owned(),
            Cog::Owned(owned) => owned,
        }
    }

    /// Make at least `bytes` bytes accessible, switching to owned storage
    /// when the current buffer is too small (borrowed slices cannot grow).
    pub(crate) fn grow_to(this: &mut Self, bytes: usize) -> &mut buf {
        if bytes > this.len() {
            Cog::to_owned(this).grow_to(bytes);
        }
        &mut **this
    }
}
impl buf {
    /// Wrap a slice of `MaxAligned` chunks as a byte buffer.
    ///
    /// The `unwrap` relies on the input being maximally aligned, so the
    /// checked `from_bytes` constructor cannot fail.
    pub fn new<T>(data: &T) -> &Self
    where
        T: AsRef<[MaxAligned]> + ?Sized,
    {
        let bytes = MAX.cast_bytes(data.as_ref());
        Self::from_bytes(bytes).unwrap()
    }

    /// Wrap a mutable slice of `MaxAligned` chunks as a byte buffer.
    pub fn new_mut<T>(data: &mut T) -> &mut Self
    where
        T: AsMut<[MaxAligned]> + ?Sized,
    {
        let bytes = MAX.cast_mut_bytes(data.as_mut());
        Self::from_bytes_mut(bytes).unwrap()
    }

    /// View the buffer as plain bytes.
    pub fn as_bytes(&self) -> &[u8] {
        &self.0
    }

    /// View the buffer as mutable plain bytes.
    pub fn as_bytes_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }

    /// Reinterpret the buffer as a slice of texels of type `P`.
    pub fn as_texels<P>(&self, pixel: Texel<P>) -> &[P] {
        pixel.cast_buf(self)
    }

    /// Reinterpret the buffer as a mutable slice of texels of type `P`.
    pub fn as_mut_texels<P>(&mut self, pixel: Texel<P>) -> &mut [P] {
        pixel.cast_mut_buf(self)
    }

    /// Map texels at index range `src` (indices in units of `P`) onto index
    /// `dest` (in units of `Q`), applying `f` to each element.
    ///
    /// Source and destination ranges may overlap within `self`. The copy is
    /// split into a backward and a forward pass such that no element's bytes
    /// are overwritten before they have been read.
    ///
    /// # Panics
    ///
    /// Panics when `src` is not a valid range, or when the source or
    /// destination range is out of bounds for the respective texel
    /// reinterpretation of this buffer.
    pub fn map_within<P, Q>(
        &mut self,
        src: impl ops::RangeBounds<usize>,
        dest: usize,
        f: impl Fn(P) -> Q,
        p: Texel<P>,
        q: Texel<Q>,
    ) {
        /// Smallest index at which the faster-striding stream has caught up
        /// with the other stream's head start of `start_byte_diff` bytes, or
        /// `None` if it never catches up. `size_diff` is the non-negative
        /// number of bytes gained per index. Indices before the returned
        /// value still have the slower stream ahead and belong to the
        /// direction-sensitive pass.
        fn backwards_past_the_end(start_byte_diff: isize, size_diff: isize) -> Option<usize> {
            assert!(size_diff >= 0);

            if size_diff == 0 {
                if start_byte_diff > 0 {
                    // Equal strides and a positive head start: the gap never
                    // closes, every index needs the protected pass.
                    // (Fix: these two arms were previously swapped, which
                    // corrupted overlapping copies of equal-size texels.)
                    None
                } else {
                    // No head start to overcome; index 0 is already safe.
                    Some(0)
                }
            } else if start_byte_diff < 0 {
                // The faster stream starts ahead and only gains; safe from
                // the very first index.
                Some(0)
            } else {
                // First index with `idx * size_diff >= start_byte_diff`.
                let floor = start_byte_diff / size_diff;
                let ceil = (floor as usize) + usize::from(start_byte_diff % size_diff != 0);
                Some(ceil)
            }
        }

        // Resolve the `P`-indexed source range into `p_start..p_end`.
        let p_start = match src.start_bound() {
            ops::Bound::Included(&bound) => bound,
            ops::Bound::Excluded(&bound) => bound
                .checked_add(1)
                .expect("Range does not specify a valid bound start"),
            ops::Bound::Unbounded => 0,
        };

        let p_end = match src.end_bound() {
            ops::Bound::Excluded(&bound) => bound,
            ops::Bound::Included(&bound) => bound
                .checked_add(1)
                .expect("Range does not specify a valid bound end"),
            ops::Bound::Unbounded => self.as_texels(p).len(),
        };

        let len = p_end.checked_sub(p_start).expect("Bound violates order");
        let q_start = dest;

        // Validate both interpretations up front so the passes below can
        // index unconditionally.
        let _ = self
            .as_texels(p)
            .get(p_start..)
            .and_then(|slice| slice.get(..len))
            .expect("Source out of bounds");

        let _ = self
            .as_texels(q)
            .get(q_start..)
            .and_then(|slice| slice.get(..len))
            .expect("Destination out of bounds");

        // Texel sizes must fit the signed byte arithmetic below.
        assert!(p.size() as isize > 0);
        assert!(q.size() as isize > 0);

        if p.size() >= q.size() {
            // Shrinking (or equal-size) copy: the read position advances at
            // least as fast as the write position. Any prefix where writes
            // are still ahead of unread input is done backwards, the rest
            // forwards.
            let start_diff = (q.size() * q_start).wrapping_sub(p.size() * p_start) as isize;
            let size_diff = p.size() as isize - q.size() as isize;

            let backwards_end = backwards_past_the_end(start_diff, size_diff)
                .unwrap_or(len)
                .min(len);

            self.map_backward(p_start, q_start, backwards_end, &f, p, q);
            self.map_forward(
                p_start + backwards_end,
                q_start + backwards_end,
                len - backwards_end,
                &f,
                p,
                q,
            );
        } else {
            // Widening copy: mirror image of the branch above with reader
            // and writer roles exchanged — the tail is done backwards and
            // the prefix forwards.
            let start_diff = (p.size() * p_start).wrapping_sub(q.size() * q_start) as isize;
            let size_diff = q.size() as isize - p.size() as isize;

            let backwards_end = backwards_past_the_end(start_diff, size_diff)
                .unwrap_or(len)
                .min(len);

            self.map_backward(
                p_start + backwards_end,
                q_start + backwards_end,
                len - backwards_end,
                &f,
                p,
                q,
            );
            self.map_forward(p_start, q_start, backwards_end, &f, p, q);
        }
    }

    /// Apply `f` to `len` texels in ascending index order.
    fn map_forward<P, Q>(
        &mut self,
        src: usize,
        dest: usize,
        len: usize,
        f: impl Fn(P) -> Q,
        p: Texel<P>,
        q: Texel<Q>,
    ) {
        for idx in 0..len {
            let source_idx = idx + src;
            let target_idx = idx + dest;
            // Copy the source value out first: the write below may alias it.
            let source = p.copy_val(&self.as_texels(p)[source_idx]);
            let target = f(source);
            self.as_mut_texels(q)[target_idx] = target;
        }
    }

    /// Apply `f` to `len` texels in descending index order.
    fn map_backward<P, Q>(
        &mut self,
        src: usize,
        dest: usize,
        len: usize,
        f: impl Fn(P) -> Q,
        p: Texel<P>,
        q: Texel<Q>,
    ) {
        for idx in (0..len).rev() {
            let source_idx = idx + src;
            let target_idx = idx + dest;
            // Copy the source value out first: the write below may alias it.
            let source = p.copy_val(&self.as_texels(p)[source_idx]);
            let target = f(source);
            self.as_mut_texels(q)[target_idx] = target;
        }
    }
}
/// Unifies `&[u8]` and `&mut [u8]` behind one length/split interface.
trait ByteSlice: Sized {
// Length of the underlying byte slice.
fn len(&self) -> usize;
// Splits by value so the mutable implementation can hand out two
// disjoint mutable halves.
fn split_at(self, at: usize) -> (Self, Self);
}
impl<'a> ByteSlice for &'a [u8] {
    fn len(&self) -> usize {
        // Fully qualified call to the inherent slice method, avoiding any
        // ambiguity with this trait method of the same name.
        <[u8]>::len(self)
    }

    fn split_at(self, at: usize) -> (Self, Self) {
        <[u8]>::split_at(self, at)
    }
}
impl<'a> ByteSlice for &'a mut [u8] {
    fn len(&self) -> usize {
        <[u8]>::len(self)
    }

    fn split_at(self, at: usize) -> (Self, Self) {
        // Mutable halves require the `_mut` splitter.
        <[u8]>::split_at_mut(self, at)
    }
}
impl From<&'_ [u8]> for Buffer {
    /// Copy the bytes into a fresh aligned buffer; any alignment padding at
    /// the end stays zeroed.
    fn from(content: &'_ [u8]) -> Self {
        let mut buffer = Buffer::new(content.len());
        buffer.as_bytes_mut()[..content.len()].copy_from_slice(content);
        buffer
    }
}
impl From<&'_ buf> for Buffer {
    /// Copy an aligned byte slice into owned storage.
    fn from(content: &'_ buf) -> Self {
        ToOwned::to_owned(content)
    }
}
impl Default for &'_ buf {
fn default() -> Self {
buf::new(&mut [])
}
}
impl Default for &'_ mut buf {
fn default() -> Self {
buf::new_mut(&mut [])
}
}
impl borrow::Borrow<buf> for Buffer {
    fn borrow(&self) -> &buf {
        // Same as the deref conversion, spelled via the named accessor.
        self.as_buf()
    }
}

impl borrow::BorrowMut<buf> for Buffer {
    fn borrow_mut(&mut self) -> &mut buf {
        self.as_buf_mut()
    }
}
impl alloc::borrow::ToOwned for buf {
    type Owned = Buffer;

    /// Copy these bytes into an owned, aligned buffer.
    fn to_owned(&self) -> Buffer {
        let mut buffer = Buffer::new(self.len());
        // `Buffer::new` rounds the allocation up to whole `MaxAligned`
        // chunks, so the owned byte slice may be longer than `self` (e.g.
        // for a `buf` obtained via `Index<RangeTo>`). Copy into the exact
        // prefix only — `copy_from_slice` panics on any length mismatch.
        buffer.as_bytes_mut()[..self.len()].copy_from_slice(self);
        buffer
    }
}
impl ops::Deref for Buffer {
type Target = buf;
fn deref(&self) -> &buf {
self.as_buf()
}
}
impl ops::DerefMut for Buffer {
fn deref_mut(&mut self) -> &mut buf {
self.as_buf_mut()
}
}
impl ops::Deref for buf {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        // `repr(transparent)` wrapper: the field *is* the byte slice.
        &self.0
    }
}

impl ops::DerefMut for buf {
    fn deref_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
impl ops::Deref for Cog<'_> {
    type Target = buf;

    fn deref(&self) -> &buf {
        match self {
            Cog::Owned(owned) => owned.as_buf(),
            Cog::Borrowed(borrowed) => &**borrowed,
        }
    }
}

impl ops::DerefMut for Cog<'_> {
    fn deref_mut(&mut self) -> &mut buf {
        match self {
            Cog::Owned(owned) => owned.as_buf_mut(),
            Cog::Borrowed(borrowed) => &mut **borrowed,
        }
    }
}
impl borrow::Borrow<buf> for Cog<'_> {
fn borrow(&self) -> &buf {
&**self
}
}
impl borrow::BorrowMut<buf> for Cog<'_> {
fn borrow_mut(&mut self) -> &mut buf {
&mut **self
}
}
impl cmp::PartialEq<Cog<'_>> for Cog<'_> {
fn eq(&self, other: &Cog<'_>) -> bool {
**self == **other
}
}
impl cmp::Eq for Cog<'_> {}
impl cmp::PartialEq for buf {
    /// Byte-wise comparison of the wrapped slices.
    fn eq(&self, other: &buf) -> bool {
        self.0 == other.0
    }
}

impl cmp::Eq for buf {}
impl cmp::PartialEq for Buffer {
    /// Byte-wise comparison of the full (chunk-padded) contents.
    fn eq(&self, other: &Buffer) -> bool {
        self.as_buf() == other.as_buf()
    }
}

impl cmp::Eq for Buffer {}
impl ops::Index<ops::RangeTo<usize>> for buf {
    type Output = buf;

    /// Take a prefix; a prefix of aligned storage keeps its alignment, so
    /// the checked constructor cannot fail.
    fn index(&self, idx: ops::RangeTo<usize>) -> &buf {
        let prefix = &self.0[idx];
        Self::from_bytes(prefix).unwrap()
    }
}

impl ops::IndexMut<ops::RangeTo<usize>> for buf {
    fn index_mut(&mut self, idx: ops::RangeTo<usize>) -> &mut buf {
        let prefix = &mut self.0[idx];
        Self::from_bytes_mut(prefix).unwrap()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::texels::{MAX, U16, U32, U8};
// Allocating exactly one chunk's worth of bytes yields exactly one
// `MaxAligned` element.
#[test]
fn single_max_element() {
let mut buffer = Buffer::new(mem::size_of::<MaxAligned>());
let slice = buffer.as_mut_texels(MAX);
assert!(slice.len() == 1);
}
// `grow_to` enlarges as needed but never shrinks: growing to the current
// capacity or to a smaller size leaves the capacity untouched.
#[test]
fn growing() {
let mut buffer = Buffer::new(0);
assert_eq!(buffer.capacity(), 0);
buffer.grow_to(mem::size_of::<MaxAligned>());
let capacity = buffer.capacity();
assert!(buffer.capacity() > 0);
buffer.grow_to(capacity);
assert_eq!(buffer.capacity(), capacity);
// Shrinking request — must be a no-op.
buffer.grow_to(0);
assert_eq!(buffer.capacity(), capacity);
// One byte past capacity forces an actual grow.
buffer.grow_to(capacity + 1);
assert!(buffer.capacity() > capacity);
}
// The same storage can be viewed as u8/u16/u32 texels; writes through one
// view are observable through the others.
#[test]
fn reinterpret() {
let mut buffer = Buffer::new(mem::size_of::<u32>());
assert!(buffer.as_mut_texels(U32).len() >= 1);
buffer
.as_mut_texels(U16)
.iter_mut()
.for_each(|p| *p = 0x0f0f);
// Every byte is 0x0f, so each u32 reads 0x0f0f0f0f.
buffer
.as_texels(U32)
.iter()
.for_each(|p| assert_eq!(*p, 0x0f0f0f0f));
buffer
.as_texels(U8)
.iter()
.for_each(|p| assert_eq!(*p, 0x0f));
// Write an ascending byte pattern and read it back big-endian.
buffer
.as_mut_texels(U8)
.iter_mut()
.enumerate()
.for_each(|(idx, p)| *p = idx as u8);
assert_eq!(u32::from_be(buffer.as_texels(U32)[0]), 0x00010203);
}
// `map_within` round-trips: shrinking u32 -> u8 and widening back u8 ->
// u32 must reproduce the original values, both fully in-place (offset 0)
// and with disjoint source/destination regions.
#[test]
fn mapping_great_to_small() {
const LEN: usize = 10;
let mut buffer = Buffer::new(LEN * mem::size_of::<u32>());
buffer
.as_mut_texels(U32)
.iter_mut()
.enumerate()
.for_each(|(idx, p)| *p = idx as u32);
// In-place round trip at the start of the buffer.
buffer.map_within(..LEN, 0, |n: u32| n as u8, U32, U8);
buffer.map_within(..LEN, 0, |n: u8| n as u32, U8, U32);
assert_eq!(
buffer.as_texels(U32)[..LEN].to_vec(),
(0..LEN as u32).collect::<Vec<_>>()
);
// Round trip through a disjoint byte region (u8 indices 3*LEN..4*LEN).
buffer.map_within(0..LEN, 3 * LEN, |n: u32| n as u8, U32, U8);
buffer.map_within(3 * LEN..4 * LEN, 0, |n: u8| n as u32, U8, U32);
assert_eq!(
buffer.as_texels(U32)[..LEN].to_vec(),
(0..LEN as u32).collect::<Vec<_>>()
);
}
}