use std::ptr;
use crate::base::Scalar;
use crate::base::allocator::{Allocator, SameShapeC, SameShapeR};
use crate::base::default_allocator::DefaultAllocator;
use crate::base::dimension::{Dim, U1};
/// The data storage produced by the default allocator for the result of a
/// binary operation between two matrices of shapes `(R1, C1)` and `(R2, C2)`
/// (the row/column dimensions are unified via `SameShapeR`/`SameShapeC`).
pub type SameShapeStorage<T, R1, C1, R2, C2> =
<DefaultAllocator as Allocator<SameShapeR<R1, R2>, SameShapeC<C1, C2>>>::Buffer<T>;
pub type Owned<T, R, C = U1> = <DefaultAllocator as Allocator<R, C>>::Buffer<T>;
pub type OwnedUninit<T, R, C = U1> = <DefaultAllocator as Allocator<R, C>>::BufferUninit<T>;
/// The row-stride type of the default allocator's owned buffer for an
/// `R × C` matrix, as reported by its [`RawStorage`] implementation.
pub type RStride<T, R, C = U1> =
<<DefaultAllocator as Allocator<R, C>>::Buffer<T> as RawStorage<T, R, C>>::RStride;
/// The column-stride type of the default allocator's owned buffer for an
/// `R × C` matrix, as reported by its [`RawStorage`] implementation.
pub type CStride<T, R, C = U1> =
<<DefaultAllocator as Allocator<R, C>>::Buffer<T> as RawStorage<T, R, C>>::CStride;
/// The trait shared by all read-only matrix data storages.
///
/// A storage exposes a base pointer, a shape `(R, C)` and a pair of element
/// strides; the provided methods derive element addresses and references from
/// that information.
///
/// # Safety
///
/// This is an `unsafe` trait: the default methods below perform unchecked
/// pointer arithmetic based on the pointer, shape and strides reported by the
/// implementor, so those must be mutually consistent.
/// NOTE(review): the full soundness contract is not visible in this file —
/// confirm against the implementors' documentation.
pub unsafe trait RawStorage<T, R: Dim, C: Dim = U1>: Sized {
/// The static (or dynamic) stride, in number of elements, between two
/// consecutive rows of the same column.
type RStride: Dim;
/// The static (or dynamic) stride, in number of elements, between two
/// consecutive columns of the same row.
type CStride: Dim;
/// Pointer to the first element of this storage.
fn ptr(&self) -> *const T;
/// The shape of the stored matrix as `(nrows, ncols)` dimension values.
fn shape(&self) -> (R, C);
/// The row and column strides as dimension values (see [`Self::RStride`]
/// and [`Self::CStride`]).
fn strides(&self) -> (Self::RStride, Self::CStride);
/// Computes the linear offset of element `(irow, icol)` from the strides:
/// `irow * rstride + icol * cstride`. Performs no bounds checking.
#[inline]
fn linear_index(&self, irow: usize, icol: usize) -> usize {
let (rstride, cstride) = self.strides();
irow * rstride.value() + icol * cstride.value()
}
/// Address of the `i`-th element in linear order, without bounds checking.
/// Uses `wrapping_add`, so merely computing an out-of-range address is not
/// itself undefined behavior — only dereferencing it would be.
#[inline]
fn get_address_unchecked_linear(&self, i: usize) -> *const T {
self.ptr().wrapping_add(i)
}
/// Address of element `(irow, icol)`, combining [`Self::linear_index`] and
/// [`Self::get_address_unchecked_linear`]. No bounds checking.
#[inline]
fn get_address_unchecked(&self, irow: usize, icol: usize) -> *const T {
self.get_address_unchecked_linear(self.linear_index(irow, icol))
}
/// Reference to the `i`-th element in linear order.
///
/// # Safety
///
/// The caller must ensure `i` addresses a valid, initialized element of
/// this storage, since the computed address is dereferenced unchecked.
#[inline]
unsafe fn get_unchecked_linear(&self, i: usize) -> &T {
unsafe { &*self.get_address_unchecked_linear(i) }
}
/// Reference to the element at `(irow, icol)`.
///
/// # Safety
///
/// The caller must ensure `(irow, icol)` is in-bounds for this storage's
/// shape; no bounds checking is performed.
#[inline]
unsafe fn get_unchecked(&self, irow: usize, icol: usize) -> &T {
unsafe { self.get_unchecked_linear(self.linear_index(irow, icol)) }
}
/// Whether the elements of this storage occupy a single contiguous memory
/// region.
fn is_contiguous(&self) -> bool;
/// Views the storage's elements as a slice.
///
/// # Safety
///
/// NOTE(review): the precise contract is not visible here — presumably the
/// storage must be contiguous and fully initialized; confirm with callers.
unsafe fn as_slice_unchecked(&self) -> &[T];
}
/// A [`RawStorage`] whose elements can be cloned into (or moved into) an owned
/// buffer allocated by the [`DefaultAllocator`].
///
/// # Safety
///
/// NOTE(review): the soundness contract justifying `unsafe` is not visible in
/// this file — confirm against the implementors' documentation.
pub unsafe trait Storage<T: Scalar, R: Dim, C: Dim = U1>: RawStorage<T, R, C> {
/// Converts this storage into an owned buffer, consuming `self`.
fn into_owned(self) -> Owned<T, R, C>
where
DefaultAllocator: Allocator<R, C>;
/// Clones this storage's elements into a new owned buffer.
fn clone_owned(&self) -> Owned<T, R, C>
where
DefaultAllocator: Allocator<R, C>;
/// Consumes the storage. By its name this forgets the contained elements
/// (i.e. does not run their destructors) — NOTE(review): confirm against
/// the implementors; the contract is not visible from this file.
fn forget_elements(self);
}
/// The mutable counterpart of [`RawStorage`]: adds a mutable base pointer and
/// mutable/unchecked element access and swapping.
///
/// # Safety
///
/// This is an `unsafe` trait for the same reason as [`RawStorage`]: the
/// default methods derive raw mutable addresses from the reported pointer and
/// strides without any checking.
pub unsafe trait RawStorageMut<T, R: Dim, C: Dim = U1>: RawStorage<T, R, C> {
/// Mutable pointer to the first element of this storage.
fn ptr_mut(&mut self) -> *mut T;
/// Mutable address of the `i`-th element in linear order, without bounds
/// checking. `wrapping_add` keeps the mere address computation defined
/// even for out-of-range `i`; only dereferencing would be UB.
#[inline]
fn get_address_unchecked_linear_mut(&mut self, i: usize) -> *mut T {
self.ptr_mut().wrapping_add(i)
}
/// Mutable address of element `(irow, icol)`. No bounds checking.
#[inline]
fn get_address_unchecked_mut(&mut self, irow: usize, icol: usize) -> *mut T {
let lid = self.linear_index(irow, icol);
self.get_address_unchecked_linear_mut(lid)
}
/// Mutable reference to the `i`-th element in linear order.
///
/// # Safety
///
/// The caller must ensure `i` addresses a valid, initialized element, as
/// the computed address is dereferenced unchecked.
unsafe fn get_unchecked_linear_mut(&mut self, i: usize) -> &mut T {
unsafe { &mut *self.get_address_unchecked_linear_mut(i) }
}
/// Mutable reference to the element at `(irow, icol)`.
///
/// # Safety
///
/// The caller must ensure `(irow, icol)` is in-bounds; no checking is done.
#[inline]
unsafe fn get_unchecked_mut(&mut self, irow: usize, icol: usize) -> &mut T {
unsafe { &mut *self.get_address_unchecked_mut(irow, icol) }
}
/// Swaps the elements at linear indices `i1` and `i2`.
///
/// # Safety
///
/// Both indices must address valid, initialized elements of this storage.
#[inline]
unsafe fn swap_unchecked_linear(&mut self, i1: usize, i2: usize) {
unsafe {
// The offsets are computed through `get_address_unchecked_linear_mut`
// so that implementors overriding that method are honored; the base
// pointer is then re-fetched to build the two final pointers.
// `ptr::swap` permits overlapping (including identical) pointers.
let base = self.ptr_mut();
let offset1 = self.get_address_unchecked_linear_mut(i1).offset_from(base);
let offset2 = self.get_address_unchecked_linear_mut(i2).offset_from(base);
let base = self.ptr_mut();
let a = base.offset(offset1);
let b = base.offset(offset2);
ptr::swap(a, b);
}
}
/// Swaps the elements at `(row, col)` positions `row_col1` and `row_col2`.
///
/// # Safety
///
/// Both positions must be in-bounds for this storage's shape.
#[inline]
unsafe fn swap_unchecked(&mut self, row_col1: (usize, usize), row_col2: (usize, usize)) {
unsafe {
let lid1 = self.linear_index(row_col1.0, row_col1.1);
let lid2 = self.linear_index(row_col2.0, row_col2.1);
self.swap_unchecked_linear(lid1, lid2)
}
}
/// Views the storage's elements as a mutable slice.
///
/// # Safety
///
/// NOTE(review): the precise contract is not visible here — presumably the
/// storage must be contiguous and fully initialized; confirm with callers.
unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T];
}
/// Marker trait unifying [`Storage`] and [`RawStorageMut`]. It has no methods
/// of its own and is blanket-implemented for every type satisfying both
/// supertraits, so it only exists as a convenient combined bound.
pub unsafe trait StorageMut<T: Scalar, R: Dim, C: Dim = U1>:
Storage<T, R, C> + RawStorageMut<T, R, C>
{
}
// Blanket implementation: any storage that is both `Storage` and
// `RawStorageMut` automatically gets `StorageMut`, so downstream code never
// needs to implement it by hand.
unsafe impl<S, T: Scalar, R, C> StorageMut<T, R, C> for S
where
R: Dim,
C: Dim,
S: Storage<T, R, C> + RawStorageMut<T, R, C>,
{
}
pub unsafe trait IsContiguous {}
/// A storage of shape `(R1, C1)` that can be reshaped in-place (consuming
/// `self`) into a storage of shape `(R2, C2)`.
pub trait ReshapableStorage<T, R1, C1, R2, C2>: RawStorage<T, R1, C1>
where
T: Scalar,
R1: Dim,
C1: Dim,
R2: Dim,
C2: Dim,
{
/// The reshaped storage type produced by [`Self::reshape_generic`].
type Output: RawStorage<T, R2, C2>;
/// Consumes `self` and reshapes it to `nrows × ncols`.
/// NOTE(review): the relation required between the old and new element
/// counts is not visible from this file — presumably they must be equal;
/// confirm against the implementors.
fn reshape_generic(self, nrows: R2, ncols: C2) -> Self::Output;
}