Trait nyx_space::dimensions::storage::Storage [−][src]
pub unsafe trait Storage<N, R, C = U1>: Debug where
C: Dim,
N: Scalar,
R: Dim, { type RStride: Dim; type CStride: Dim; fn ptr(&self) -> *const N; fn shape(&self) -> (R, C); fn strides(&self) -> (Self::RStride, Self::CStride); fn is_contiguous(&self) -> bool; fn as_slice(&self) -> &[N]; fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer
where
DefaultAllocator: Allocator<N, R, C>; fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer
where
DefaultAllocator: Allocator<N, R, C>; fn linear_index(&self, irow: usize, icol: usize) -> usize { ... } unsafe fn get_address_unchecked_linear(&self, i: usize) -> *const N { ... } unsafe fn get_address_unchecked(&self, irow: usize, icol: usize) -> *const N { ... } unsafe fn get_unchecked_linear(&self, i: usize) -> &N { ... } unsafe fn get_unchecked(&self, irow: usize, icol: usize) -> &N { ... } }
Expand description
The trait shared by all matrix data storage.
TODO: doc
Note that Self
must always have a number of elements compatible with the matrix length (given
by R
and C
if they are known at compile-time). For example, implementors of this trait
should not allow the user to modify the size of the underlying buffer with safe methods
(for example the VecStorage::data_mut
method is unsafe because the user could change the
vector’s size so that it no longer contains enough elements: this will lead to UB).
Associated Types
Required methods
The dimension of the matrix at run-time. An array length of zero indicates the additive identity
element of any dimension. Must be equal to Self::dimension()
if it is not None.
The spacing between consecutive row elements and consecutive column elements.
For example this returns (1, 5)
for a column-major matrix with 5 rows.
fn is_contiguous(&self) -> bool
[src]
fn is_contiguous(&self) -> bool
[src]Indicates whether this data buffer stores its elements contiguously.
Retrieves the data buffer as a contiguous slice.
The matrix components may not be stored in a contiguous way, depending on the strides.
fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]
fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]Builds a matrix data storage that does not contain any reference.
fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]
fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]Clones this data storage to one that does not contain any reference.
Provided methods
fn linear_index(&self, irow: usize, icol: usize) -> usize
[src]
fn linear_index(&self, irow: usize, icol: usize) -> usize
[src]Compute the index corresponding to the irow-th row and icol-th column of this matrix. The index must be such that the following holds:
let lindex = self.linear_index(irow, icol); assert!(*self.get_unchecked(irow, icol) == *self.get_unchecked_linear(lindex))
unsafe fn get_address_unchecked_linear(&self, i: usize) -> *const N
[src]
unsafe fn get_address_unchecked_linear(&self, i: usize) -> *const N
[src]Gets the address of the i-th matrix component without performing bound-checking.
unsafe fn get_address_unchecked(&self, irow: usize, icol: usize) -> *const N
[src]
unsafe fn get_address_unchecked(&self, irow: usize, icol: usize) -> *const N
[src]Gets the address of the matrix component at row irow and column icol without performing bound-checking.
unsafe fn get_unchecked_linear(&self, i: usize) -> &N
[src]
unsafe fn get_unchecked_linear(&self, i: usize) -> &N
[src]Retrieves a reference to the i-th element without bound-checking.
unsafe fn get_unchecked(&self, irow: usize, icol: usize) -> &N
[src]
unsafe fn get_unchecked(&self, irow: usize, icol: usize) -> &N
[src]Retrieves a reference to the element at row irow and column icol without bound-checking.
Implementors
type RStride = RStride
type CStride = CStride
pub fn strides(
&self
) -> (<SliceStorage<'a, N, R, C, RStride, CStride> as Storage<N, R, C>>::RStride, <SliceStorage<'a, N, R, C, RStride, CStride> as Storage<N, R, C>>::CStride)
[src]pub fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]pub fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]type RStride = RStride
type CStride = CStride
pub fn strides(
&self
) -> (<SliceStorageMut<'a, N, R, C, RStride, CStride> as Storage<N, R, C>>::RStride, <SliceStorageMut<'a, N, R, C, RStride, CStride> as Storage<N, R, C>>::CStride)
[src]pub fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]pub fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]impl<N, C> Storage<N, Dynamic, C> for VecStorage<N, Dynamic, C> where
C: Dim,
N: Scalar,
DefaultAllocator: Allocator<N, Dynamic, C>,
<DefaultAllocator as Allocator<N, Dynamic, C>>::Buffer == VecStorage<N, Dynamic, C>,
[src]
impl<N, C> Storage<N, Dynamic, C> for VecStorage<N, Dynamic, C> where
C: Dim,
N: Scalar,
DefaultAllocator: Allocator<N, Dynamic, C>,
<DefaultAllocator as Allocator<N, Dynamic, C>>::Buffer == VecStorage<N, Dynamic, C>,
[src]pub fn strides(
&self
) -> (<VecStorage<N, Dynamic, C> as Storage<N, Dynamic, C>>::RStride, <VecStorage<N, Dynamic, C> as Storage<N, Dynamic, C>>::CStride)
[src]pub fn into_owned(
self
) -> <DefaultAllocator as Allocator<N, Dynamic, C>>::Buffer where
DefaultAllocator: Allocator<N, Dynamic, C>,
[src]pub fn clone_owned(
&self
) -> <DefaultAllocator as Allocator<N, Dynamic, C>>::Buffer where
DefaultAllocator: Allocator<N, Dynamic, C>,
[src]impl<N, R> Storage<N, R, Dynamic> for VecStorage<N, R, Dynamic> where
N: Scalar,
R: DimName,
DefaultAllocator: Allocator<N, R, Dynamic>,
<DefaultAllocator as Allocator<N, R, Dynamic>>::Buffer == VecStorage<N, R, Dynamic>,
[src]
impl<N, R> Storage<N, R, Dynamic> for VecStorage<N, R, Dynamic> where
N: Scalar,
R: DimName,
DefaultAllocator: Allocator<N, R, Dynamic>,
<DefaultAllocator as Allocator<N, R, Dynamic>>::Buffer == VecStorage<N, R, Dynamic>,
[src]type CStride = R
pub fn strides(
&self
) -> (<VecStorage<N, R, Dynamic> as Storage<N, R, Dynamic>>::RStride, <VecStorage<N, R, Dynamic> as Storage<N, R, Dynamic>>::CStride)
[src]pub fn into_owned(
self
) -> <DefaultAllocator as Allocator<N, R, Dynamic>>::Buffer where
DefaultAllocator: Allocator<N, R, Dynamic>,
[src]pub fn clone_owned(
&self
) -> <DefaultAllocator as Allocator<N, R, Dynamic>>::Buffer where
DefaultAllocator: Allocator<N, R, Dynamic>,
[src]impl<N, R, C> Storage<N, R, C> for ArrayStorage<N, R, C> where
C: DimName,
N: Scalar,
R: DimName,
<R as DimName>::Value: Mul<<C as DimName>::Value>,
<<R as DimName>::Value as Mul<<C as DimName>::Value>>::Output: ArrayLength<N>,
DefaultAllocator: Allocator<N, R, C>,
<DefaultAllocator as Allocator<N, R, C>>::Buffer == ArrayStorage<N, R, C>,
[src]
impl<N, R, C> Storage<N, R, C> for ArrayStorage<N, R, C> where
C: DimName,
N: Scalar,
R: DimName,
<R as DimName>::Value: Mul<<C as DimName>::Value>,
<<R as DimName>::Value as Mul<<C as DimName>::Value>>::Output: ArrayLength<N>,
DefaultAllocator: Allocator<N, R, C>,
<DefaultAllocator as Allocator<N, R, C>>::Buffer == ArrayStorage<N, R, C>,
[src]type CStride = R
pub fn strides(
&self
) -> (<ArrayStorage<N, R, C> as Storage<N, R, C>>::RStride, <ArrayStorage<N, R, C> as Storage<N, R, C>>::CStride)
[src]pub fn into_owned(self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]pub fn clone_owned(&self) -> <DefaultAllocator as Allocator<N, R, C>>::Buffer where
DefaultAllocator: Allocator<N, R, C>,
[src]