Trait simba::scalar::SubsetOf

pub trait SubsetOf<T>: Sized {
    fn to_superset(&self) -> T;
    fn from_superset_unchecked(element: &T) -> Self;
    fn is_in_subset(element: &T) -> bool;

    fn from_superset(element: &T) -> Option<Self> { ... }
}

Nested sets and conversions between them (using an injective mapping). This is useful for working with substructures. In generic code, it is preferable to use SupersetOf as a trait bound whenever possible instead of SubsetOf, because SupersetOf is automatically implemented whenever SubsetOf is.

The notion of "nested sets" is very broad and applies to what the types are supposed to represent, independently of their actual implementation details and limitations. For example:

  • f32 and f64 are both supposed to represent reals and are thus considered equal (even if, in practice, f64 has more elements).
  • u32 and i8 are respectively supposed to represent natural and relative numbers. Since the naturals are a subset of the integers, u32 is considered a subset of i8 (even though i8 has a much smaller range in practice).
  • A quaternion and a 3x3 orthogonal matrix with unit determinant both represent the set of 3D rotations. They can thus be considered equal.

In other words, implementation details due to machine limitations are ignored (otherwise we could not even, e.g., convert a u64 to an i64). If those limitations matter to you, use other crates that let you query the limits of the types involved.
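As a minimal sketch of the generic-code guideline above (the helper name widen is illustrative and not part of the crate; it relies on the blanket SupersetOf implementation mentioned earlier):

    use simba::scalar::SupersetOf;

    // Prefer `SupersetOf` as the bound: it is automatically implemented for
    // every `T` such that `u8: SubsetOf<T>`.
    fn widen<T: SupersetOf<u8>>(x: u8) -> T {
        T::from_subset(&x)
    }

    fn main() {
        let a: f64 = widen(42u8);
        let b: i32 = widen(42u8);
        assert_eq!(a, 42.0);
        assert_eq!(b, 42);
    }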

Required methods

fn to_superset(&self) -> T

The inclusion map: converts self to the equivalent element of its superset.
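A minimal sketch using the impl SubsetOf&lt;f64&gt; for u8 listed below:

    use simba::scalar::SubsetOf;

    fn main() {
        let x: u8 = 7;
        // Every `u8` value has an exact `f64` counterpart.
        let y: f64 = x.to_superset();
        assert_eq!(y, 7.0);
    }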

fn from_superset_unchecked(element: &T) -> Self

Use with care! Same as Self::from_superset but without any property checks. Always succeeds.
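A short sketch using the impl SubsetOf&lt;f64&gt; for u16 listed below; the out-of-range comment describes a pitfall to avoid rather than specified behaviour:

    use simba::scalar::SubsetOf;

    fn main() {
        // 300.0 lies in the subset represented by `u16`, so skipping the check is fine here.
        let a: u16 = u16::from_superset_unchecked(&300.0f64);
        assert_eq!(a, 300);
        // Calling it with a value outside the subset (e.g. -1.0) is a logic error:
        // no check is performed, so the result may not be meaningful.
    }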

fn is_in_subset(element: &T) -> bool

Checks if element is actually part of the subset Self (and can be converted to it).
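A minimal sketch using the impl SubsetOf&lt;f64&gt; for u8 listed below:

    use simba::scalar::SubsetOf;

    fn main() {
        // 42.0 is an integer value representable as a `u8`; -1.0 and 42.5 are not.
        assert!(u8::is_in_subset(&42.0f64));
        assert!(!u8::is_in_subset(&-1.0f64));
        assert!(!u8::is_in_subset(&42.5f64));
    }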

Provided methods

fn from_superset(element: &T) -> Option<Self>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset.

Must return None if element has no equivalent in Self.
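A minimal sketch using the impl SubsetOf&lt;f64&gt; for i8 listed below:

    use simba::scalar::SubsetOf;

    fn main() {
        // Checked conversion back from the superset.
        assert_eq!(i8::from_superset(&100.0f64), Some(100));
        // 1000.0 has no `i8` equivalent, so the conversion fails.
        assert_eq!(i8::from_superset(&1000.0f64), None);
    }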

Implementations on Foreign Types

impl<Fract: LeEqU8> SubsetOf<FixedI8<Fract>> for f64[src]

impl<Fract: LeEqU16> SubsetOf<FixedI16<Fract>> for f64[src]

impl<Fract: LeEqU32> SubsetOf<FixedI32<Fract>> for f64[src]

impl<Fract: LeEqU64> SubsetOf<FixedI64<Fract>> for f64[src]

impl SubsetOf<u8> for u8[src]

impl SubsetOf<u16> for u8[src]

impl SubsetOf<u32> for u8[src]

impl SubsetOf<u64> for u8[src]

impl SubsetOf<u128> for u8[src]

impl SubsetOf<usize> for u8[src]

impl SubsetOf<i8> for u8[src]

impl SubsetOf<i16> for u8[src]

impl SubsetOf<i32> for u8[src]

impl SubsetOf<i64> for u8[src]

impl SubsetOf<i128> for u8[src]

impl SubsetOf<isize> for u8[src]

impl SubsetOf<f32> for u8[src]

impl SubsetOf<f64> for u8[src]

impl SubsetOf<u8> for u16[src]

impl SubsetOf<u16> for u16[src]

impl SubsetOf<u32> for u16[src]

impl SubsetOf<u64> for u16[src]

impl SubsetOf<u128> for u16[src]

impl SubsetOf<usize> for u16[src]

impl SubsetOf<i8> for u16[src]

impl SubsetOf<i16> for u16[src]

impl SubsetOf<i32> for u16[src]

impl SubsetOf<i64> for u16[src]

impl SubsetOf<i128> for u16[src]

impl SubsetOf<isize> for u16[src]

impl SubsetOf<f32> for u16[src]

impl SubsetOf<f64> for u16[src]

impl SubsetOf<u8> for u32[src]

impl SubsetOf<u16> for u32[src]

impl SubsetOf<u32> for u32[src]

impl SubsetOf<u64> for u32[src]

impl SubsetOf<u128> for u32[src]

impl SubsetOf<usize> for u32[src]

impl SubsetOf<i8> for u32[src]

impl SubsetOf<i16> for u32[src]

impl SubsetOf<i32> for u32[src]

impl SubsetOf<i64> for u32[src]

impl SubsetOf<i128> for u32[src]

impl SubsetOf<isize> for u32[src]

impl SubsetOf<f32> for u32[src]

impl SubsetOf<f64> for u32[src]

impl SubsetOf<u8> for u64[src]

impl SubsetOf<u16> for u64[src]

impl SubsetOf<u32> for u64[src]

impl SubsetOf<u64> for u64[src]

impl SubsetOf<u128> for u64[src]

impl SubsetOf<usize> for u64[src]

impl SubsetOf<i8> for u64[src]

impl SubsetOf<i16> for u64[src]

impl SubsetOf<i32> for u64[src]

impl SubsetOf<i64> for u64[src]

impl SubsetOf<i128> for u64[src]

impl SubsetOf<isize> for u64[src]

impl SubsetOf<f32> for u64[src]

impl SubsetOf<f64> for u64[src]

impl SubsetOf<u8> for u128[src]

impl SubsetOf<u16> for u128[src]

impl SubsetOf<u32> for u128[src]

impl SubsetOf<u64> for u128[src]

impl SubsetOf<u128> for u128[src]

impl SubsetOf<usize> for u128[src]

impl SubsetOf<i8> for u128[src]

impl SubsetOf<i16> for u128[src]

impl SubsetOf<i32> for u128[src]

impl SubsetOf<i64> for u128[src]

impl SubsetOf<i128> for u128[src]

impl SubsetOf<isize> for u128[src]

impl SubsetOf<f32> for u128[src]

impl SubsetOf<f64> for u128[src]

impl SubsetOf<u8> for usize[src]

impl SubsetOf<u16> for usize[src]

impl SubsetOf<u32> for usize[src]

impl SubsetOf<u64> for usize[src]

impl SubsetOf<u128> for usize[src]

impl SubsetOf<usize> for usize[src]

impl SubsetOf<i8> for usize[src]

impl SubsetOf<i16> for usize[src]

impl SubsetOf<i32> for usize[src]

impl SubsetOf<i64> for usize[src]

impl SubsetOf<i128> for usize[src]

impl SubsetOf<isize> for usize[src]

impl SubsetOf<f32> for usize[src]

impl SubsetOf<f64> for usize[src]

impl SubsetOf<i8> for i8[src]

impl SubsetOf<i16> for i8[src]

impl SubsetOf<i32> for i8[src]

impl SubsetOf<i64> for i8[src]

impl SubsetOf<i128> for i8[src]

impl SubsetOf<isize> for i8[src]

impl SubsetOf<f32> for i8[src]

impl SubsetOf<f64> for i8[src]

impl SubsetOf<i8> for i16[src]

impl SubsetOf<i16> for i16[src]

impl SubsetOf<i32> for i16[src]

impl SubsetOf<i64> for i16[src]

impl SubsetOf<i128> for i16[src]

impl SubsetOf<isize> for i16[src]

impl SubsetOf<f32> for i16[src]

impl SubsetOf<f64> for i16[src]

impl SubsetOf<i8> for i32[src]

impl SubsetOf<i16> for i32[src]

impl SubsetOf<i32> for i32[src]

impl SubsetOf<i64> for i32[src]

impl SubsetOf<i128> for i32[src]

impl SubsetOf<isize> for i32[src]

impl SubsetOf<f32> for i32[src]

impl SubsetOf<f64> for i32[src]

impl SubsetOf<i8> for i64[src]

impl SubsetOf<i16> for i64[src]

impl SubsetOf<i32> for i64[src]

impl SubsetOf<i64> for i64[src]

impl SubsetOf<i128> for i64[src]

impl SubsetOf<isize> for i64[src]

impl SubsetOf<f32> for i64[src]

impl SubsetOf<f64> for i64[src]

impl SubsetOf<i8> for i128[src]

impl SubsetOf<i16> for i128[src]

impl SubsetOf<i32> for i128[src]

impl SubsetOf<i64> for i128[src]

impl SubsetOf<i128> for i128[src]

impl SubsetOf<isize> for i128[src]

impl SubsetOf<f32> for i128[src]

impl SubsetOf<f64> for i128[src]

impl SubsetOf<i8> for isize[src]

impl SubsetOf<i16> for isize[src]

impl SubsetOf<i32> for isize[src]

impl SubsetOf<i64> for isize[src]

impl SubsetOf<i128> for isize[src]

impl SubsetOf<isize> for isize[src]

impl SubsetOf<f32> for isize[src]

impl SubsetOf<f64> for isize[src]

impl SubsetOf<f32> for f32[src]

impl SubsetOf<f64> for f32[src]

impl SubsetOf<f32> for f64[src]

impl SubsetOf<f64> for f64[src]

impl<N1, N2: SupersetOf<N1>> SubsetOf<Complex<N2>> for Complex<N1>[src]

impl<N2: Zero + SupersetOf<u8>> SubsetOf<Complex<N2>> for u8[src]

impl<N2: Zero + SupersetOf<u16>> SubsetOf<Complex<N2>> for u16[src]

impl<N2: Zero + SupersetOf<u32>> SubsetOf<Complex<N2>> for u32[src]

impl<N2: Zero + SupersetOf<u64>> SubsetOf<Complex<N2>> for u64[src]

impl<N2: Zero + SupersetOf<u128>> SubsetOf<Complex<N2>> for u128[src]

impl<N2: Zero + SupersetOf<usize>> SubsetOf<Complex<N2>> for usize[src]

impl<N2: Zero + SupersetOf<i8>> SubsetOf<Complex<N2>> for i8[src]

impl<N2: Zero + SupersetOf<i16>> SubsetOf<Complex<N2>> for i16[src]

impl<N2: Zero + SupersetOf<i32>> SubsetOf<Complex<N2>> for i32[src]

impl<N2: Zero + SupersetOf<i64>> SubsetOf<Complex<N2>> for i64[src]

impl<N2: Zero + SupersetOf<i128>> SubsetOf<Complex<N2>> for i128[src]

impl<N2: Zero + SupersetOf<isize>> SubsetOf<Complex<N2>> for isize[src]

impl<N2: Zero + SupersetOf<f32>> SubsetOf<Complex<N2>> for f32[src]

impl<N2: Zero + SupersetOf<f64>> SubsetOf<Complex<N2>> for f64[src]

impl<N2: Zero + SupersetOf<d128>> SubsetOf<Complex<N2>> for d128[src]

impl<N2> SubsetOf<AutoSimd<N2>> for u8 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<u8> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for u16 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<u16> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for u32 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<u32> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for u64 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<u64> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for usize where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<usize> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for i8 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<i8> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for i16 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<i16> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for i32 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<i32> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for i64 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<i64> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for isize where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<isize> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for f32 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<f32> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for f64 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<f64> + PartialEq
[src]

impl<N2> SubsetOf<AutoSimd<N2>> for d128 where
    AutoSimd<N2>: SimdValue + Copy,
    <AutoSimd<N2> as SimdValue>::Element: SupersetOf<d128> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for u8 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<u8> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for u16 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<u16> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for u32 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<u32> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for u64 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<u64> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for usize where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<usize> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for i8 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<i8> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for i16 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<i16> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for i32 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<i32> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for i64 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<i64> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for isize where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<isize> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for f32 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<f32> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for f64 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<f64> + PartialEq
[src]

impl<N2> SubsetOf<Simd<N2>> for d128 where
    Simd<N2>: SimdValue + Copy,
    <Simd<N2> as SimdValue>::Element: SupersetOf<d128> + PartialEq
[src]

impl SubsetOf<WideF32x4> for u8[src]

impl SubsetOf<WideF32x4> for u16[src]

impl SubsetOf<WideF32x4> for u32[src]

impl SubsetOf<WideF32x4> for u64[src]

impl SubsetOf<WideF32x4> for usize[src]

impl SubsetOf<WideF32x4> for i8[src]

impl SubsetOf<WideF32x4> for i16[src]

impl SubsetOf<WideF32x4> for i32[src]

impl SubsetOf<WideF32x4> for i64[src]

impl SubsetOf<WideF32x4> for isize[src]

impl SubsetOf<WideF32x4> for f32[src]

impl SubsetOf<WideF32x4> for f64[src]

Implementors

impl SubsetOf<AutoSimd<[f32; 2]>> for AutoSimd<[f32; 2]>[src]

impl SubsetOf<AutoSimd<[f32; 4]>> for AutoSimd<[f32; 4]>[src]

impl SubsetOf<AutoSimd<[f32; 8]>> for AutoSimd<[f32; 8]>[src]

impl SubsetOf<AutoSimd<[f32; 16]>> for AutoSimd<[f32; 16]>[src]

impl SubsetOf<AutoSimd<[f64; 2]>> for AutoSimd<[f64; 2]>[src]

impl SubsetOf<AutoSimd<[f64; 4]>> for AutoSimd<[f64; 4]>[src]

impl SubsetOf<AutoSimd<[f64; 8]>> for AutoSimd<[f64; 8]>[src]

impl SubsetOf<AutoSimd<[i8; 2]>> for AutoSimd<[i8; 2]>[src]

impl SubsetOf<AutoSimd<[i8; 4]>> for AutoSimd<[i8; 4]>[src]

impl SubsetOf<AutoSimd<[i8; 8]>> for AutoSimd<[i8; 8]>[src]

impl SubsetOf<AutoSimd<[i8; 16]>> for AutoSimd<[i8; 16]>[src]

impl SubsetOf<AutoSimd<[i8; 32]>> for AutoSimd<[i8; 32]>[src]

impl SubsetOf<AutoSimd<[i16; 2]>> for AutoSimd<[i16; 2]>[src]

impl SubsetOf<AutoSimd<[i16; 4]>> for AutoSimd<[i16; 4]>[src]

impl SubsetOf<AutoSimd<[i16; 8]>> for AutoSimd<[i16; 8]>[src]

impl SubsetOf<AutoSimd<[i16; 16]>> for AutoSimd<[i16; 16]>[src]

impl SubsetOf<AutoSimd<[i16; 32]>> for AutoSimd<[i16; 32]>[src]

impl SubsetOf<AutoSimd<[i32; 2]>> for AutoSimd<[i32; 2]>[src]

impl SubsetOf<AutoSimd<[i32; 4]>> for AutoSimd<[i32; 4]>[src]

impl SubsetOf<AutoSimd<[i32; 8]>> for AutoSimd<[i32; 8]>[src]

impl SubsetOf<AutoSimd<[i32; 16]>> for AutoSimd<[i32; 16]>[src]

impl SubsetOf<AutoSimd<[i64; 2]>> for AutoSimd<[i64; 2]>[src]

impl SubsetOf<AutoSimd<[i64; 4]>> for AutoSimd<[i64; 4]>[src]

impl SubsetOf<AutoSimd<[i64; 8]>> for AutoSimd<[i64; 8]>[src]

impl SubsetOf<AutoSimd<[i128; 1]>> for AutoSimd<[i128; 1]>[src]

impl SubsetOf<AutoSimd<[i128; 2]>> for AutoSimd<[i128; 2]>[src]

impl SubsetOf<AutoSimd<[i128; 4]>> for AutoSimd<[i128; 4]>[src]

impl SubsetOf<AutoSimd<[isize; 2]>> for AutoSimd<[isize; 2]>[src]

impl SubsetOf<AutoSimd<[isize; 4]>> for AutoSimd<[isize; 4]>[src]

impl SubsetOf<AutoSimd<[isize; 8]>> for AutoSimd<[isize; 8]>[src]

impl SubsetOf<AutoSimd<[u8; 2]>> for AutoSimd<[u8; 2]>[src]

impl SubsetOf<AutoSimd<[u8; 4]>> for AutoSimd<[u8; 4]>[src]

impl SubsetOf<AutoSimd<[u8; 8]>> for AutoSimd<[u8; 8]>[src]

impl SubsetOf<AutoSimd<[u8; 16]>> for AutoSimd<[u8; 16]>[src]

impl SubsetOf<AutoSimd<[u8; 32]>> for AutoSimd<[u8; 32]>[src]

impl SubsetOf<AutoSimd<[u16; 2]>> for AutoSimd<[u16; 2]>[src]

impl SubsetOf<AutoSimd<[u16; 4]>> for AutoSimd<[u16; 4]>[src]

impl SubsetOf<AutoSimd<[u16; 8]>> for AutoSimd<[u16; 8]>[src]

impl SubsetOf<AutoSimd<[u16; 16]>> for AutoSimd<[u16; 16]>[src]

impl SubsetOf<AutoSimd<[u16; 32]>> for AutoSimd<[u16; 32]>[src]

impl SubsetOf<AutoSimd<[u32; 2]>> for AutoSimd<[u32; 2]>[src]

impl SubsetOf<AutoSimd<[u32; 4]>> for AutoSimd<[u32; 4]>[src]

impl SubsetOf<AutoSimd<[u32; 8]>> for AutoSimd<[u32; 8]>[src]

impl SubsetOf<AutoSimd<[u32; 16]>> for AutoSimd<[u32; 16]>[src]

impl SubsetOf<AutoSimd<[u64; 2]>> for AutoSimd<[u64; 2]>[src]

impl SubsetOf<AutoSimd<[u64; 4]>> for AutoSimd<[u64; 4]>[src]

impl SubsetOf<AutoSimd<[u64; 8]>> for AutoSimd<[u64; 8]>[src]

impl SubsetOf<AutoSimd<[u128; 1]>> for AutoSimd<[u128; 1]>[src]

impl SubsetOf<AutoSimd<[u128; 2]>> for AutoSimd<[u128; 2]>[src]

impl SubsetOf<AutoSimd<[u128; 4]>> for AutoSimd<[u128; 4]>[src]

impl SubsetOf<AutoSimd<[usize; 2]>> for AutoSimd<[usize; 2]>[src]

impl SubsetOf<AutoSimd<[usize; 4]>> for AutoSimd<[usize; 4]>[src]

impl SubsetOf<AutoSimd<[usize; 8]>> for AutoSimd<[usize; 8]>[src]

impl SubsetOf<Simd<Simd<[f32; 2]>>> for Simd<f32x2>[src]

impl SubsetOf<Simd<Simd<[f32; 4]>>> for Simd<f32x4>[src]

impl SubsetOf<Simd<Simd<[f32; 8]>>> for Simd<f32x8>[src]

impl SubsetOf<Simd<Simd<[f32; 16]>>> for Simd<f32x16>[src]

impl SubsetOf<Simd<Simd<[f64; 2]>>> for Simd<f64x2>[src]

impl SubsetOf<Simd<Simd<[f64; 4]>>> for Simd<f64x4>[src]

impl SubsetOf<Simd<Simd<[f64; 8]>>> for Simd<f64x8>[src]

impl SubsetOf<Simd<Simd<[i8; 2]>>> for Simd<i8x2>[src]

impl SubsetOf<Simd<Simd<[i8; 4]>>> for Simd<i8x4>[src]

impl SubsetOf<Simd<Simd<[i8; 8]>>> for Simd<i8x8>[src]

impl SubsetOf<Simd<Simd<[i8; 16]>>> for Simd<i8x16>[src]

impl SubsetOf<Simd<Simd<[i8; 32]>>> for Simd<i8x32>[src]

impl SubsetOf<Simd<Simd<[i8; 64]>>> for Simd<i8x64>[src]

impl SubsetOf<Simd<Simd<[i16; 2]>>> for Simd<i16x2>[src]

impl SubsetOf<Simd<Simd<[i16; 4]>>> for Simd<i16x4>[src]

impl SubsetOf<Simd<Simd<[i16; 8]>>> for Simd<i16x8>[src]

impl SubsetOf<Simd<Simd<[i16; 16]>>> for Simd<i16x16>[src]

impl SubsetOf<Simd<Simd<[i16; 32]>>> for Simd<i16x32>[src]

impl SubsetOf<Simd<Simd<[i32; 2]>>> for Simd<i32x2>[src]

impl SubsetOf<Simd<Simd<[i32; 4]>>> for Simd<i32x4>[src]

impl SubsetOf<Simd<Simd<[i32; 8]>>> for Simd<i32x8>[src]

impl SubsetOf<Simd<Simd<[i32; 16]>>> for Simd<i32x16>[src]

impl SubsetOf<Simd<Simd<[i64; 2]>>> for Simd<i64x2>[src]

impl SubsetOf<Simd<Simd<[i64; 4]>>> for Simd<i64x4>[src]

impl SubsetOf<Simd<Simd<[i64; 8]>>> for Simd<i64x8>[src]

impl SubsetOf<Simd<Simd<[i128; 1]>>> for Simd<i128x1>[src]

impl SubsetOf<Simd<Simd<[i128; 2]>>> for Simd<i128x2>[src]

impl SubsetOf<Simd<Simd<[i128; 4]>>> for Simd<i128x4>[src]

impl SubsetOf<Simd<Simd<[isize; 2]>>> for Simd<isizex2>[src]

impl SubsetOf<Simd<Simd<[isize; 4]>>> for Simd<isizex4>[src]

impl SubsetOf<Simd<Simd<[isize; 8]>>> for Simd<isizex8>[src]

impl SubsetOf<Simd<Simd<[u8; 2]>>> for Simd<u8x2>[src]

impl SubsetOf<Simd<Simd<[u8; 4]>>> for Simd<u8x4>[src]

impl SubsetOf<Simd<Simd<[u8; 8]>>> for Simd<u8x8>[src]

impl SubsetOf<Simd<Simd<[u8; 16]>>> for Simd<u8x16>[src]

impl SubsetOf<Simd<Simd<[u8; 32]>>> for Simd<u8x32>[src]

impl SubsetOf<Simd<Simd<[u8; 64]>>> for Simd<u8x64>[src]

impl SubsetOf<Simd<Simd<[u16; 2]>>> for Simd<u16x2>[src]

impl SubsetOf<Simd<Simd<[u16; 4]>>> for Simd<u16x4>[src]

impl SubsetOf<Simd<Simd<[u16; 8]>>> for Simd<u16x8>[src]

impl SubsetOf<Simd<Simd<[u16; 16]>>> for Simd<u16x16>[src]

impl SubsetOf<Simd<Simd<[u16; 32]>>> for Simd<u16x32>[src]

impl SubsetOf<Simd<Simd<[u32; 2]>>> for Simd<u32x2>[src]

impl SubsetOf<Simd<Simd<[u32; 4]>>> for Simd<u32x4>[src]

impl SubsetOf<Simd<Simd<[u32; 8]>>> for Simd<u32x8>[src]

impl SubsetOf<Simd<Simd<[u32; 16]>>> for Simd<u32x16>[src]

impl SubsetOf<Simd<Simd<[u64; 2]>>> for Simd<u64x2>[src]

impl SubsetOf<Simd<Simd<[u64; 4]>>> for Simd<u64x4>[src]

impl SubsetOf<Simd<Simd<[u64; 8]>>> for Simd<u64x8>[src]

impl SubsetOf<Simd<Simd<[u128; 1]>>> for Simd<u128x1>[src]

impl SubsetOf<Simd<Simd<[u128; 2]>>> for Simd<u128x2>[src]

impl SubsetOf<Simd<Simd<[u128; 4]>>> for Simd<u128x4>[src]

impl SubsetOf<Simd<Simd<[usize; 2]>>> for Simd<usizex2>[src]

impl SubsetOf<Simd<Simd<[usize; 4]>>> for Simd<usizex4>[src]

impl SubsetOf<Simd<Simd<[usize; 8]>>> for Simd<usizex8>[src]

impl SubsetOf<WideBoolF32x4> for WideBoolF32x4[src]

impl SubsetOf<WideF32x4> for WideF32x4[src]

impl<Fract: LeEqU8> SubsetOf<FixedI8<Fract>> for FixedI8<Fract>[src]

impl<Fract: LeEqU16> SubsetOf<FixedI16<Fract>> for FixedI16<Fract>[src]

impl<Fract: LeEqU32> SubsetOf<FixedI32<Fract>> for FixedI32<Fract>[src]

impl<Fract: LeEqU64> SubsetOf<FixedI64<Fract>> for FixedI64<Fract>[src]
