pub struct Module<B: Backend> { /* private fields */ }

Implementations

impl<B: Backend> Module<B>
pub fn new_marker(n: u64) -> Self
pub unsafe fn from_nonnull(ptr: NonNull<B::Handle>, n: u64) -> Self
pub unsafe fn from_raw_parts(ptr: *mut B::Handle, n: u64) -> Self
Constructs a Module from a raw pointer managed elsewhere.
SAFETY: ptr must be non-null and remain valid for the lifetime of this Module.
pub unsafe fn ptr(&self) -> *mut <B as Backend>::Handle
pub fn n(&self) -> usize
pub fn as_mut_ptr(&self) -> *mut B::Handle
pub fn log_n(&self) -> usize
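
A minimal sketch of the inherent accessors; it uses only the methods listed above and assumes the ring degree n is a power of two, as log_n() suggests:

```rust
// Sketch only: relates the two size accessors of Module.
fn inspect<B: Backend>(module: &Module<B>) {
    let n = module.n();         // ring degree
    let log_n = module.log_n(); // log2 of the ring degree
    // Assumption: n is a power of two, so these agree.
    assert_eq!(1usize << log_n, n);
}
```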

Trait Implementations

impl<BE: Backend> Convolution<BE> for Module<BE>
where
    Self: Sized + ModuleN + SvpPPolAlloc<BE> + SvpApplyDftToDft<BE> + SvpPrepare<BE> + SvpPPolBytesOf + VecZnxDftBytesOf + VecZnxDftAddScaledInplace<BE>,
    Scratch<BE>: ScratchTakeBasic,
fn convolution_tmp_bytes(&self, res_size: usize) -> usize

impl<BE: Backend> CyclotomicOrder for Module<BE> where Self: ModuleN
fn cyclotomic_order(&self) -> i64

impl<BE: Backend> GaloisElement for Module<BE> where Self: CyclotomicOrder
fn galois_element(&self, generator: i64) -> i64
fn galois_element_inv(&self, gal_el: i64) -> i64

impl<B> SvpApplyDft<B> for Module<B> where B: Backend + SvpApplyDftImpl<B>

impl<B> SvpApplyDftToDft<B> for Module<B> where B: Backend + SvpApplyDftToDftImpl<B>

impl<B> SvpApplyDftToDftAdd<B> for Module<B> where B: Backend + SvpApplyDftToDftAddImpl<B>

impl<B> SvpApplyDftToDftInplace<B> for Module<B> where B: Backend + SvpApplyDftToDftInplaceImpl
fn svp_apply_dft_to_dft_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: SvpPPolToRef<B>

impl<B> SvpPPolAlloc<B> for Module<B> where B: Backend + SvpPPolAllocImpl<B>
fn svp_ppol_alloc(&self, cols: usize) -> SvpPPolOwned<B>

impl<B> SvpPPolBytesOf for Module<B> where B: Backend + SvpPPolAllocBytesImpl<B>
fn bytes_of_svp_ppol(&self, cols: usize) -> usize

impl<B> SvpPPolFromBytes<B> for Module<B> where B: Backend + SvpPPolFromBytesImpl<B>
fn svp_ppol_from_bytes(&self, cols: usize, bytes: Vec<u8>) -> SvpPPolOwned<B>

impl<B> SvpPrepare<B> for Module<B> where B: Backend + SvpPrepareImpl<B>
fn svp_prepare<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: SvpPPolToMut<B>, A: ScalarZnxToRef
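
A sketch of the scalar-vector-product flow these traits suggest: prepare a ScalarZnx into the backend's SVP representation, then apply it to a DFT-domain vector in place. It assumes SvpPPolOwned<B> implements SvpPPolToMut<B> and SvpPPolToRef<B>, as owned types typically do:

```rust
fn svp_mul_inplace<B, M, S, R>(module: &M, res: &mut R, scalar: &S)
where
    B: Backend,
    M: SvpPPolAlloc<B> + SvpPrepare<B> + SvpApplyDftToDftInplace<B>,
    S: ScalarZnxToRef,
    R: VecZnxDftToMut<B>,
{
    // One-column prepared scalar polynomial.
    let mut ppol = module.svp_ppol_alloc(1);
    module.svp_prepare(&mut ppol, 0, scalar, 0);
    // Multiply column 0 of `res` by the prepared scalar, in the DFT domain.
    module.svp_apply_dft_to_dft_inplace(res, 0, &ppol, 0);
}
```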

impl<B> VecZnxAdd for Module<B> where B: Backend + VecZnxAddImpl<B>

impl<B> VecZnxAddInplace for Module<B> where B: Backend + VecZnxAddInplaceImpl<B>
fn vec_znx_add_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef
Adds the selected column of a to the selected column of res and writes the result to the selected column of res.
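
As a usage sketch, vec_znx_add_inplace makes a natural accumulator; nothing beyond the traits listed on this page is assumed:

```rust
fn accumulate<M, R, A>(module: &M, acc: &mut R, terms: &[A], col: usize)
where
    M: VecZnxAddInplace,
    R: VecZnxToMut,
    A: VecZnxToRef,
{
    for t in terms {
        // acc[col] += t[col]; explicit reborrow keeps `acc` usable each turn.
        module.vec_znx_add_inplace(&mut *acc, col, t, col);
    }
}
```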

impl<B> VecZnxAddNormal for Module<B> where B: Backend + VecZnxAddNormalImpl<B>

impl<B> VecZnxAddScalar for Module<B> where B: Backend + VecZnxAddScalarImpl<B>

impl<B> VecZnxAddScalarInplace for Module<B> where B: Backend + VecZnxAddScalarInplaceImpl<B>
fn vec_znx_add_scalar_inplace<R, A>(&self, res: &mut R, res_col: usize, res_limb: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: ScalarZnxToRef
Adds the selected column of a to the selected column and limb of res.

impl<B> VecZnxAutomorphism for Module<B> where B: Backend + VecZnxAutomorphismImpl<B>
fn vec_znx_automorphism<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef
Applies the automorphism X^i -> X^(ik) to the selected column of a and stores the result in the res_col column of res.

impl<B> VecZnxAutomorphismInplace<B> for Module<B> where B: Backend + VecZnxAutomorphismInplaceImpl<B>
fn vec_znx_automorphism_inplace<R>(&self, k: i64, res: &mut R, res_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut
Applies the automorphism X^i -> X^(ik) to the selected column of res, in place.

impl<B> VecZnxAutomorphismInplaceTmpBytes for Module<B> where B: Backend + VecZnxAutomorphismInplaceTmpBytesImpl<B>
fn vec_znx_automorphism_inplace_tmp_bytes(&self) -> usize
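
A sketch combining GaloisElement with the in-place automorphism; it assumes the caller provisions scratch with at least vec_znx_automorphism_inplace_tmp_bytes() bytes (the Scratch constructor is not shown on this page):

```rust
fn apply_galois<B, M, R>(module: &M, res: &mut R, generator: i64, scratch: &mut Scratch<B>)
where
    B: Backend,
    M: GaloisElement + VecZnxAutomorphismInplace<B> + VecZnxAutomorphismInplaceTmpBytes,
    R: VecZnxToMut,
{
    // The caller's scratch must be at least this large.
    let _needed = module.vec_znx_automorphism_inplace_tmp_bytes();
    let k = module.galois_element(generator); // X^i -> X^(ik)
    module.vec_znx_automorphism_inplace(k, res, 0, scratch);
}
```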

impl<B> VecZnxBigAdd<B> for Module<B> where B: Backend + VecZnxBigAddImpl<B>

impl<B> VecZnxBigAddInplace<B> for Module<B> where B: Backend + VecZnxBigAddInplaceImpl<B>
fn vec_znx_big_add_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxBigToRef<B>
Adds the selected column of a to the selected column of res and stores the result in res.

impl<B> VecZnxBigAddNormal<B> for Module<B> where B: Backend + VecZnxBigAddNormalImpl<B>

impl<B> VecZnxBigAddSmall<B> for Module<B> where B: Backend + VecZnxBigAddSmallImpl<B>

impl<B> VecZnxBigAddSmallInplace<B> for Module<B> where B: Backend + VecZnxBigAddSmallInplaceImpl<B>
fn vec_znx_big_add_small_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxToRef
Adds the selected column of the small vector a to the selected column of res and stores the result in res.

impl<B> VecZnxBigAlloc<B> for Module<B> where B: Backend + VecZnxBigAllocImpl<B>
fn vec_znx_big_alloc(&self, cols: usize, size: usize) -> VecZnxBigOwned<B>

impl<B> VecZnxBigAutomorphism<B> for Module<B> where B: Backend + VecZnxBigAutomorphismImpl<B>
fn vec_znx_big_automorphism<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxBigToRef<B>
Applies the automorphism X^i -> X^(ik) to the selected column of a and stores the result in res.

impl<B> VecZnxBigAutomorphismInplace<B> for Module<B> where B: Backend + VecZnxBigAutomorphismInplaceImpl<B>
fn vec_znx_big_automorphism_inplace<A>(&self, k: i64, a: &mut A, a_col: usize, scratch: &mut Scratch<B>) where A: VecZnxBigToMut<B>
Applies the automorphism X^i -> X^(ik) to the selected column of a, in place.

impl<B> VecZnxBigAutomorphismInplaceTmpBytes for Module<B> where B: Backend + VecZnxBigAutomorphismInplaceTmpBytesImpl<B>
fn vec_znx_big_automorphism_inplace_tmp_bytes(&self) -> usize

impl<B> VecZnxBigBytesOf for Module<B> where B: Backend + VecZnxBigAllocBytesImpl<B>

impl<B> VecZnxBigFromBytes<B> for Module<B> where B: Backend + VecZnxBigFromBytesImpl<B>
fn vec_znx_big_from_bytes(&self, cols: usize, size: usize, bytes: Vec<u8>) -> VecZnxBigOwned<B>

impl<B> VecZnxBigFromSmall<B> for Module<B> where B: Backend + VecZnxBigFromSmallImpl<B>
fn vec_znx_big_from_small<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxToRef

impl<B> VecZnxBigNegate<B> for Module<B> where B: Backend + VecZnxBigNegateImpl<B>
fn vec_znx_big_negate<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxBigToRef<B>

impl<B> VecZnxBigNegateInplace<B> for Module<B> where B: Backend + VecZnxBigNegateInplaceImpl<B>
fn vec_znx_big_negate_inplace<A>(&self, a: &mut A, a_col: usize) where A: VecZnxBigToMut<B>

impl<B> VecZnxBigNormalize<B> for Module<B> where B: Backend + VecZnxBigNormalizeImpl<B>
fn vec_znx_big_normalize<R, A>(&self, res_basek: usize, res: &mut R, res_col: usize, a_basek: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxBigToRef<B>

impl<B> VecZnxBigNormalizeTmpBytes for Module<B> where B: Backend + VecZnxBigNormalizeTmpBytesImpl<B>
fn vec_znx_big_normalize_tmp_bytes(&self) -> usize
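
A sketch of the typical carry-propagation step after big-domain arithmetic, assuming scratch holds at least vec_znx_big_normalize_tmp_bytes() bytes:

```rust
fn normalize_big<B, M, R, A>(
    module: &M,
    base2k: usize,
    res: &mut R,
    big: &A,
    scratch: &mut Scratch<B>,
) where
    B: Backend,
    M: VecZnxBigNormalize<B> + VecZnxBigNormalizeTmpBytes,
    R: VecZnxToMut,
    A: VecZnxBigToRef<B>,
{
    let _needed = module.vec_znx_big_normalize_tmp_bytes(); // scratch sizing
    // Same limb base on both sides here; the two may differ in general.
    module.vec_znx_big_normalize(base2k, res, 0, base2k, big, 0, scratch);
}
```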

impl<B> VecZnxBigSub<B> for Module<B> where B: Backend + VecZnxBigSubImpl<B>

impl<B> VecZnxBigSubInplace<B> for Module<B> where B: Backend + VecZnxBigSubInplaceImpl<B>
fn vec_znx_big_sub_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxBigToRef<B>
Subtracts the selected column of a from the selected column of res and stores the result in res.

impl<B> VecZnxBigSubNegateInplace<B> for Module<B> where B: Backend + VecZnxBigSubNegateInplaceImpl<B>
fn vec_znx_big_sub_negate_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxBigToRef<B>
Subtracts the selected column of res from the selected column of a and stores the result in res.

impl<B> VecZnxBigSubSmallA<B> for Module<B> where B: Backend + VecZnxBigSubSmallAImpl<B>

impl<B> VecZnxBigSubSmallB<B> for Module<B> where B: Backend + VecZnxBigSubSmallBImpl<B>

impl<B> VecZnxBigSubSmallInplace<B> for Module<B> where B: Backend + VecZnxBigSubSmallInplaceImpl<B>
fn vec_znx_big_sub_small_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxToRef
Subtracts the selected column of the small vector a from the selected column of res and stores the result in res.

impl<B> VecZnxBigSubSmallNegateInplace<B> for Module<B> where B: Backend + VecZnxBigSubSmallNegateInplaceImpl<B>
fn vec_znx_big_sub_small_negate_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxToRef
Subtracts the selected column of res from the selected column of the small vector a and stores the result in res.

impl<B> VecZnxCopy for Module<B> where B: Backend + VecZnxCopyImpl<B>
fn vec_znx_copy<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VecZnxDftAdd<B> for Module<B> where B: Backend + VecZnxDftAddImpl<B>

impl<B> VecZnxDftAddInplace<B> for Module<B> where B: Backend + VecZnxDftAddInplaceImpl<B>
fn vec_znx_dft_add_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxDftAddScaledInplace<B> for Module<B> where B: Backend + VecZnxDftAddScaledInplaceImpl<B>
fn vec_znx_dft_add_scaled_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, a_scale: i64) where R: VecZnxDftToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxDftAlloc<B> for Module<B> where B: Backend + VecZnxDftAllocImpl<B>
fn vec_znx_dft_alloc(&self, cols: usize, size: usize) -> VecZnxDftOwned<B>

impl<B> VecZnxDftApply<B> for Module<B> where B: Backend + VecZnxDftApplyImpl<B>
fn vec_znx_dft_apply<R, A>(&self, step: usize, offset: usize, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: VecZnxToRef

impl<B> VecZnxDftBytesOf for Module<B> where B: Backend + VecZnxDftAllocBytesImpl<B>

impl<B> VecZnxDftCopy<B> for Module<B> where B: Backend + VecZnxDftCopyImpl<B>
fn vec_znx_dft_copy<R, A>(&self, step: usize, offset: usize, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxDftFromBytes<B> for Module<B> where B: Backend + VecZnxDftFromBytesImpl<B>
fn vec_znx_dft_from_bytes(&self, cols: usize, size: usize, bytes: Vec<u8>) -> VecZnxDftOwned<B>

impl<B> VecZnxDftSub<B> for Module<B> where B: Backend + VecZnxDftSubImpl<B>

impl<B> VecZnxDftSubInplace<B> for Module<B> where B: Backend + VecZnxDftSubInplaceImpl<B>
fn vec_znx_dft_sub_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxDftSubNegateInplace<B> for Module<B> where B: Backend + VecZnxDftSubNegateInplaceImpl<B>
fn vec_znx_dft_sub_negate_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxDftToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxDftZero<B> for Module<B> where B: Backend + VecZnxDftZeroImpl<B>
fn vec_znx_dft_zero<R>(&self, res: &mut R) where R: VecZnxDftToMut<B>

impl<B> VecZnxFillNormal for Module<B> where B: Backend + VecZnxFillNormalImpl<B>

impl<B> VecZnxFillUniform for Module<B> where B: Backend + VecZnxFillUniformImpl<B>
fn vec_znx_fill_uniform<R>(&self, base2k: usize, res: &mut R, res_col: usize, source: &mut Source) where R: VecZnxToMut
Fills the first size limbs of the selected column of res with uniform values in [-2^{base2k-1}, 2^{base2k-1}].
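
A sketch pairing the uniform filler with an in-place normalization; Source construction is not covered on this page, so one is taken by parameter:

```rust
fn sample_uniform<B, M, R>(
    module: &M,
    base2k: usize,
    res: &mut R,
    source: &mut Source,
    scratch: &mut Scratch<B>,
) where
    B: Backend,
    M: VecZnxFillUniform + VecZnxNormalizeInplace<B>,
    R: VecZnxToMut,
{
    // Fill column 0 with uniform limbs in [-2^(base2k-1), 2^(base2k-1)].
    module.vec_znx_fill_uniform(base2k, &mut *res, 0, source);
    // Propagate carries so the limbs end up in canonical form.
    module.vec_znx_normalize_inplace(base2k, res, 0, scratch);
}
```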

impl<B> VecZnxIdftApply<B> for Module<B> where B: Backend + VecZnxIdftApplyImpl<B>
fn vec_znx_idft_apply<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxBigToMut<B>, A: VecZnxDftToRef<B>

impl<B> VecZnxIdftApplyConsume<B> for Module<B> where B: Backend + VecZnxIdftApplyConsumeImpl<B>
fn vec_znx_idft_apply_consume<D: Data>(&self, a: VecZnxDft<D, B>) -> VecZnxBig<D, B> where VecZnxDft<D, B>: VecZnxDftToMut<B>

impl<B> VecZnxIdftApplyTmpA<B> for Module<B> where B: Backend + VecZnxIdftApplyTmpAImpl<B>
fn vec_znx_idft_apply_tmpa<R, A>(&self, res: &mut R, res_col: usize, a: &mut A, a_col: usize) where R: VecZnxBigToMut<B>, A: VecZnxDftToMut<B>

impl<B> VecZnxIdftApplyTmpBytes for Module<B> where B: Backend + VecZnxIdftApplyTmpBytesImpl<B>
fn vec_znx_idft_apply_tmp_bytes(&self) -> usize
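
A sketch of the DFT round trip these traits suggest: forward DFT into a freshly allocated buffer, then a consuming inverse DFT and a final normalization. It assumes VecZnxDftOwned<B> is the owned form of VecZnxDft<D, B> and implements VecZnxDftToMut<B>, and that step = 1, offset = 0 selects every limb; neither is confirmed on this page:

```rust
fn dft_roundtrip<B, M, A, R>(
    module: &M,
    base2k: usize,
    size: usize,
    res: &mut R,
    a: &A,
    scratch: &mut Scratch<B>,
) where
    B: Backend,
    M: VecZnxDftAlloc<B> + VecZnxDftApply<B> + VecZnxIdftApplyConsume<B> + VecZnxBigNormalize<B>,
    A: VecZnxToRef,
    R: VecZnxToMut,
{
    let mut dft = module.vec_znx_dft_alloc(1, size);
    module.vec_znx_dft_apply(1, 0, &mut dft, 0, a, 0);
    // ...pointwise DFT-domain operations would go here...
    let big = module.vec_znx_idft_apply_consume(dft); // reuses dft's buffer
    module.vec_znx_big_normalize(base2k, res, 0, base2k, &big, 0, scratch);
}
```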

impl<B> VecZnxLsh<B> for Module<B> where B: Backend + VecZnxLshImpl<B>
fn vec_znx_lsh<R, A>(&self, base2k: usize, k: usize, res: &mut R, res_col: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxToRef
Left-shifts the selected column of a by k bits and stores the result in res.

impl<B> VecZnxLshInplace<B> for Module<B> where B: Backend + VecZnxLshInplaceImpl<B>
fn vec_znx_lsh_inplace<A>(&self, base2k: usize, k: usize, a: &mut A, a_col: usize, scratch: &mut Scratch<B>) where A: VecZnxToMut
Left-shifts the selected column of a by k bits, in place.

impl<B> VecZnxLshTmpBytes for Module<B> where B: Backend + VecZnxLshTmpBytesImpl<B>
fn vec_znx_lsh_tmp_bytes(&self) -> usize

impl<B> VecZnxMergeRings<B> for Module<B> where B: Backend + VecZnxMergeRingsImpl<B>
fn vec_znx_merge_rings<R, A>(&self, res: &mut R, res_col: usize, a: &[A], a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VecZnxMergeRingsTmpBytes for Module<B> where B: Backend + VecZnxMergeRingsTmpBytesImpl<B>
fn vec_znx_merge_rings_tmp_bytes(&self) -> usize

impl<B> VecZnxMulXpMinusOne for Module<B> where B: Backend + VecZnxMulXpMinusOneImpl<B>
fn vec_znx_mul_xp_minus_one<R, A>(&self, p: i64, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VecZnxMulXpMinusOneInplace<B> for Module<B> where B: Backend + VecZnxMulXpMinusOneInplaceImpl<B>
fn vec_znx_mul_xp_minus_one_inplace<R>(&self, p: i64, res: &mut R, res_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut

impl<B> VecZnxMulXpMinusOneInplaceTmpBytes for Module<B> where B: Backend + VecZnxMulXpMinusOneInplaceTmpBytesImpl<B>
fn vec_znx_mul_xp_minus_one_inplace_tmp_bytes(&self) -> usize

impl<B> VecZnxNegate for Module<B> where B: Backend + VecZnxNegateImpl<B>
fn vec_znx_negate<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VecZnxNegateInplace for Module<B> where B: Backend + VecZnxNegateInplaceImpl<B>
fn vec_znx_negate_inplace<A>(&self, a: &mut A, a_col: usize) where A: VecZnxToMut
Negates the selected column of a, in place.

impl<B> VecZnxNormalize<B> for Module<B> where B: Backend + VecZnxNormalizeImpl<B>
fn vec_znx_normalize<R, A>(&self, res_basek: usize, res: &mut R, res_col: usize, a_basek: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxToRef
Normalizes the selected column of a and stores the result in the selected column of res.

impl<B> VecZnxNormalizeInplace<B> for Module<B> where B: Backend + VecZnxNormalizeInplaceImpl<B>
fn vec_znx_normalize_inplace<A>(&self, base2k: usize, a: &mut A, a_col: usize, scratch: &mut Scratch<B>) where A: VecZnxToMut
Normalizes the selected column of a, in place.

impl<B> VecZnxNormalizeTmpBytes for Module<B> where B: Backend + VecZnxNormalizeTmpBytesImpl<B>
fn vec_znx_normalize_tmp_bytes(&self) -> usize
Returns the minimum number of scratch bytes necessary for normalization.

impl<B> VecZnxRotate for Module<B> where B: Backend + VecZnxRotateImpl<B>
fn vec_znx_rotate<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef
Multiplies the selected column of a by X^k and stores the result in the res_col column of res.

impl<B> VecZnxRotateInplace<B> for Module<B> where B: Backend + VecZnxRotateInplaceImpl<B>
fn vec_znx_rotate_inplace<A>(&self, k: i64, a: &mut A, a_col: usize, scratch: &mut Scratch<B>) where A: VecZnxToMut
Multiplies the selected column of a by X^k, in place.

impl<B> VecZnxRotateInplaceTmpBytes for Module<B> where B: Backend + VecZnxRotateInplaceTmpBytesImpl<B>
fn vec_znx_rotate_inplace_tmp_bytes(&self) -> usize
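
A sketch of an in-place rotation; scratch sizing comes from vec_znx_rotate_inplace_tmp_bytes() and is assumed to be handled by the caller:

```rust
fn rotate_column<B, M, A>(module: &M, a: &mut A, k: i64, scratch: &mut Scratch<B>)
where
    B: Backend,
    M: VecZnxRotateInplace<B> + VecZnxRotateInplaceTmpBytes,
    A: VecZnxToMut,
{
    let _needed = module.vec_znx_rotate_inplace_tmp_bytes(); // scratch sizing
    module.vec_znx_rotate_inplace(k, a, 0, scratch); // a[0] *= X^k
}
```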

impl<B> VecZnxRsh<B> for Module<B> where B: Backend + VecZnxRshImpl<B>
fn vec_znx_rsh<R, A>(&self, base2k: usize, k: usize, res: &mut R, res_col: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxToRef
Right-shifts the selected column of a by k bits and stores the result in res.

impl<B> VecZnxRshInplace<B> for Module<B> where B: Backend + VecZnxRshInplaceImpl<B>
fn vec_znx_rsh_inplace<A>(&self, base2k: usize, k: usize, a: &mut A, a_col: usize, scratch: &mut Scratch<B>) where A: VecZnxToMut
Right-shifts the selected column of a by k bits, in place.

impl<B> VecZnxRshTmpBytes for Module<B> where B: Backend + VecZnxRshTmpBytesImpl<B>
fn vec_znx_rsh_tmp_bytes(&self) -> usize

impl<B> VecZnxSplitRing<B> for Module<B> where B: Backend + VecZnxSplitRingImpl<B>
fn vec_znx_split_ring<R, A>(&self, res: &mut [R], res_col: usize, a: &A, a_col: usize, scratch: &mut Scratch<B>) where R: VecZnxToMut, A: VecZnxToRef
Splits the selected column of a into subrings and copies them into the selected column of each element of res.

impl<B> VecZnxSplitRingTmpBytes for Module<B> where B: Backend + VecZnxSplitRingTmpBytesImpl<B>
fn vec_znx_split_ring_tmp_bytes(&self) -> usize
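
A round-trip sketch: split a column into subring parts, then merge them back. It assumes the elements of parts live over correspondingly smaller rings, as the slice-based signatures suggest:

```rust
fn split_then_merge<B, M, V>(
    module: &M,
    parts: &mut [V],
    a: &mut V,
    scratch: &mut Scratch<B>,
) where
    B: Backend,
    M: VecZnxSplitRing<B> + VecZnxMergeRings<B>,
    V: VecZnxToMut + VecZnxToRef,
{
    // Split column 0 of `a` into the subring parts...
    module.vec_znx_split_ring(&mut *parts, 0, &*a, 0, &mut *scratch);
    // ...and merge the parts back into column 0 of `a`.
    module.vec_znx_merge_rings(a, 0, parts, 0, scratch);
}
```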

impl<B> VecZnxSub for Module<B> where B: Backend + VecZnxSubImpl<B>

impl<B> VecZnxSubInplace for Module<B> where B: Backend + VecZnxSubInplaceImpl<B>
fn vec_znx_sub_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VecZnxSubNegateInplace for Module<B> where B: Backend + VecZnxSubNegateInplaceImpl<B>
fn vec_znx_sub_negate_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef
Subtracts the selected column of res from the selected column of a and mutates res in place.

impl<B> VecZnxSubScalar for Module<B> where B: Backend + VecZnxSubScalarImpl<B>

impl<B> VecZnxSubScalarInplace for Module<B> where B: Backend + VecZnxSubScalarInplaceImpl<B>
fn vec_znx_sub_scalar_inplace<R, A>(&self, res: &mut R, res_col: usize, res_limb: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: ScalarZnxToRef
Subtracts the selected column of a from the selected column and limb of res.

impl<B> VecZnxSwitchRing for Module<B> where B: Backend + VecZnxSwitchRingImpl<B>
fn vec_znx_switch_ring<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize) where R: VecZnxToMut, A: VecZnxToRef

impl<B> VmpApplyDft<B> for Module<B> where B: Backend + VmpApplyDftImpl<B>

impl<B> VmpApplyDftTmpBytes for Module<B> where B: Backend + VmpApplyDftTmpBytesImpl<B>

impl<B> VmpApplyDftToDft<B> for Module<B> where B: Backend + VmpApplyDftToDftImpl<B>

impl<B> VmpApplyDftToDftAdd<B> for Module<B> where B: Backend + VmpApplyDftToDftAddImpl<B>

impl<B> VmpApplyDftToDftAddTmpBytes for Module<B> where B: Backend + VmpApplyDftToDftAddTmpBytesImpl<B>

impl<B> VmpApplyDftToDftTmpBytes for Module<B> where B: Backend + VmpApplyDftToDftTmpBytesImpl<B>

impl<B> VmpPMatAlloc<B> for Module<B> where B: Backend + VmpPMatAllocImpl<B>
fn vmp_pmat_alloc(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize) -> VmpPMatOwned<B>

impl<B> VmpPMatBytesOf for Module<B> where B: Backend + VmpPMatAllocBytesImpl<B>

impl<B> VmpPMatFromBytes<B> for Module<B> where B: Backend + VmpPMatFromBytesImpl<B>
fn vmp_pmat_from_bytes(&self, rows: usize, cols_in: usize, cols_out: usize, size: usize, bytes: Vec<u8>) -> VmpPMatOwned<B>

impl<B> VmpPrepare<B> for Module<B> where B: Backend + VmpPrepareImpl<B>
fn vmp_prepare<R, A>(&self, res: &mut R, a: &A, scratch: &mut Scratch<B>) where R: VmpPMatToMut<B>, A: MatZnxToRef

impl<B> VmpPrepareTmpBytes for Module<B> where B: Backend + VmpPrepareTmpBytesImpl<B>
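
A sketch of matrix preparation for vector-matrix products. It assumes VmpPMatOwned<B> implements VmpPMatToMut<B>, as owned types typically do, and that scratch is sized by the caller via the VmpPrepareTmpBytes trait:

```rust
fn prepare_pmat<B, M, A>(
    module: &M,
    rows: usize,
    cols_in: usize,
    cols_out: usize,
    size: usize,
    mat: &A,
    scratch: &mut Scratch<B>,
) -> VmpPMatOwned<B>
where
    B: Backend,
    M: VmpPMatAlloc<B> + VmpPrepare<B>,
    A: MatZnxToRef,
{
    // Allocate the prepared-matrix buffer, then fill it from the MatZnx.
    let mut pmat = module.vmp_pmat_alloc(rows, cols_in, cols_out, size);
    module.vmp_prepare(&mut pmat, mat, scratch);
    pmat
}
```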

impl<B> ZnAddNormal for Module<B> where B: Backend + ZnAddNormalImpl<B>

impl<B> ZnFillNormal for Module<B> where B: Backend + ZnFillNormalImpl<B>

impl<B> ZnFillUniform for Module<B> where B: Backend + ZnFillUniformImpl<B>

impl<B> ZnNormalizeInplace<B> for Module<B> where B: Backend + ZnNormalizeInplaceImpl<B>

impl<B> ZnNormalizeTmpBytes for Module<B> where B: Backend + ZnNormalizeTmpBytesImpl<B>
fn zn_normalize_tmp_bytes(&self, n: usize) -> usize
impl<B: Backend> Send for Module<B>
impl<B: Backend> Sync for Module<B>

Auto Trait Implementations
impl<B> Freeze for Module<B>
impl<B> RefUnwindSafe for Module<B>
impl<B> Unpin for Module<B> where B: Unpin
impl<B> UnwindSafe for Module<B>

Blanket Implementations

impl<T> BorrowMut<T> for T where T: ?Sized
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.

impl<T> CheckedAs for T
fn checked_as<Dst>(self) -> Option<Dst> where T: CheckedCast<Dst>
Casts the value.

impl<Src, Dst> CheckedCastFrom<Src> for Dst where Src: CheckedCast<Dst>
fn checked_cast_from(src: Src) -> Option<Dst>
Casts the value.

impl<T> IntoEither for T
fn into_either(self, into_left: bool) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left is true; otherwise converts self into a Right variant.
fn into_either_with<F>(self, into_left: F) -> Either<Self, Self>
Converts self into a Left variant of Either<Self, Self> if into_left(&self) returns true; otherwise converts self into a Right variant.

impl<T> OverflowingAs for T
fn overflowing_as<Dst>(self) -> (Dst, bool) where T: OverflowingCast<Dst>
Casts the value.

impl<Src, Dst> OverflowingCastFrom<Src> for Dst where Src: OverflowingCast<Dst>
fn overflowing_cast_from(src: Src) -> (Dst, bool)
Casts the value.

impl<T> Pointable for T

impl<T> SaturatingAs for T
fn saturating_as<Dst>(self) -> Dst where T: SaturatingCast<Dst>
Casts the value.

impl<Src, Dst> SaturatingCastFrom<Src> for Dst where Src: SaturatingCast<Dst>
fn saturating_cast_from(src: Src) -> Dst
Casts the value.

impl<T> UnwrappedAs for T
fn unwrapped_as<Dst>(self) -> Dst where T: UnwrappedCast<Dst>
Casts the value.

impl<Src, Dst> UnwrappedCastFrom<Src> for Dst where Src: UnwrappedCast<Dst>
fn unwrapped_cast_from(src: Src) -> Dst
Casts the value.

impl<T> WrappingAs for T
fn wrapping_as<Dst>(self) -> Dst where T: WrappingCast<Dst>
Casts the value.

impl<Src, Dst> WrappingCastFrom<Src> for Dst where Src: WrappingCast<Dst>
fn wrapping_cast_from(src: Src) -> Dst
Casts the value.