1use rand_distr::Distribution;
2
3use crate::{
4 layouts::{Backend, Scratch, VecZnxBigOwned, VecZnxBigToMut, VecZnxBigToRef, VecZnxToMut, VecZnxToRef},
5 source::Source,
6};
7
/// Backend-provided constructor for an owned big vector ([`VecZnxBigOwned`]).
pub trait VecZnxBigAlloc<B: Backend> {
    /// Allocates a big vector of ring degree `n` with `cols` columns and `size`
    /// limbs per column (interpretation of `cols`/`size` inferred from the
    /// sibling traits in this module — confirm against the layout definition).
    fn vec_znx_big_alloc(&self, n: usize, cols: usize, size: usize) -> VecZnxBigOwned<B>;
}
12
/// Byte-footprint query companion to [`VecZnxBigAlloc`].
pub trait VecZnxBigAllocBytes {
    /// Returns the number of bytes required to back a big vector of ring
    /// degree `n` with `cols` columns and `size` limbs per column, without
    /// performing the allocation.
    fn vec_znx_big_alloc_bytes(&self, n: usize, cols: usize, size: usize) -> usize;
}
17
/// Constructs a big vector over a caller-provided byte buffer.
pub trait VecZnxBigFromBytes<B: Backend> {
    /// Builds a [`VecZnxBigOwned`] of ring degree `n`, `cols` columns and
    /// `size` limbs from `bytes`. `bytes` presumably must be at least
    /// `vec_znx_big_alloc_bytes(n, cols, size)` long — confirm with the
    /// backend implementation.
    fn vec_znx_big_from_bytes(&self, n: usize, cols: usize, size: usize, bytes: Vec<u8>) -> VecZnxBigOwned<B>;
}
23
24#[allow(clippy::too_many_arguments)]
25pub trait VecZnxBigAddNormal<B: Backend> {
36 fn vec_znx_big_add_normal<R: VecZnxBigToMut<B>>(
37 &self,
38 basek: usize,
39 res: &mut R,
40 res_col: usize,
41 k: usize,
42 source: &mut Source,
43 sigma: f64,
44 bound: f64,
45 );
46}
47
48#[allow(clippy::too_many_arguments)]
49pub trait VecZnxBigFillNormal<B: Backend> {
50 fn vec_znx_big_fill_normal<R: VecZnxBigToMut<B>>(
51 &self,
52 basek: usize,
53 res: &mut R,
54 res_col: usize,
55 k: usize,
56 source: &mut Source,
57 sigma: f64,
58 bound: f64,
59 );
60}
61
62#[allow(clippy::too_many_arguments)]
63pub trait VecZnxBigFillDistF64<B: Backend> {
64 fn vec_znx_big_fill_dist_f64<R: VecZnxBigToMut<B>, D: Distribution<f64>>(
65 &self,
66 basek: usize,
67 res: &mut R,
68 res_col: usize,
69 k: usize,
70 source: &mut Source,
71 dist: D,
72 bound: f64,
73 );
74}
75
76#[allow(clippy::too_many_arguments)]
77pub trait VecZnxBigAddDistF64<B: Backend> {
78 fn vec_znx_big_add_dist_f64<R: VecZnxBigToMut<B>, D: Distribution<f64>>(
79 &self,
80 basek: usize,
81 res: &mut R,
82 res_col: usize,
83 k: usize,
84 source: &mut Source,
85 dist: D,
86 bound: f64,
87 );
88}
89
90pub trait VecZnxBigAdd<B: Backend> {
91 fn vec_znx_big_add<R, A, C>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &C, b_col: usize)
93 where
94 R: VecZnxBigToMut<B>,
95 A: VecZnxBigToRef<B>,
96 C: VecZnxBigToRef<B>;
97}
98
99pub trait VecZnxBigAddInplace<B: Backend> {
100 fn vec_znx_big_add_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
102 where
103 R: VecZnxBigToMut<B>,
104 A: VecZnxBigToRef<B>;
105}
106
107pub trait VecZnxBigAddSmall<B: Backend> {
108 fn vec_znx_big_add_small<R, A, C>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &C, b_col: usize)
110 where
111 R: VecZnxBigToMut<B>,
112 A: VecZnxBigToRef<B>,
113 C: VecZnxToRef;
114}
115
116pub trait VecZnxBigAddSmallInplace<B: Backend> {
117 fn vec_znx_big_add_small_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
119 where
120 R: VecZnxBigToMut<B>,
121 A: VecZnxToRef;
122}
123
124pub trait VecZnxBigSub<B: Backend> {
125 fn vec_znx_big_sub<R, A, C>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &C, b_col: usize)
127 where
128 R: VecZnxBigToMut<B>,
129 A: VecZnxBigToRef<B>,
130 C: VecZnxBigToRef<B>;
131}
132
133pub trait VecZnxBigSubABInplace<B: Backend> {
134 fn vec_znx_big_sub_ab_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
136 where
137 R: VecZnxBigToMut<B>,
138 A: VecZnxBigToRef<B>;
139}
140
141pub trait VecZnxBigSubBAInplace<B: Backend> {
142 fn vec_znx_big_sub_ba_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
144 where
145 R: VecZnxBigToMut<B>,
146 A: VecZnxBigToRef<B>;
147}
148
149pub trait VecZnxBigSubSmallA<B: Backend> {
150 fn vec_znx_big_sub_small_a<R, A, C>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &C, b_col: usize)
152 where
153 R: VecZnxBigToMut<B>,
154 A: VecZnxToRef,
155 C: VecZnxBigToRef<B>;
156}
157
158pub trait VecZnxBigSubSmallAInplace<B: Backend> {
159 fn vec_znx_big_sub_small_a_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
161 where
162 R: VecZnxBigToMut<B>,
163 A: VecZnxToRef;
164}
165
166pub trait VecZnxBigSubSmallB<B: Backend> {
167 fn vec_znx_big_sub_small_b<R, A, C>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize, b: &C, b_col: usize)
169 where
170 R: VecZnxBigToMut<B>,
171 A: VecZnxBigToRef<B>,
172 C: VecZnxToRef;
173}
174
175pub trait VecZnxBigSubSmallBInplace<B: Backend> {
176 fn vec_znx_big_sub_small_b_inplace<R, A>(&self, res: &mut R, res_col: usize, a: &A, a_col: usize)
178 where
179 R: VecZnxBigToMut<B>,
180 A: VecZnxToRef;
181}
182
183pub trait VecZnxBigNegateInplace<B: Backend> {
184 fn vec_znx_big_negate_inplace<A>(&self, a: &mut A, a_col: usize)
185 where
186 A: VecZnxBigToMut<B>;
187}
188
/// Scratch-size query companion to [`VecZnxBigNormalize`].
pub trait VecZnxBigNormalizeTmpBytes {
    /// Returns the number of temporary bytes the normalization routine needs
    /// for ring degree `n`.
    fn vec_znx_big_normalize_tmp_bytes(&self, n: usize) -> usize;
}
192
193pub trait VecZnxBigNormalize<B: Backend> {
194 fn vec_znx_big_normalize<R, A>(
195 &self,
196 basek: usize,
197 res: &mut R,
198 res_col: usize,
199 a: &A,
200 a_col: usize,
201 scratch: &mut Scratch<B>,
202 ) where
203 R: VecZnxToMut,
204 A: VecZnxBigToRef<B>;
205}
206
207pub trait VecZnxBigAutomorphism<B: Backend> {
208 fn vec_znx_big_automorphism<R, A>(&self, k: i64, res: &mut R, res_col: usize, a: &A, a_col: usize)
210 where
211 R: VecZnxBigToMut<B>,
212 A: VecZnxBigToRef<B>;
213}
214
215pub trait VecZnxBigAutomorphismInplace<B: Backend> {
216 fn vec_znx_big_automorphism_inplace<A>(&self, k: i64, a: &mut A, a_col: usize)
218 where
219 A: VecZnxBigToMut<B>;
220}