1use crate::params::{get_params, OptimizationType};
2use crate::reduce::{bigint_to_basefield, limbs_to_bigint, Reducer};
3use crate::AllocatedNonNativeFieldMulResultVar;
4use ark_ff::{BigInteger, FpParameters, PrimeField};
5use ark_r1cs_std::fields::fp::FpVar;
6use ark_r1cs_std::prelude::*;
7use ark_r1cs_std::ToConstraintFieldGadget;
8use ark_relations::r1cs::{OptimizationGoal, Result as R1CSResult};
9use ark_relations::{
10 ns,
11 r1cs::{ConstraintSystemRef, Namespace, SynthesisError},
12};
13use ark_std::cmp::{max, min};
14use ark_std::marker::PhantomData;
15use ark_std::{borrow::Borrow, vec, vec::Vec};
16
/// The allocated form of a nonnative field element: a `TargetField` value
/// represented by a vector of limbs over the constraint field `BaseField`.
#[derive(Debug)]
#[must_use]
pub struct AllocatedNonNativeFieldVar<TargetField: PrimeField, BaseField: PrimeField> {
    /// Reference to the constraint system this element lives in.
    pub cs: ConstraintSystemRef<BaseField>,
    /// The limbs as base-field gadgets, most-significant limb first.
    pub limbs: Vec<FpVar<BaseField>>,
    /// Bound on the additions performed since the last reduction; used by the
    /// reducer to decide when limbs may overflow and a reduction is needed.
    pub num_of_additions_over_normal_form: BaseField,
    /// Whether the limbs are currently in normal form (each limb within its
    /// designated bit width).
    pub is_in_the_normal_form: bool,
    #[doc(hidden)]
    pub target_phantom: PhantomData<TargetField>,
}
32
33impl<TargetField: PrimeField, BaseField: PrimeField>
34 AllocatedNonNativeFieldVar<TargetField, BaseField>
35{
36 pub fn cs(&self) -> ConstraintSystemRef<BaseField> {
38 self.cs.clone()
39 }
40
    /// Recover the `TargetField` value encoded by a list of limb values
    /// (most-significant limb first), under the limb parameters selected for
    /// `optimization_type`.
    pub fn limbs_to_value(
        limbs: Vec<BaseField>,
        optimization_type: OptimizationType,
    ) -> TargetField {
        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            optimization_type,
        );

        // Compute `base = 2^{bits_per_limb}` in the target field: shift to
        // `2^{bits_per_limb - 1}` first and then double, so the shift itself
        // stays within the bigint's capacity.
        let mut base_repr: <TargetField as PrimeField>::BigInt = TargetField::one().into_repr();
        base_repr.muln((params.bits_per_limb - 1) as u32);
        let mut base: TargetField = TargetField::from_repr(base_repr).unwrap();
        base = base + &base;

        let mut result = TargetField::zero();
        let mut power = TargetField::one();

        // Limbs are stored most-significant first, so iterate in reverse while
        // accumulating increasing powers of `base`.
        for limb in limbs.iter().rev() {
            // Re-interpret the base-field limb as a target-field element,
            // bit by bit (processing the bits LSB-first).
            let mut val = TargetField::zero();
            let mut cur = TargetField::one();

            for bit in limb.into_repr().to_bits_be().iter().rev() {
                if *bit {
                    val += &cur;
                }
                cur.double_in_place();
            }

            result += &(val * power);
            power *= &base;
        }

        result
    }
80
81 pub fn value(&self) -> R1CSResult<TargetField> {
83 let mut limbs = Vec::new();
84 for limb in self.limbs.iter() {
85 limbs.push(limb.value()?);
86 }
87
88 Ok(Self::limbs_to_value(limbs, self.get_optimization_type()))
89 }
90
91 pub fn constant(cs: ConstraintSystemRef<BaseField>, value: TargetField) -> R1CSResult<Self> {
93 let optimization_type = match cs.optimization_goal() {
94 OptimizationGoal::None => OptimizationType::Constraints,
95 OptimizationGoal::Constraints => OptimizationType::Constraints,
96 OptimizationGoal::Weight => OptimizationType::Weight,
97 };
98
99 let limbs_value = Self::get_limbs_representations(&value, optimization_type)?;
100
101 let mut limbs = Vec::new();
102
103 for limb_value in limbs_value.iter() {
104 limbs.push(FpVar::<BaseField>::new_constant(
105 ns!(cs, "limb"),
106 limb_value,
107 )?);
108 }
109
110 Ok(Self {
111 cs,
112 limbs,
113 num_of_additions_over_normal_form: BaseField::zero(),
114 is_in_the_normal_form: true,
115 target_phantom: PhantomData,
116 })
117 }
118
    /// Allocate the constant one in `cs`.
    pub fn one(cs: ConstraintSystemRef<BaseField>) -> R1CSResult<Self> {
        Self::constant(cs, TargetField::one())
    }
123
    /// Allocate the constant zero in `cs`.
    pub fn zero(cs: ConstraintSystemRef<BaseField>) -> R1CSResult<Self> {
        Self::constant(cs, TargetField::zero())
    }
128
129 #[tracing::instrument(target = "r1cs")]
131 pub fn add(&self, other: &Self) -> R1CSResult<Self> {
132 assert_eq!(self.get_optimization_type(), other.get_optimization_type());
133
134 let mut limbs = Vec::new();
135 for (this_limb, other_limb) in self.limbs.iter().zip(other.limbs.iter()) {
136 limbs.push(this_limb + other_limb);
137 }
138
139 let mut res = Self {
140 cs: self.cs(),
141 limbs,
142 num_of_additions_over_normal_form: self
143 .num_of_additions_over_normal_form
144 .add(&other.num_of_additions_over_normal_form)
145 .add(&BaseField::one()),
146 is_in_the_normal_form: false,
147 target_phantom: PhantomData,
148 };
149
150 Reducer::<TargetField, BaseField>::post_add_reduce(&mut res)?;
151 Ok(res)
152 }
153
154 #[tracing::instrument(target = "r1cs")]
156 pub fn add_constant(&self, other: &TargetField) -> R1CSResult<Self> {
157 let other_limbs = Self::get_limbs_representations(other, self.get_optimization_type())?;
158
159 let mut limbs = Vec::new();
160 for (this_limb, other_limb) in self.limbs.iter().zip(other_limbs.iter()) {
161 limbs.push(this_limb + *other_limb);
162 }
163
164 let mut res = Self {
165 cs: self.cs(),
166 limbs,
167 num_of_additions_over_normal_form: self
168 .num_of_additions_over_normal_form
169 .add(&BaseField::one()),
170 is_in_the_normal_form: false,
171 target_phantom: PhantomData,
172 };
173
174 Reducer::<TargetField, BaseField>::post_add_reduce(&mut res)?;
175
176 Ok(res)
177 }
178
    /// Subtract `other` from `self` without the final reduction step.
    ///
    /// To avoid underflow in the base field, a per-limb pad (a power of two
    /// large enough to dominate the corresponding limb of `other`) is added to
    /// `self`. The pad is then rounded up to a multiple of the target-field
    /// modulus `p` by additionally adding `pad_to_kp_gap = -pad mod p`, so the
    /// net offset does not change the represented value modulo `p`.
    #[tracing::instrument(target = "r1cs")]
    pub fn sub_without_reduce(&self, other: &Self) -> R1CSResult<Self> {
        assert_eq!(self.get_optimization_type(), other.get_optimization_type());

        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            self.get_optimization_type(),
        );

        // Reduce `other` first if its slack (`surfeit`) would make the padded
        // limbs overflow the base field's usable bit width.
        let mut surfeit = overhead!(other.num_of_additions_over_normal_form + BaseField::one()) + 1;
        let mut other = other.clone();
        if (surfeit + params.bits_per_limb > BaseField::size_in_bits() - 1)
            || (surfeit
                + (TargetField::size_in_bits() - params.bits_per_limb * (params.num_limbs - 1))
                > BaseField::size_in_bits() - 1)
        {
            Reducer::reduce(&mut other)?;
            surfeit = overhead!(other.num_of_additions_over_normal_form + BaseField::one()) + 1;
        }

        // Pad values: `2^(surfeit + bits_per_limb)` for non-top limbs and
        // `2^(surfeit + top_limb_bits)` for the (possibly shorter) top limb.
        let mut pad_non_top_limb_repr: <BaseField as PrimeField>::BigInt =
            BaseField::one().into_repr();
        let mut pad_top_limb_repr: <BaseField as PrimeField>::BigInt = pad_non_top_limb_repr;

        pad_non_top_limb_repr.muln((surfeit + params.bits_per_limb) as u32);
        let pad_non_top_limb = BaseField::from_repr(pad_non_top_limb_repr).unwrap();

        pad_top_limb_repr.muln(
            (surfeit
                + (TargetField::size_in_bits() - params.bits_per_limb * (params.num_limbs - 1)))
                as u32,
        );
        let pad_top_limb = BaseField::from_repr(pad_top_limb_repr).unwrap();

        // The full pad viewed as limbs (top limb first).
        let mut pad_limbs = Vec::new();
        pad_limbs.push(pad_top_limb);
        for _ in 0..self.limbs.len() - 1 {
            pad_limbs.push(pad_non_top_limb);
        }

        // `pad_to_kp_gap = -pad mod p`, so `pad + pad_to_kp_gap` is an exact
        // multiple of `p`.
        let pad_to_kp_gap = Self::limbs_to_value(pad_limbs, self.get_optimization_type()).neg();
        let pad_to_kp_limbs =
            Self::get_limbs_representations(&pad_to_kp_gap, self.get_optimization_type())?;

        // Each result limb is `self + pad + pad_to_kp - other`; the pad term
        // dominates `other`'s limb, so the base-field value never underflows.
        let mut limbs = Vec::new();
        for (i, ((this_limb, other_limb), pad_to_kp_limb)) in self
            .limbs
            .iter()
            .zip(other.limbs.iter())
            .zip(pad_to_kp_limbs.iter())
            .enumerate()
        {
            if i != 0 {
                limbs.push(this_limb + pad_non_top_limb + *pad_to_kp_limb - other_limb);
            } else {
                limbs.push(this_limb + pad_top_limb + *pad_to_kp_limb - other_limb);
            }
        }

        let result = AllocatedNonNativeFieldVar::<TargetField, BaseField> {
            cs: self.cs(),
            limbs,
            // Conservative slack bound: `other`'s slack (plus one) is counted
            // twice to cover both the pad and the subtraction.
            num_of_additions_over_normal_form: self.num_of_additions_over_normal_form
                + (other.num_of_additions_over_normal_form + BaseField::one())
                + (other.num_of_additions_over_normal_form + BaseField::one()),
            is_in_the_normal_form: false,
            target_phantom: PhantomData,
        };

        Ok(result)
    }
256
257 #[tracing::instrument(target = "r1cs")]
259 pub fn sub(&self, other: &Self) -> R1CSResult<Self> {
260 assert_eq!(self.get_optimization_type(), other.get_optimization_type());
261
262 let mut result = self.sub_without_reduce(other)?;
263 Reducer::<TargetField, BaseField>::post_add_reduce(&mut result)?;
264 Ok(result)
265 }
266
    /// Subtract the native constant `other` from `self`.
    #[tracing::instrument(target = "r1cs")]
    pub fn sub_constant(&self, other: &TargetField) -> R1CSResult<Self> {
        self.sub(&Self::constant(self.cs(), *other)?)
    }
272
273 #[tracing::instrument(target = "r1cs")]
275 pub fn mul(&self, other: &Self) -> R1CSResult<Self> {
276 assert_eq!(self.get_optimization_type(), other.get_optimization_type());
277
278 self.mul_without_reduce(&other)?.reduce()
279 }
280
    /// Multiply `self` by the native constant `other`.
    pub fn mul_constant(&self, other: &TargetField) -> R1CSResult<Self> {
        self.mul(&Self::constant(self.cs(), *other)?)
    }
285
    /// Compute the negation of `self`, as `0 - self`.
    #[tracing::instrument(target = "r1cs")]
    pub fn negate(&self) -> R1CSResult<Self> {
        Self::zero(self.cs())?.sub(self)
    }
291
292 #[tracing::instrument(target = "r1cs")]
294 pub fn inverse(&self) -> R1CSResult<Self> {
295 let inverse = Self::new_witness(self.cs(), || {
296 Ok(self.value()?.inverse().unwrap_or_else(TargetField::zero))
297 })?;
298
299 let actual_result = self.clone().mul(&inverse)?;
300 actual_result.conditional_enforce_equal(&Self::one(self.cs())?, &Boolean::TRUE)?;
301 Ok(inverse)
302 }
303
    /// Convert a `TargetField` element into its limb representation
    /// (most-significant limb first).
    pub fn get_limbs_representations(
        elem: &TargetField,
        optimization_type: OptimizationType,
    ) -> R1CSResult<Vec<BaseField>> {
        Self::get_limbs_representations_from_big_integer(&elem.into_repr(), optimization_type)
    }
312
313 pub fn get_limbs_representations_from_big_integer(
315 elem: &<TargetField as PrimeField>::BigInt,
316 optimization_type: OptimizationType,
317 ) -> R1CSResult<Vec<BaseField>> {
318 let params = get_params(
319 TargetField::size_in_bits(),
320 BaseField::size_in_bits(),
321 optimization_type,
322 );
323
324 let mut limbs: Vec<BaseField> = Vec::new();
326 let mut cur = *elem;
327 for _ in 0..params.num_limbs {
328 let cur_bits = cur.to_bits_be(); let cur_mod_r = <BaseField as PrimeField>::BigInt::from_bits_be(
330 &cur_bits[cur_bits.len() - params.bits_per_limb..],
331 ); limbs.push(BaseField::from_repr(cur_mod_r).unwrap());
333 cur.divn(params.bits_per_limb as u32);
334 }
335
336 limbs.reverse();
338
339 Ok(limbs)
340 }
341
    /// Multiply `self` and `other` without reducing, returning the unreduced
    /// product as `2 * num_limbs - 1` limbs.
    ///
    /// Under the weight optimization, the product limbs are built directly as
    /// linear combinations (schoolbook multiplication). Under the constraints
    /// optimization, the product limbs are allocated as witnesses and verified
    /// by evaluating the limb polynomials of `x`, `y`, and `z` at the points
    /// `1..=2*num_limbs-1`: two polynomials of degree `2*num_limbs-2` that
    /// agree on that many points are identical.
    #[tracing::instrument(target = "r1cs")]
    pub fn mul_without_reduce(
        &self,
        other: &Self,
    ) -> R1CSResult<AllocatedNonNativeFieldMulResultVar<TargetField, BaseField>> {
        assert_eq!(self.get_optimization_type(), other.get_optimization_type());

        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            self.get_optimization_type(),
        );

        // Reduce operands first, if needed, so the limb products cannot overflow.
        let mut self_reduced = self.clone();
        let mut other_reduced = other.clone();
        Reducer::<TargetField, BaseField>::pre_mul_reduce(&mut self_reduced, &mut other_reduced)?;

        let mut prod_limbs = Vec::new();
        if self.get_optimization_type() == OptimizationType::Weight {
            // Schoolbook multiplication expressed as linear combinations.
            let zero = FpVar::<BaseField>::zero();

            for _ in 0..2 * params.num_limbs - 1 {
                prod_limbs.push(zero.clone());
            }

            for i in 0..params.num_limbs {
                for j in 0..params.num_limbs {
                    prod_limbs[i + j] =
                        &prod_limbs[i + j] + (&self_reduced.limbs[i] * &other_reduced.limbs[j]);
                }
            }
        } else {
            let cs = self.cs().or(other.cs());

            // Witness each product limb `z_k = sum_{i+j=k} x_i * y_j`.
            for z_index in 0..2 * params.num_limbs - 1 {
                prod_limbs.push(FpVar::new_witness(ns!(cs, "limb product"), || {
                    let mut z_i = BaseField::zero();
                    for i in 0..=min(params.num_limbs - 1, z_index) {
                        let j = z_index - i;
                        if j < params.num_limbs {
                            z_i += &self_reduced.limbs[i]
                                .value()?
                                .mul(&other_reduced.limbs[j].value()?);
                        }
                    }

                    Ok(z_i)
                })?);
            }

            // Enforce `x(c) * y(c) == z(c)` at the evaluation points
            // `c = 1..=2*num_limbs-1` (powers of `c` weight the limbs).
            for c in 0..(2 * params.num_limbs - 1) {
                let c_pows: Vec<_> = (0..(2 * params.num_limbs - 1))
                    .map(|i| BaseField::from((c + 1) as u128).pow(&vec![i as u64]))
                    .collect();

                let x = self_reduced
                    .limbs
                    .iter()
                    .zip(c_pows.iter())
                    .map(|(var, c_pow)| var * *c_pow)
                    .fold(FpVar::zero(), |sum, i| sum + i);

                let y = other_reduced
                    .limbs
                    .iter()
                    .zip(c_pows.iter())
                    .map(|(var, c_pow)| var * *c_pow)
                    .fold(FpVar::zero(), |sum, i| sum + i);

                let z = prod_limbs
                    .iter()
                    .zip(c_pows.iter())
                    .map(|(var, c_pow)| var * *c_pow)
                    .fold(FpVar::zero(), |sum, i| sum + i);

                z.enforce_equal(&(x * y))?;
            }
        }

        Ok(AllocatedNonNativeFieldMulResultVar {
            cs: self.cs(),
            limbs: prod_limbs,
            // The product's slack bound is the product of the operands' bounds.
            prod_of_num_of_additions: (self_reduced.num_of_additions_over_normal_form
                + BaseField::one())
                * (other_reduced.num_of_additions_over_normal_form + BaseField::one()),
            target_phantom: PhantomData,
        })
    }
433
    /// Frobenius map. For a prime `TargetField`, `x^p == x` holds for every
    /// element, so the map is the identity and a clone suffices.
    pub(crate) fn frobenius_map(&self, _power: usize) -> R1CSResult<Self> {
        Ok(self.clone())
    }
437
    /// Enforce `self == other` (as target-field elements) when
    /// `should_enforce` is true.
    ///
    /// The check computes `delta = self - other` (unreduced, hence a
    /// non-negative integer combination), witnesses `k = delta / p`, and
    /// enforces `delta == k * p` group-by-group over the limbs, proving that
    /// `delta` is a multiple of the modulus `p`.
    pub(crate) fn conditional_enforce_equal(
        &self,
        other: &Self,
        should_enforce: &Boolean<BaseField>,
    ) -> R1CSResult<()> {
        assert_eq!(self.get_optimization_type(), other.get_optimization_type());

        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            self.get_optimization_type(),
        );

        // Limb representation of the modulus `p`, both as in-circuit constants
        // and as a bigint for the witness computation below.
        let p_representations =
            AllocatedNonNativeFieldVar::<TargetField, BaseField>::get_limbs_representations_from_big_integer(
                &<TargetField as PrimeField>::Params::MODULUS,
                self.get_optimization_type()
            )?;
        let p_bigint = limbs_to_bigint(params.bits_per_limb, &p_representations);

        let mut p_gadget_limbs = Vec::new();
        for limb in p_representations.iter() {
            p_gadget_limbs.push(FpVar::<BaseField>::Constant(*limb));
        }
        let p_gadget = AllocatedNonNativeFieldVar::<TargetField, BaseField> {
            cs: self.cs(),
            limbs: p_gadget_limbs,
            num_of_additions_over_normal_form: BaseField::one(),
            is_in_the_normal_form: false,
            target_phantom: PhantomData,
        };

        // `delta = self - other`, replaced by zero when the check is disabled.
        let cs = self.cs().or(other.cs()).or(should_enforce.cs());
        let mut delta = self.sub_without_reduce(other)?;
        delta = should_enforce.select(&delta, &Self::zero(cs.clone())?)?;

        // Witness the quotient `k = delta / p`.
        let k_gadget = FpVar::<BaseField>::new_witness(ns!(cs, "k"), || {
            let mut delta_limbs_values = Vec::<BaseField>::new();
            for limb in delta.limbs.iter() {
                delta_limbs_values.push(limb.value()?);
            }

            let delta_bigint = limbs_to_bigint(params.bits_per_limb, &delta_limbs_values);

            Ok(bigint_to_basefield::<BaseField>(&(delta_bigint / p_bigint)))
        })?;

        // Range-check `k` to `surfeit` bits so that `k * p` cannot overflow.
        let surfeit = overhead!(delta.num_of_additions_over_normal_form + BaseField::one()) + 1;
        Reducer::<TargetField, BaseField>::limb_to_bits(&k_gadget, surfeit)?;

        // Compute `k * p` limb-wise against the constant modulus limbs.
        let mut kp_gadget_limbs = Vec::new();
        for limb in p_gadget.limbs.iter() {
            kp_gadget_limbs.push(limb * &k_gadget);
        }

        // Enforce `delta == k * p` by grouping limbs and comparing the groups.
        Reducer::<TargetField, BaseField>::group_and_check_equality(
            surfeit,
            params.bits_per_limb,
            params.bits_per_limb,
            &delta.limbs,
            &kp_gadget_limbs,
        )?;

        Ok(())
    }
508
509 #[tracing::instrument(target = "r1cs")]
510 pub(crate) fn conditional_enforce_not_equal(
511 &self,
512 other: &Self,
513 should_enforce: &Boolean<BaseField>,
514 ) -> R1CSResult<()> {
515 assert_eq!(self.get_optimization_type(), other.get_optimization_type());
516
517 let cs = self.cs().or(other.cs()).or(should_enforce.cs());
518
519 let _ = should_enforce
520 .select(&self.sub(other)?, &Self::one(cs)?)?
521 .inverse()?;
522
523 Ok(())
524 }
525
526 pub(crate) fn get_optimization_type(&self) -> OptimizationType {
527 match self.cs().optimization_goal() {
528 OptimizationGoal::None => OptimizationType::Constraints,
529 OptimizationGoal::Constraints => OptimizationType::Constraints,
530 OptimizationGoal::Weight => OptimizationType::Weight,
531 }
532 }
533}
534
impl<TargetField: PrimeField, BaseField: PrimeField> ToBitsGadget<BaseField>
    for AllocatedNonNativeFieldVar<TargetField, BaseField>
{
    /// Decompose the element into little-endian bits, after first bringing it
    /// to the normal form, and enforce the bit string is at most `p - 1`.
    #[tracing::instrument(target = "r1cs")]
    fn to_bits_le(&self) -> R1CSResult<Vec<Boolean<BaseField>>> {
        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            self.get_optimization_type(),
        );

        // Reduce so every limb fits its designated bit width.
        let mut self_normal = self.clone();
        Reducer::<TargetField, BaseField>::pre_eq_reduce(&mut self_normal)?;

        // Concatenate the limbs' bits (limbs are MSB-first), then reverse the
        // whole string into little-endian order.
        let mut bits = Vec::<Boolean<BaseField>>::new();
        for limb in self_normal.limbs.iter() {
            bits.extend_from_slice(&Reducer::<TargetField, BaseField>::limb_to_bits(
                &limb,
                params.bits_per_limb,
            )?);
        }
        bits.reverse();

        // Enforce `bits <= p - 1`. The characteristic of a prime field is odd,
        // so subtracting 1 from the lowest word cannot borrow.
        let mut b = TargetField::characteristic().to_vec();
        assert_eq!(b[0] % 2, 1);
        b[0] -= 1;
        let run = Boolean::<BaseField>::enforce_smaller_or_equal_than_le(&bits, b)?;

        // The comparison must consume every bit; nothing should remain.
        assert!(run.is_empty());

        Ok(bits)
    }
}
574
575impl<TargetField: PrimeField, BaseField: PrimeField> ToBytesGadget<BaseField>
576 for AllocatedNonNativeFieldVar<TargetField, BaseField>
577{
578 #[tracing::instrument(target = "r1cs")]
579 fn to_bytes(&self) -> R1CSResult<Vec<UInt8<BaseField>>> {
580 let mut bits = self.to_bits_le()?;
581
582 let num_bits = TargetField::BigInt::NUM_LIMBS * 64;
583 assert!(bits.len() <= num_bits);
584 bits.resize_with(num_bits, || Boolean::constant(false));
585
586 let bytes = bits.chunks(8).map(UInt8::from_bits_le).collect();
587 Ok(bytes)
588 }
589}
590
591impl<TargetField: PrimeField, BaseField: PrimeField> CondSelectGadget<BaseField>
592 for AllocatedNonNativeFieldVar<TargetField, BaseField>
593{
594 #[tracing::instrument(target = "r1cs")]
595 fn conditionally_select(
596 cond: &Boolean<BaseField>,
597 true_value: &Self,
598 false_value: &Self,
599 ) -> R1CSResult<Self> {
600 assert_eq!(
601 true_value.get_optimization_type(),
602 false_value.get_optimization_type()
603 );
604
605 let mut limbs_sel = Vec::with_capacity(true_value.limbs.len());
606
607 for (x, y) in true_value.limbs.iter().zip(&false_value.limbs) {
608 limbs_sel.push(FpVar::<BaseField>::conditionally_select(cond, x, y)?);
609 }
610
611 Ok(Self {
612 cs: true_value.cs().or(false_value.cs()),
613 limbs: limbs_sel,
614 num_of_additions_over_normal_form: max(
615 true_value.num_of_additions_over_normal_form,
616 false_value.num_of_additions_over_normal_form,
617 ),
618 is_in_the_normal_form: true_value.is_in_the_normal_form
619 && false_value.is_in_the_normal_form,
620 target_phantom: PhantomData,
621 })
622 }
623}
624
625impl<TargetField: PrimeField, BaseField: PrimeField> TwoBitLookupGadget<BaseField>
626 for AllocatedNonNativeFieldVar<TargetField, BaseField>
627{
628 type TableConstant = TargetField;
629
630 #[tracing::instrument(target = "r1cs")]
631 fn two_bit_lookup(
632 bits: &[Boolean<BaseField>],
633 constants: &[Self::TableConstant],
634 ) -> R1CSResult<Self> {
635 debug_assert!(bits.len() == 2);
636 debug_assert!(constants.len() == 4);
637
638 let cs = bits.cs();
639
640 let optimization_type = match cs.optimization_goal() {
641 OptimizationGoal::None => OptimizationType::Constraints,
642 OptimizationGoal::Constraints => OptimizationType::Constraints,
643 OptimizationGoal::Weight => OptimizationType::Weight,
644 };
645
646 let params = get_params(
647 TargetField::size_in_bits(),
648 BaseField::size_in_bits(),
649 optimization_type,
650 );
651 let mut limbs_constants = Vec::new();
652 for _ in 0..params.num_limbs {
653 limbs_constants.push(Vec::new());
654 }
655
656 for constant in constants.iter() {
657 let representations =
658 AllocatedNonNativeFieldVar::<TargetField, BaseField>::get_limbs_representations(
659 constant,
660 optimization_type,
661 )?;
662
663 for (i, representation) in representations.iter().enumerate() {
664 limbs_constants[i].push(*representation);
665 }
666 }
667
668 let mut limbs = Vec::new();
669 for limbs_constant in limbs_constants.iter() {
670 limbs.push(FpVar::<BaseField>::two_bit_lookup(bits, limbs_constant)?);
671 }
672
673 Ok(AllocatedNonNativeFieldVar::<TargetField, BaseField> {
674 cs,
675 limbs,
676 num_of_additions_over_normal_form: BaseField::zero(),
677 is_in_the_normal_form: true,
678 target_phantom: PhantomData,
679 })
680 }
681}
682
683impl<TargetField: PrimeField, BaseField: PrimeField> ThreeBitCondNegLookupGadget<BaseField>
684 for AllocatedNonNativeFieldVar<TargetField, BaseField>
685{
686 type TableConstant = TargetField;
687
688 #[tracing::instrument(target = "r1cs")]
689 fn three_bit_cond_neg_lookup(
690 bits: &[Boolean<BaseField>],
691 b0b1: &Boolean<BaseField>,
692 constants: &[Self::TableConstant],
693 ) -> R1CSResult<Self> {
694 debug_assert!(bits.len() == 3);
695 debug_assert!(constants.len() == 4);
696
697 let cs = bits.cs().or(b0b1.cs());
698
699 let optimization_type = match cs.optimization_goal() {
700 OptimizationGoal::None => OptimizationType::Constraints,
701 OptimizationGoal::Constraints => OptimizationType::Constraints,
702 OptimizationGoal::Weight => OptimizationType::Weight,
703 };
704
705 let params = get_params(
706 TargetField::size_in_bits(),
707 BaseField::size_in_bits(),
708 optimization_type,
709 );
710
711 let mut limbs_constants = Vec::new();
712 for _ in 0..params.num_limbs {
713 limbs_constants.push(Vec::new());
714 }
715
716 for constant in constants.iter() {
717 let representations =
718 AllocatedNonNativeFieldVar::<TargetField, BaseField>::get_limbs_representations(
719 constant,
720 optimization_type,
721 )?;
722
723 for (i, representation) in representations.iter().enumerate() {
724 limbs_constants[i].push(*representation);
725 }
726 }
727
728 let mut limbs = Vec::new();
729 for limbs_constant in limbs_constants.iter() {
730 limbs.push(FpVar::<BaseField>::three_bit_cond_neg_lookup(
731 bits,
732 b0b1,
733 limbs_constant,
734 )?);
735 }
736
737 Ok(AllocatedNonNativeFieldVar::<TargetField, BaseField> {
738 cs,
739 limbs,
740 num_of_additions_over_normal_form: BaseField::zero(),
741 is_in_the_normal_form: true,
742 target_phantom: PhantomData,
743 })
744 }
745}
746
impl<TargetField: PrimeField, BaseField: PrimeField> AllocVar<TargetField, BaseField>
    for AllocatedNonNativeFieldVar<TargetField, BaseField>
{
    /// Allocate a nonnative element as a constant, public input, or witness.
    ///
    /// Witness limbs are additionally range-checked to their expected bit
    /// widths, since an untrusted prover could otherwise assign oversized
    /// limb values.
    fn new_variable<T: Borrow<TargetField>>(
        cs: impl Into<Namespace<BaseField>>,
        f: impl FnOnce() -> Result<T, SynthesisError>,
        mode: AllocationMode,
    ) -> R1CSResult<Self> {
        let ns = cs.into();
        let cs = ns.cs();

        let optimization_type = match cs.optimization_goal() {
            OptimizationGoal::None => OptimizationType::Constraints,
            OptimizationGoal::Constraints => OptimizationType::Constraints,
            OptimizationGoal::Weight => OptimizationType::Weight,
        };

        let params = get_params(
            TargetField::size_in_bits(),
            BaseField::size_in_bits(),
            optimization_type,
        );
        let zero = TargetField::zero();

        // `f()` may fail (e.g. during setup); fall back to zero so limb
        // allocation can still proceed.
        let elem = match f() {
            Ok(t) => *(t.borrow()),
            Err(_) => zero,
        };
        let elem_representations = Self::get_limbs_representations(&elem, optimization_type)?;
        let mut limbs = Vec::new();

        for limb in elem_representations.iter() {
            limbs.push(FpVar::<BaseField>::new_variable(
                ark_relations::ns!(cs, "alloc"),
                || Ok(limb),
                mode,
            )?);
        }

        // Constants and inputs are assigned no slack; witnesses get a slack
        // of one.
        let num_of_additions_over_normal_form = if mode != AllocationMode::Witness {
            BaseField::zero()
        } else {
            BaseField::one()
        };

        if mode == AllocationMode::Witness {
            // Range-check every non-top limb to `bits_per_limb` bits...
            for limb in limbs.iter().rev().take(params.num_limbs - 1) {
                Reducer::<TargetField, BaseField>::limb_to_bits(limb, params.bits_per_limb)?;
            }

            // ...and the top limb (index 0) to the remaining bit width of the
            // target field.
            Reducer::<TargetField, BaseField>::limb_to_bits(
                &limbs[0],
                TargetField::size_in_bits() - (params.num_limbs - 1) * params.bits_per_limb,
            )?;
        }

        Ok(Self {
            cs,
            limbs,
            num_of_additions_over_normal_form,
            is_in_the_normal_form: mode != AllocationMode::Witness,
            target_phantom: PhantomData,
        })
    }
}
812
813impl<TargetField: PrimeField, BaseField: PrimeField> ToConstraintFieldGadget<BaseField>
814 for AllocatedNonNativeFieldVar<TargetField, BaseField>
815{
816 fn to_constraint_field(&self) -> R1CSResult<Vec<FpVar<BaseField>>> {
817 let bits = self.to_bits_le()?;
820
821 let params = get_params(
823 TargetField::size_in_bits(),
824 BaseField::size_in_bits(),
825 OptimizationType::Weight,
826 );
827
828 let mut limbs = bits
830 .chunks(params.bits_per_limb)
831 .map(|chunk| {
832 let mut limb = FpVar::<BaseField>::zero();
833 let mut w = BaseField::one();
834 for b in chunk.iter() {
835 limb += FpVar::from(b.clone()) * w;
836 w.double_in_place();
837 }
838 limb
839 })
840 .collect::<Vec<FpVar<BaseField>>>();
841
842 limbs.reverse();
843
844 Ok(limbs)
846 }
847}
848
849impl<TargetField: PrimeField, BaseField: PrimeField> Clone
854 for AllocatedNonNativeFieldVar<TargetField, BaseField>
855{
856 fn clone(&self) -> Self {
857 AllocatedNonNativeFieldVar {
858 cs: self.cs(),
859 limbs: self.limbs.clone(),
860 num_of_additions_over_normal_form: self.num_of_additions_over_normal_form,
861 is_in_the_normal_form: self.is_in_the_normal_form,
862 target_phantom: PhantomData,
863 }
864 }
865}