ark_poly_commit/hyrax/mod.rs
use crate::{
    hyrax::utils::{flat_to_matrix_column_major, tensor_prime},
    utils::{inner_product, scalar_by_vector, vector_sum, Matrix},
    Error, LabeledCommitment, LabeledPolynomial, PolynomialCommitment,
};
use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::PrimeField;
use ark_poly::MultilinearExtension;
use ark_serialize::serialize_to_vec;
use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand};

use blake2::Blake2s256;
use digest::Digest;

#[cfg(feature = "parallel")]
use rayon::prelude::*;

mod data_structures;
pub use data_structures::*;
#[cfg(test)]
mod tests;
mod utils;

/// String of bytes used to seed the randomness during the setup function.
/// Note that the setup function should never be used in production
/// environments.
pub const PROTOCOL_NAME: &'static [u8] = b"Hyrax protocol";

/// Hyrax polynomial commitment scheme:
/// A polynomial commitment scheme based on the hardness of the
/// discrete logarithm problem in prime-order groups. This is a
/// Fiat-Shamired version of the PCS described in the Hyrax paper
/// [[WTsTW17]][hyrax].
///
/// [hyrax]: https://eprint.iacr.org/2017/1132.pdf
///
/// ### Future optimisations
///
/// - Add parallelisation. There is at least one natural place where
///   parallelisation could bring performance gains: in essence, the prover
///   commits to the polynomial by expressing it as an evaluation matrix and
///   Pedersen-multi-committing to each row. Each of these commitments can be
///   computed independently from the rest, and therefore in parallel. It
///   remains to be seen how much of an improvement this would entail, since
///   each Pedersen multi-commitment boils down to a multi-exponentiation and
///   this operation is itself parallelised.
/// - Due to the homomorphic nature of Pedersen commitments, it is likely that
///   some of the following methods can be designed more efficiently than
///   their default implementations: `batch_open`, `batch_check`,
///   `open_combinations`, `check_combinations`. This is not discussed in the
///   reference article, but the IPA and KZG modules might be a good starting
///   point.
/// - On a related note to the previous point, there might be a more
///   efficient way to open several polynomials at a single point (this is the
///   functionality of the `open` method) than the currently implemented
///   technique, where only the computation of the vectors `L` and `R` is
///   shared across polynomials.
/// - The cited article proposes an optimisation in the section _Reducing the
///   cost of proof-of-dot-prod_. It allows for non-square matrices (and hence
///   removes the requirement for the number of variables to be even) and
///   introduces a tradeoff between proof size and verifier time. It is
///   probably worth pursuing.
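///
/// ### Example (sketch)
///
/// A minimal usage sketch of the [`PolynomialCommitment`] interface as
/// implemented here. The concrete curve, MLE type and the `test_sponge`
/// helper are illustrative assumptions (any `AffineRepr` curve with an
/// absorbable scalar field, any `MultilinearExtension` and any
/// `CryptographicSponge` work), so the block is not compiled:
///
/// ```ignore
/// use ark_bls12_381::{Fr, G1Affine};
/// use ark_poly::{DenseMultilinearExtension, MultilinearExtension, Polynomial};
/// use ark_poly_commit::{hyrax::HyraxPC, LabeledPolynomial, PolynomialCommitment};
/// use ark_std::UniformRand;
///
/// type Hyrax = HyraxPC<G1Affine, DenseMultilinearExtension<Fr>>;
///
/// let rng = &mut ark_std::test_rng();
/// let num_vars = 4; // must be even
///
/// let pp = Hyrax::setup(1, Some(num_vars), rng).unwrap();
/// let (ck, vk) = Hyrax::trim(&pp, 1, 1, None).unwrap();
///
/// let poly = DenseMultilinearExtension::rand(num_vars, rng);
/// let l_poly = LabeledPolynomial::new("f".to_string(), poly, None, None);
/// let (coms, states) = Hyrax::commit(&ck, [&l_poly], Some(rng)).unwrap();
///
/// let point: Vec<Fr> = (0..num_vars).map(|_| Fr::rand(rng)).collect();
/// let value = l_poly.polynomial().evaluate(&point);
///
/// // `test_sponge` is a hypothetical helper returning a `CryptographicSponge`
/// // (e.g. a Poseidon sponge); the prover and verifier must start from the
/// // same sponge state.
/// let mut sponge_prover = test_sponge::<Fr>();
/// let mut sponge_verifier = test_sponge::<Fr>();
///
/// let proof = Hyrax::open(
///     &ck, [&l_poly], &coms, &point, &mut sponge_prover, &states, Some(rng),
/// ).unwrap();
/// assert!(Hyrax::check(
///     &vk, &coms, &point, [value], &proof, &mut sponge_verifier, Some(rng),
/// ).unwrap());
/// ```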
pub struct HyraxPC<
    // The elliptic curve used for Pedersen commitments (only EC groups are
    // supported as of now).
    G: AffineRepr,
    // A polynomial type representing multilinear polynomials
    P: MultilinearExtension<G::ScalarField>,
> {
    _phantom: PhantomData<(G, P)>,
}

impl<G, P> HyraxPC<G, P>
where
    G: AffineRepr,
    P: MultilinearExtension<G::ScalarField>,
{
    /// Pedersen commitment to a vector of scalars as described in appendix A.1
    /// of the reference article.
    /// The function does not add the hiding term `h * r`: it is only a
    /// wrapper around an MSM.
    ///
    /// # Panics
    ///
    /// Panics if `key` and `scalars` do not have the same length.
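    ///
    /// # Example
    ///
    /// A hiding commitment is obtained at the call site by adding the term
    /// `h * r` for fresh randomness `r`. A sketch with hypothetical local
    /// bindings `ck`, `row` and `r`, mirroring how `commit` uses this helper:
    /// ```ignore
    /// let com: G = (Self::pedersen_commit(&ck.com_key, row) + ck.h * r).into();
    /// ```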
    fn pedersen_commit(key: &[G], scalars: &[G::ScalarField]) -> G::Group {
        assert_eq!(key.len(), scalars.len());
        let scalars_bigint = ark_std::cfg_iter!(scalars)
            .map(|s| s.into_bigint())
            .collect::<Vec<_>>();
        // Multi-exponentiation in the group of points of the EC
        <G::Group as VariableBaseMSM>::msm_bigint(key, &scalars_bigint)
    }
}

impl<G, P> PolynomialCommitment<G::ScalarField, P> for HyraxPC<G, P>
where
    G: AffineRepr,
    G::ScalarField: Absorb,
    P: MultilinearExtension<G::ScalarField>,
{
    type UniversalParams = HyraxUniversalParams<G>;
    type CommitterKey = HyraxCommitterKey<G>;
    type VerifierKey = HyraxVerifierKey<G>;
    type Commitment = HyraxCommitment<G>;
    type CommitmentState = HyraxCommitmentState<G::ScalarField>;
    type Proof = Vec<HyraxProof<G>>;
    type BatchProof = Vec<Self::Proof>;
    type Error = Error;

    /// Outputs mock universal parameters for the Hyrax polynomial commitment
    /// scheme. It does *not* return random keys across calls and should never
    /// be used in settings where security is required - it is only useful for
    /// testing.
    ///
    /// # Errors
    ///
    /// Returns an error if `num_vars` is `None` or contains an odd value.
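    ///
    /// For instance, with `num_vars = Some(8)` the function generates
    /// `2^(8/2) + 1 = 17` points: `2^(8/2) = 16` of them form `com_key` (one
    /// per row of the coefficient matrix) and the remaining one is the hiding
    /// generator `h`.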
    fn setup<R: RngCore>(
        _max_degree: usize,
        num_vars: Option<usize>,
        _rng: &mut R,
    ) -> Result<Self::UniversalParams, Self::Error> {
        if num_vars.is_none() {
            return Err(Error::InvalidNumberOfVariables);
        }

        let n = num_vars.unwrap();

        if n % 2 == 1 {
            // Only polynomials with an even number of variables are
            // supported in this implementation
            return Err(Error::InvalidNumberOfVariables);
        }

        // Number of rows (or, equivalently, columns) of a square matrix
        // containing the coefficients of an n-variate ML polynomial
        let dim = 1 << (n / 2);

        // The following block of code is largely taken from the IPA module
        // in this crate. It generates random points (not guaranteed to be
        // generators, since the point at infinity should theoretically occur)
        let points: Vec<_> = ark_std::cfg_into_iter!(0u64..dim + 1)
            .map(|i| {
                let hash =
                    Blake2s256::digest([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice());
                let mut p = G::from_random_bytes(&hash);
                let mut j = 0u64;
                while p.is_none() {
                    let mut bytes = PROTOCOL_NAME.to_vec();
                    bytes.extend(i.to_le_bytes());
                    bytes.extend(j.to_le_bytes());
                    let hash = Blake2s256::digest(bytes.as_slice());
                    p = G::from_random_bytes(&hash);
                    j += 1;
                }
                let point = p.unwrap();
                point.mul_by_cofactor_to_group()
            })
            .collect();

        // Converting from projective to affine representation
        let mut points = G::Group::normalize_batch(&points);

        let h: G = points.pop().unwrap();

        Ok(HyraxUniversalParams { com_key: points, h })
    }

    /// Trims a key into a prover key and a verifier key. This should only
    /// amount to discarding some of the points in said key if the prover
    /// and verifier only wish to commit to polynomials with fewer variables
    /// than the key can support. Since the number of variables is not
    /// considered in the prototype, this function currently simply clones the
    /// key.
    fn trim(
        pp: &Self::UniversalParams,
        _supported_degree: usize,
        _supported_hiding_bound: usize,
        _enforced_degree_bounds: Option<&[usize]>,
    ) -> Result<(Self::CommitterKey, Self::VerifierKey), Self::Error> {
        Ok((pp.clone(), pp.clone()))
    }

    /// Produces a list of commitments to the passed polynomials. Cf. the
    /// section "Square-root commitment scheme" from the reference article.
    ///
    /// # Panics
    ///
    /// Panics if `rng` is None, since Hyrax requires randomness in order to
    /// commit to a polynomial.
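    ///
    /// Concretely, the `2^n` evaluations of an `n`-variate polynomial are laid
    /// out (column-major) as a `2^(n/2) x 2^(n/2)` matrix, and each row is
    /// committed to with a hiding Pedersen multi-commitment, so a commitment
    /// consists of `2^(n/2)` group elements.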
    #[allow(unused_variables)]
    fn commit<'a>(
        ck: &Self::CommitterKey,
        polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<G::ScalarField, P>>,
        rng: Option<&mut dyn RngCore>,
    ) -> Result<
        (
            Vec<LabeledCommitment<Self::Commitment>>,
            Vec<Self::CommitmentState>,
        ),
        Self::Error,
    >
    where
        P: 'a,
    {
        let mut coms = Vec::new();
        let mut states = Vec::new();

        #[cfg(not(feature = "parallel"))]
        let rng_inner = rng.expect("Committing to polynomials requires a random generator");

        for l_poly in polynomials {
            let label = l_poly.label();
            let poly = l_poly.polynomial();

            let n = poly.num_vars();
            let dim = 1 << (n / 2);

            if n % 2 == 1 {
                // Only polynomials with an even number of variables are
                // supported in this implementation
                return Err(Error::InvalidNumberOfVariables);
            }

            if dim > ck.com_key.len() {
                return Err(Error::InvalidNumberOfVariables);
            }

            let m = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim);

            // Committing to the matrix with one multi-commitment per row
            let (row_coms, com_rands): (Vec<_>, Vec<_>) = cfg_iter!(m)
                .map(|row| {
                    #[cfg(not(feature = "parallel"))]
                    let r = G::ScalarField::rand(rng_inner);
                    #[cfg(feature = "parallel")]
                    let r = G::ScalarField::rand(&mut rand::thread_rng());
                    let c = (Self::pedersen_commit(&ck.com_key, row) + ck.h * r).into();
                    (c, r)
                })
                .unzip();

            let com = HyraxCommitment { row_coms };
            let l_comm = LabeledCommitment::new(label.to_string(), com, Some(1));

            coms.push(l_comm);
            states.push(HyraxCommitmentState {
                randomness: com_rands,
                mat: Matrix::new_from_rows(m),
            });
        }

        Ok((coms, states))
    }

    /// Opens a list of polynomial commitments at a desired point. This
    /// requires the list of original polynomials (`labeled_polynomials`) as
    /// well as the random values used by the Pedersen multi-commits during
    /// the commitment phase (`randomness`). Cf. sections "Square-root
    /// commitment scheme" and appendix A.2 from the reference article.
    ///
    /// # Panics
    ///
    /// Panics if `rng` is None, since Hyrax requires randomness in order to
    /// open the commitment to a polynomial.
    ///
    /// # Errors
    ///
    /// Returns an error if
    /// - the point doesn't have an even number of variables,
    /// - the label of a commitment doesn't match that of the corresponding
    ///   polynomial,
    /// - the number of variables of a polynomial doesn't match that of the
    ///   point.
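    ///
    /// The opening argument relies on the identity `eval = L^T * T * R`, where
    /// `T` is the `2^(n/2) x 2^(n/2)` matrix of evaluations committed to row
    /// by row and `L`, `R` are the tensor vectors derived from the two halves
    /// of the (reversed) point; the proof contains a hiding commitment to
    /// `eval` together with a proof of dot product between `L^T * T` and `R`
    /// (appendix A.2 of the reference article).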
    fn open<'a>(
        ck: &Self::CommitterKey,
        labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<G::ScalarField, P>>,
        commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
        point: &'a P::Point,
        sponge: &mut impl CryptographicSponge,
        states: impl IntoIterator<Item = &'a Self::CommitmentState>,
        rng: Option<&mut dyn RngCore>,
    ) -> Result<Self::Proof, Self::Error>
    where
        Self::Commitment: 'a,
        Self::CommitmentState: 'a,
        P: 'a,
    {
        let n = point.len();

        if n % 2 == 1 {
            // Only polynomials with an even number of variables are
            // supported in this implementation
            return Err(Error::InvalidNumberOfVariables);
        }

        let dim = 1 << (n / 2);

        // Reversing the point is necessary because the MLE interface returns
        // evaluations in little-endian order
        let point_rev: Vec<G::ScalarField> = point.iter().rev().cloned().collect();

        let point_lower = &point_rev[n / 2..];
        let point_upper = &point_rev[..n / 2];

        // Deriving the tensors which result in the evaluation of the polynomial
        // when they are multiplied by the coefficient matrix.
        let l = tensor_prime(point_lower);
        let r = tensor_prime(point_upper);

        let mut proofs = Vec::new();

        let rng_inner = rng.expect("Opening polynomials requires randomness");

        for (l_poly, (l_com, state)) in labeled_polynomials
            .into_iter()
            .zip(commitments.into_iter().zip(states.into_iter()))
        {
            let label = l_poly.label();
            if label != l_com.label() {
                return Err(Error::MismatchedLabels {
                    commitment_label: l_com.label().to_string(),
                    polynomial_label: label.to_string(),
                });
            }

            let poly = l_poly.polynomial();
            let com = l_com.commitment();

            if poly.num_vars() != n {
                return Err(Error::MismatchedNumVars {
                    poly_nv: poly.num_vars(),
                    point_nv: n,
                });
            }

            // Absorbing public parameters
            sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?);

            // Absorbing the commitment to the polynomial
            sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?);

            // Absorbing the point
            sponge.absorb(point);

            // The matrix formed by the polynomial coefficients, which was
            // committed to row by row during the commitment phase
            let t = &state.mat;

            let lt = t.row_mul(&l);

            // t_prime coincides with the Pedersen commitment to lt with the
            // randomness r_lt computed here
            let r_lt = cfg_iter!(l)
                .zip(&state.randomness)
                .map(|(l, r)| *l * r)
                .sum::<G::ScalarField>();

            let eval = inner_product(&lt, &r);

            // Singleton commit
            let (com_eval, r_eval) = {
                let r = G::ScalarField::rand(rng_inner);
                ((ck.com_key[0] * eval + ck.h * r).into(), r)
            };

            // ******** Dot product argument ********
            // Appendix A.2 in the reference article

            let d: Vec<G::ScalarField> =
                (0..dim).map(|_| G::ScalarField::rand(rng_inner)).collect();

            let b = inner_product(&r, &d);

            // Multi-commit
            let r_d = G::ScalarField::rand(rng_inner);
            let com_d = (Self::pedersen_commit(&ck.com_key, &d) + ck.h * r_d).into();

            // Singleton commit
            let r_b = G::ScalarField::rand(rng_inner);
            let com_b = (ck.com_key[0] * b + ck.h * r_b).into();

            // Absorbing the commitment to the evaluation
            sponge.absorb(&serialize_to_vec!(com_eval).map_err(|_| Error::TranscriptError)?);

            // Absorbing the two auxiliary commitments
            sponge.absorb(&serialize_to_vec!(com_d).map_err(|_| Error::TranscriptError)?);
            sponge.absorb(&serialize_to_vec!(com_b).map_err(|_| Error::TranscriptError)?);

            // Receive the random challenge c from the verifier, i.e. squeeze
            // it from the transcript.
            let c = sponge.squeeze_field_elements(1)[0];

            let z = vector_sum(&d, &scalar_by_vector(c, &lt));
            let z_d = c * r_lt + r_d;
            let z_b = c * r_eval + r_b;

            proofs.push(HyraxProof {
                com_eval,
                com_d,
                com_b,
                z,
                z_d,
                z_b,
            });
        }

        Ok(proofs)
    }

    /// Verifies a list of opening proofs and confirms the evaluation of the
    /// committed polynomials at the desired point.
    ///
    /// # Errors
    /// - If the point doesn't have an even number of variables.
    /// - If the number of row commitments in a commitment does not correspond
    ///   to the length of the point (specifically, it should be
    ///   2^(point length / 2)).
    ///
    /// # Disregarded arguments
    /// - `rng`
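    ///
    /// For reference, with `c` the challenge squeezed from the transcript and
    /// `T' = <L, row_coms>`, the two equations verified below (figure 6 of the
    /// reference article) are
    /// - `<z, com_key> + z_d * h == c * T' + com_d` (eq. 13), and
    /// - `<z, R> * com_key[0] + z_b * h == c * com_eval + com_b` (eq. 14).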
    fn check<'a>(
        vk: &Self::VerifierKey,
        commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
        point: &'a P::Point,
        _values: impl IntoIterator<Item = G::ScalarField>,
        proof: &Self::Proof,
        sponge: &mut impl CryptographicSponge,
        _rng: Option<&mut dyn RngCore>,
    ) -> Result<bool, Self::Error>
    where
        Self::Commitment: 'a,
    {
        let n = point.len();

        if n % 2 == 1 {
            // Only polynomials with an even number of variables are
            // supported in this implementation
            return Err(Error::InvalidNumberOfVariables);
        }

        // Reversing the point is necessary because the MLE interface returns
        // evaluations in little-endian order
        let point_rev: Vec<G::ScalarField> = point.iter().rev().cloned().collect();

        let point_lower = &point_rev[n / 2..];
        let point_upper = &point_rev[..n / 2];

        // Deriving the tensors which result in the evaluation of the polynomial
        // when they are multiplied by the coefficient matrix.
        let l = tensor_prime(point_lower);
        let r = tensor_prime(point_upper);

        for (com, h_proof) in commitments.into_iter().zip(proof.iter()) {
            let row_coms = &com.commitment().row_coms;

            // extract each field from h_proof
            let HyraxProof {
                com_eval,
                com_d,
                com_b,
                z,
                z_d,
                z_b,
            } = h_proof;

            if row_coms.len() != 1 << (n / 2) {
                return Err(Error::IncorrectCommitmentSize {
                    encountered: row_coms.len(),
                    expected: 1 << (n / 2),
                });
            }

            // Absorbing public parameters
            sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?);

            // Absorbing the commitment to the polynomial
            sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?);

            // Absorbing the point
            sponge.absorb(point);

            // Absorbing the commitment to the evaluation
            sponge.absorb(&serialize_to_vec!(*com_eval).map_err(|_| Error::TranscriptError)?);

            // Absorbing the two auxiliary commitments
            sponge.absorb(&serialize_to_vec!(*com_d).map_err(|_| Error::TranscriptError)?);
            sponge.absorb(&serialize_to_vec!(*com_b).map_err(|_| Error::TranscriptError)?);

            // Receive the random challenge c from the verifier, i.e. squeeze
            // it from the transcript.
            let c: G::ScalarField = sponge.squeeze_field_elements(1)[0];

            // Second check from the paper (figure 6, equation (14))
            // Moved here for potential early return
            let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into();
            if com_dp != (com_eval.mul(c) + com_b).into() {
                return Ok(false);
            }

            // Computing t_prime with a multi-exponentiation
            let l_bigint = cfg_iter!(l)
                .map(|chi| chi.into_bigint())
                .collect::<Vec<_>>();
            let t_prime: G = <G::Group as VariableBaseMSM>::msm_bigint(row_coms, &l_bigint).into();

            // First check from the paper (figure 6, equation (13))
            let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into();
            if com_z_zd != (t_prime.mul(c) + com_d).into() {
                return Ok(false);
            }
        }

        Ok(true)
    }
}