use crate::{
hyrax::utils::{flat_to_matrix_column_major, tensor_prime},
utils::{inner_product, scalar_by_vector, vector_sum, Matrix},
Error, LabeledCommitment, LabeledPolynomial, PolynomialCommitment,
};
use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::PrimeField;
use ark_poly::MultilinearExtension;
use ark_serialize::serialize_to_vec;
use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand};
use blake2::Blake2s256;
use digest::Digest;
#[cfg(feature = "parallel")]
use rayon::prelude::*;
mod data_structures;
pub use data_structures::*;
#[cfg(test)]
mod tests;
mod utils;
/// Domain-separation tag hashed into every generator derivation in `setup`.
// `'static` is implied on const references (clippy::redundant_static_lifetimes).
pub const PROTOCOL_NAME: &[u8] = b"Hyrax protocol";
/// Hyrax polynomial commitment scheme over a curve group `G`, committing to
/// multilinear polynomials `P` whose evaluation vector is laid out as a
/// square matrix (so only even numbers of variables are supported — see the
/// `n % 2` checks in the `PolynomialCommitment` impl below).
pub struct HyraxPC<
    // The elliptic-curve group whose scalar field the polynomials live over.
    G: AffineRepr,
    // The multilinear polynomial type being committed to.
    P: MultilinearExtension<G::ScalarField>,
> {
    // Zero-sized marker: the struct owns no data; all state lives in the keys.
    _phantom: PhantomData<(G, P)>,
}
impl<G, P> HyraxPC<G, P>
where
    G: AffineRepr,
    P: MultilinearExtension<G::ScalarField>,
{
    /// Non-hiding Pedersen commitment to a vector of scalars, computed as the
    /// multi-scalar multiplication `sum_i scalars[i] * key[i]`.
    ///
    /// # Panics
    ///
    /// Panics if `key` and `scalars` differ in length.
    fn pedersen_commit(key: &[G], scalars: &[G::ScalarField]) -> G::Group {
        assert_eq!(
            key.len(),
            scalars.len(),
            "Pedersen key and scalar vector must have equal length"
        );
        // The MSM backend consumes big-integer representations of the scalars.
        let scalars_bigint = ark_std::cfg_iter!(scalars)
            .map(|s| s.into_bigint())
            .collect::<Vec<_>>();
        // `key` is already `&[G]`; the original's `&key` was a needless `&&[G]`.
        <G::Group as VariableBaseMSM>::msm_bigint(key, &scalars_bigint)
    }
}
impl<G, P> PolynomialCommitment<G::ScalarField, P> for HyraxPC<G, P>
where
G: AffineRepr,
G::ScalarField: Absorb,
P: MultilinearExtension<G::ScalarField>,
{
type UniversalParams = HyraxUniversalParams<G>;
type CommitterKey = HyraxCommitterKey<G>;
type VerifierKey = HyraxVerifierKey<G>;
type Commitment = HyraxCommitment<G>;
type CommitmentState = HyraxCommitmentState<G::ScalarField>;
type Proof = Vec<HyraxProof<G>>;
type BatchProof = Vec<Self::Proof>;
type Error = Error;
fn setup<R: RngCore>(
_max_degree: usize,
num_vars: Option<usize>,
_rng: &mut R,
) -> Result<Self::UniversalParams, Self::Error> {
if num_vars.is_none() {
return Err(Error::InvalidNumberOfVariables);
}
let n = num_vars.unwrap();
if n % 2 == 1 {
return Err(Error::InvalidNumberOfVariables);
}
let dim = 1 << n / 2;
let points: Vec<_> = ark_std::cfg_into_iter!(0u64..dim + 1)
.map(|i| {
let hash =
Blake2s256::digest([PROTOCOL_NAME, &i.to_le_bytes()].concat().as_slice());
let mut p = G::from_random_bytes(&hash);
let mut j = 0u64;
while p.is_none() {
let mut bytes = PROTOCOL_NAME.to_vec();
bytes.extend(i.to_le_bytes());
bytes.extend(j.to_le_bytes());
let hash = Blake2s256::digest(bytes.as_slice());
p = G::from_random_bytes(&hash);
j += 1;
}
let point = p.unwrap();
point.mul_by_cofactor_to_group()
})
.collect();
let mut points = G::Group::normalize_batch(&points);
let h: G = points.pop().unwrap();
Ok(HyraxUniversalParams { com_key: points, h })
}
/// Produces the committer and verifier keys. For Hyrax both are plain copies
/// of the universal parameters; degree bounds play no role in this scheme,
/// so every `_`-prefixed argument is ignored.
fn trim(
    pp: &Self::UniversalParams,
    _supported_degree: usize,
    _supported_hiding_bound: usize,
    _enforced_degree_bounds: Option<&[usize]>,
) -> Result<(Self::CommitterKey, Self::VerifierKey), Self::Error> {
    let committer_key = pp.clone();
    let verifier_key = pp.clone();
    Ok((committer_key, verifier_key))
}
/// Commits to each labelled polynomial by viewing its evaluation vector as a
/// `dim x dim` matrix (`dim = 2^(n/2)`) and producing one hiding Pedersen
/// commitment per matrix row. The per-row blinding factors and the matrix
/// itself are returned as the state later consumed by `open`.
///
/// # Errors
///
/// Returns `Error::InvalidNumberOfVariables` if a polynomial has an odd
/// number of variables or needs more generators than the key provides.
#[allow(unused_variables)]
fn commit<'a>(
    ck: &Self::CommitterKey,
    polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<G::ScalarField, P>>,
    rng: Option<&mut dyn RngCore>,
) -> Result<
    (
        Vec<LabeledCommitment<Self::Commitment>>,
        Vec<Self::CommitmentState>,
    ),
    Self::Error,
>
where
    P: 'a,
{
    let mut coms = Vec::new();
    let mut states = Vec::new();
    #[cfg(not(feature = "parallel"))]
    let rng_inner = rng.expect("Committing to polynomials requires a random generator");
    for l_poly in polynomials {
        let label = l_poly.label();
        let poly = l_poly.polynomial();
        let n = poly.num_vars();
        // Validate before deriving sizes from `n`.
        if n % 2 == 1 {
            return Err(Error::InvalidNumberOfVariables);
        }
        let dim = 1 << (n / 2);
        // The key must supply one generator per matrix column. The original
        // compared `n` (the variable count) against the key length, which is
        // too weak a bound and let `pedersen_commit` panic on its length
        // assert instead of surfacing an error here.
        if dim > ck.com_key.len() {
            return Err(Error::InvalidNumberOfVariables);
        }
        // Column-major layout: matrix entry (i, j) is evaluation j * dim + i.
        let m = flat_to_matrix_column_major(&poly.to_evaluations(), dim, dim);
        // One hiding commitment per row, each with fresh randomness. In the
        // parallel build a thread-local RNG is used since the caller's `rng`
        // cannot be shared across rayon workers.
        let (row_coms, com_rands): (Vec<_>, Vec<_>) = cfg_iter!(m)
            .map(|row| {
                #[cfg(not(feature = "parallel"))]
                let r = G::ScalarField::rand(rng_inner);
                #[cfg(feature = "parallel")]
                let r = G::ScalarField::rand(&mut rand::thread_rng());
                let c = (Self::pedersen_commit(&ck.com_key, row) + ck.h * r).into();
                (c, r)
            })
            .unzip();
        let com = HyraxCommitment { row_coms };
        let l_comm = LabeledCommitment::new(label.to_string(), com, Some(1));
        coms.push(l_comm);
        states.push(HyraxCommitmentState {
            randomness: com_rands,
            mat: Matrix::new_from_rows(m),
        });
    }
    Ok((coms, states))
}
/// Opens each commitment at `point`, producing one sigma-protocol proof per
/// polynomial with the challenge derived via Fiat-Shamir from `sponge`.
///
/// Fixes two HTML-entity-mangled expressions in the original (`<` in place
/// of `&lt`), which did not parse: the evaluation inner product and the
/// response vector both operate on the local `lt`.
///
/// # Errors
///
/// Returns an error on an odd number of variables, mismatched labels, or a
/// polynomial/point variable-count mismatch.
fn open<'a>(
    ck: &Self::CommitterKey,
    labeled_polynomials: impl IntoIterator<Item = &'a LabeledPolynomial<G::ScalarField, P>>,
    commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
    point: &'a P::Point,
    sponge: &mut impl CryptographicSponge,
    states: impl IntoIterator<Item = &'a Self::CommitmentState>,
    rng: Option<&mut dyn RngCore>,
) -> Result<Self::Proof, Self::Error>
where
    Self::Commitment: 'a,
    Self::CommitmentState: 'a,
    P: 'a,
{
    let n = point.len();
    if n % 2 == 1 {
        return Err(Error::InvalidNumberOfVariables);
    }
    // `/` binds tighter than `<<`; parenthesised for clarity (same value).
    let dim = 1 << (n / 2);
    // Split the reversed point into two halves and expand each into its
    // tensor of Lagrange weights, so that p(point) = l^T * T * r.
    let point_rev: Vec<G::ScalarField> = point.iter().rev().cloned().collect();
    let point_lower = &point_rev[n / 2..];
    let point_upper = &point_rev[..n / 2];
    let l = tensor_prime(point_lower);
    let r = tensor_prime(point_upper);
    let mut proofs = Vec::new();
    let rng_inner = rng.expect("Opening polynomials requires randomness");
    for (l_poly, (l_com, state)) in labeled_polynomials
        .into_iter()
        .zip(commitments.into_iter().zip(states.into_iter()))
    {
        let label = l_poly.label();
        if label != l_com.label() {
            return Err(Error::MismatchedLabels {
                commitment_label: l_com.label().to_string(),
                polynomial_label: label.to_string(),
            });
        }
        let poly = l_poly.polynomial();
        let com = l_com.commitment();
        if poly.num_vars() != n {
            return Err(Error::MismatchedNumVars {
                poly_nv: poly.num_vars(),
                point_nv: n,
            });
        }
        // Absorb the public transcript: key, row commitments and point.
        sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(point);
        let t = &state.mat;
        // lt = l^T * T; r_lt is the matching combination of the row
        // blinding factors, i.e. the randomness hidden inside l^T applied to
        // the row commitments.
        let lt = t.row_mul(&l);
        let r_lt = cfg_iter!(l)
            .zip(&state.randomness)
            .map(|(l, r)| *l * r)
            .sum::<G::ScalarField>();
        // The claimed evaluation: p(point) = <lt, r>.
        let eval = inner_product(&lt, &r);
        // Hiding commitment to the evaluation.
        let (com_eval, r_eval) = {
            let r = G::ScalarField::rand(rng_inner);
            ((ck.com_key[0] * eval + ck.h * r).into(), r)
        };
        // Sigma-protocol first message: a random vector d, its Pedersen
        // commitment, and a commitment to b = <r, d>.
        let d: Vec<G::ScalarField> =
            (0..dim).map(|_| G::ScalarField::rand(rng_inner)).collect();
        let b = inner_product(&r, &d);
        let r_d = G::ScalarField::rand(rng_inner);
        let com_d = (Self::pedersen_commit(&ck.com_key, &d) + ck.h * r_d).into();
        let r_b = G::ScalarField::rand(rng_inner);
        let com_b = (ck.com_key[0] * b + ck.h * r_b).into();
        sponge.absorb(&serialize_to_vec!(com_eval).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(com_d).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(com_b).map_err(|_| Error::TranscriptError)?);
        // Fiat-Shamir challenge.
        let c = sponge.squeeze_field_elements(1)[0];
        // Sigma-protocol responses: z = d + c * lt and matching blinders.
        let z = vector_sum(&d, &scalar_by_vector(c, &lt));
        let z_d = c * r_lt + r_d;
        let z_b = c * r_eval + r_b;
        proofs.push(HyraxProof {
            com_eval,
            com_d,
            com_b,
            z,
            z_d,
            z_b,
        });
    }
    Ok(proofs)
}
/// Verifies a batch of opening proofs at a common point. The claimed values
/// are recomputed from the proofs themselves, so `_values` is ignored.
///
/// Returns `Ok(false)` if either sigma-protocol equation fails for any
/// proof, and `Ok(true)` only if all proofs verify.
///
/// # Errors
///
/// Returns an error on an odd number of variables or on a commitment whose
/// row count does not match the expected matrix dimension.
fn check<'a>(
    vk: &Self::VerifierKey,
    commitments: impl IntoIterator<Item = &'a LabeledCommitment<Self::Commitment>>,
    point: &'a P::Point,
    _values: impl IntoIterator<Item = G::ScalarField>,
    proof: &Self::Proof,
    sponge: &mut impl CryptographicSponge,
    _rng: Option<&mut dyn RngCore>,
) -> Result<bool, Self::Error>
where
    Self::Commitment: 'a,
{
    let n = point.len();
    if n % 2 == 1 {
        return Err(Error::InvalidNumberOfVariables);
    }
    // Mirror the prover's tensor split of the reversed point.
    let point_rev: Vec<G::ScalarField> = point.iter().rev().cloned().collect();
    let point_lower = &point_rev[n / 2..];
    let point_upper = &point_rev[..n / 2];
    let l = tensor_prime(point_lower);
    let r = tensor_prime(point_upper);
    for (com, h_proof) in commitments.into_iter().zip(proof.iter()) {
        let row_coms = &com.commitment().row_coms;
        let HyraxProof {
            com_eval,
            com_d,
            com_b,
            z,
            z_d,
            z_b,
        } = h_proof;
        // Exactly one row commitment per matrix row is expected.
        // (`/` binds tighter than `<<`; parens added for clarity.)
        let expected_rows = 1 << (n / 2);
        if row_coms.len() != expected_rows {
            return Err(Error::IncorrectCommitmentSize {
                encountered: row_coms.len(),
                expected: expected_rows,
            });
        }
        // Re-derive the Fiat-Shamir challenge from the same transcript the
        // prover absorbed in `open`.
        sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(point);
        sponge.absorb(&serialize_to_vec!(*com_eval).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(*com_d).map_err(|_| Error::TranscriptError)?);
        sponge.absorb(&serialize_to_vec!(*com_b).map_err(|_| Error::TranscriptError)?);
        let c: G::ScalarField = sponge.squeeze_field_elements(1)[0];
        // First check: Com(<r, z>; z_b) == c * com_eval + com_b.
        let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into();
        if com_dp != (com_eval.mul(c) + com_b).into() {
            return Ok(false);
        }
        // T' = l^T applied to the row commitments, i.e. a commitment to
        // l^T * T. (`row_coms` is already a reference; the original's
        // `&row_coms` was a needless double borrow.)
        let l_bigint = cfg_iter!(l)
            .map(|chi| chi.into_bigint())
            .collect::<Vec<_>>();
        let t_prime: G = <G::Group as VariableBaseMSM>::msm_bigint(row_coms, &l_bigint).into();
        // Second check: Com(z; z_d) == c * T' + com_d.
        let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into();
        if com_z_zd != (t_prime.mul(c) + com_d).into() {
            return Ok(false);
        }
    }
    Ok(true)
}
}