use crate::{
core::{
circuits::boolean::{boolean_value::BooleanValue, byte::Byte},
global_value::value::FieldValue,
},
utils::{
field::ScalarField,
zkp::{
pedersen::{PedersenCommitment, PedersenOpening},
range_proof::{
batched_range_proof::{BatchedRangeProofContext, BATCHED_RANGE_PROOF_CONTEXT_LEN},
RangeProof,
},
transcript::Transcript,
},
},
};
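/// Byte length of a serialized range proof over 64 total bits
/// (21 group elements and scalars of 32 bytes each).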
pub const RANGE_PROOF_U64_LEN: usize = 672;
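/// Byte length of the full proof data: the serialized context followed by the
/// serialized range proof.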
pub const BATCHED_RANGE_PROOF_U64_DATA_LEN: usize =
BATCHED_RANGE_PROOF_CONTEXT_LEN + RANGE_PROOF_U64_LEN;
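/// A batched range proof over Pedersen commitments whose bit lengths sum to 64,
/// together with the context the proof was generated against.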
#[derive(Clone)]
pub struct BatchedRangeProofU64Data {
pub context: BatchedRangeProofContext,
pub proof: RangeProofU64,
}
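/// Range proof restricted to a total of exactly 64 proven bits.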
#[derive(Clone)]
pub struct RangeProofU64(RangeProof);
impl RangeProofU64 {
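/// Proves that each amount fits within its corresponding bit length.
/// Panics if the bit lengths do not sum to exactly 64.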
pub fn new(
amounts: Vec<FieldValue<ScalarField>>,
bit_lengths: Vec<usize>,
openings: Vec<&PedersenOpening>,
transcript: &mut Transcript<BooleanValue>,
) -> Self {
let batched_bit_length: usize = bit_lengths.iter().sum();
let expected_bit_length = usize::try_from(u64::BITS).unwrap();
assert_eq!(batched_bit_length, expected_bit_length);
RangeProofU64(RangeProof::new(amounts, bit_lengths, openings, transcript))
}
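/// Returns a reference to the wrapped `RangeProof`.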
#[allow(dead_code)]
fn inner(&self) -> &RangeProof {
&self.0
}
}
impl BatchedRangeProofU64Data {
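/// Builds the proof context from the commitments, amounts, bit lengths and
/// openings, then generates the range proof against a transcript derived from
/// that context. Panics if the bit lengths do not sum to exactly 64.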
pub fn new(
commitments: Vec<&PedersenCommitment>,
amounts: Vec<FieldValue<ScalarField>>,
bit_lengths: Vec<usize>,
openings: Vec<&PedersenOpening>,
) -> Self {
let batched_bit_length: usize = bit_lengths.iter().sum();
let expected_bit_length = usize::try_from(u64::BITS).unwrap();
assert_eq!(batched_bit_length, expected_bit_length);
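// Bind the commitments, amounts, bit lengths and openings into the context,
// and derive the proof transcript from it.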
let context =
BatchedRangeProofContext::new(&commitments, &amounts, &bit_lengths, &openings);
let mut transcript = context.new_transcript();
let proof = RangeProofU64::new(amounts, bit_lengths, openings, &mut transcript);
Self { context, proof }
}
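/// Serializes the context followed by the proof into a fixed-size byte array.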
pub fn to_bytes(&self) -> [Byte<BooleanValue>; BATCHED_RANGE_PROOF_U64_DATA_LEN] {
let mut bytes = [Byte::<BooleanValue>::from(0); BATCHED_RANGE_PROOF_U64_DATA_LEN];
bytes[..BATCHED_RANGE_PROOF_CONTEXT_LEN].copy_from_slice(&self.context.to_bytes());
bytes[BATCHED_RANGE_PROOF_CONTEXT_LEN..].copy_from_slice(&self.proof.0.to_bytes());
bytes
}
}
#[cfg(test)]
mod tests {
use crate::{
core::{
bounds::FieldBounds,
expressions::{domain::Domain, expr::EvalValue, field_expr::FieldExpr, InputKind},
global_value::{global_expr_store::with_local_expr_store_as_global, value::FieldValue},
ir_builder::{ExprStore, IRBuilder},
},
utils::{
field::ScalarField,
used_field::UsedField,
zkp::{
pedersen::{Pedersen, PedersenCommitment, PedersenOpening},
range_proof::batched_range_proof_u64::BatchedRangeProofU64Data,
},
},
};
use rand::{Rng, RngCore};
use rustc_hash::FxHashMap;
use zk_elgamal_proof::{
encryption::pedersen::{
Pedersen as SolanaPedersen,
PedersenCommitment as SolanaPedersenCommitment,
PedersenOpening as SolanaPedersenOpening,
},
zk_elgamal_proof_program::proof_data::{
BatchedRangeProofU64Data as SolanaBatchedRangeProofU64Data,
ZkProofData,
},
};
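// End-to-end test: build the batched range proof both with the Solana
// reference implementation and in-circuit, then check that the two verify
// (or fail to verify) consistently.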
#[allow(non_snake_case)]
#[test]
fn test_range_proof() {
let rng = &mut crate::utils::test_rng::get();
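// Four amounts whose bit lengths sum to 64 (16 + 8 + 32 + 8), each sampled
// within its declared range.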
let bit_lengths = vec![16, 8, 32, 8];
let mut amounts = bit_lengths
.iter()
.map(|bit_length| {
let rand = rng.next_u64();
if *bit_length < 64 {
rand % (1u64 << bit_length)
} else {
rand
}
})
.collect::<Vec<u64>>();
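// With probability 1/2, push every amount past its declared bit length so
// that the resulting proofs must fail verification.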
let is_valid_proof = rng.gen_bool(0.5);
amounts
.iter_mut()
.zip(bit_lengths.iter().copied())
.for_each(|(amount, bit_length)| {
if !is_valid_proof && bit_length < 64 {
*amount += 1u64 << bit_length;
}
});
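// Commit to the (possibly out-of-range) amounts with the reference Pedersen
// implementation.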
let (commitments, openings): (Vec<SolanaPedersenCommitment>, Vec<SolanaPedersenOpening>) =
amounts
.iter()
.map(|amount| SolanaPedersen::new(*amount))
.unzip();
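// The reference proof from the Solana zk sdk should verify exactly when the
// amounts are in range.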
let solana_proof_data = SolanaBatchedRangeProofU64Data::new(
commitments
.iter()
.collect::<Vec<&SolanaPedersenCommitment>>(),
amounts.clone(),
bit_lengths.clone(),
openings.iter().collect::<Vec<&SolanaPedersenOpening>>(),
)
.unwrap();
let solana_verification = solana_proof_data.verify_proof();
assert_eq!(solana_verification.is_ok(), is_valid_proof);
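// Reproduce the proof in-circuit: each amount becomes a secret field input
// bounded by its bit length.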
let mut expr_store = IRBuilder::new(true);
let mut input_vals = FxHashMap::<usize, EvalValue>::default();
for i in 0..amounts.len() {
let _ = expr_store.push_field(FieldExpr::Input(
i,
FieldBounds::new(
ScalarField::from(0),
ScalarField::power_of_two(bit_lengths[i]) - ScalarField::from(1),
)
.as_input_info(InputKind::Secret),
));
input_vals.insert(i, EvalValue::Scalar(ScalarField::from(amounts[i])));
}
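// Generate the proof with `expr_store` installed as the global expression
// store, and expose the serialized proof bytes as circuit outputs.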
let outputs = with_local_expr_store_as_global(
|| {
let amounts = (0..amounts.len())
.map(FieldValue::<ScalarField>::from_id)
.collect::<Vec<FieldValue<ScalarField>>>();
let (commitments, openings): (Vec<PedersenCommitment>, Vec<PedersenOpening>) =
amounts.iter().map(|amount| Pedersen::new(*amount)).unzip();
let arcis_proof_data = BatchedRangeProofU64Data::new(
commitments.iter().collect::<Vec<&PedersenCommitment>>(),
amounts,
bit_lengths.clone(),
openings.iter().collect::<Vec<&PedersenOpening>>(),
);
arcis_proof_data
.to_bytes()
.into_iter()
.map(|byte| FieldValue::<ScalarField>::from(byte).get_id())
.collect::<Vec<usize>>()
},
&mut expr_store,
);
let ir = expr_store.into_ir(outputs);
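// Evaluate the IR on the secret inputs and unwrap the resulting field elements.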
let result = ir
.eval_with_log(
rng,
&mut input_vals,
false,
false,
!is_valid_proof,
std::iter::empty(),
)
.map(|x| {
x.into_iter()
.map(ScalarField::unwrap)
.collect::<Vec<ScalarField>>()
})
.unwrap();
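// Each output field element encodes one byte of the serialized proof data;
// reassemble the bytes and verify with the Solana verifier. The result must
// match the reference run.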
let arcis_proof_data_bytes = result
.iter()
.map(|byte| byte.to_le_bytes()[0])
.collect::<Vec<u8>>();
let arcis_proof_data =
*bytemuck::try_from_bytes::<SolanaBatchedRangeProofU64Data>(&arcis_proof_data_bytes)
.unwrap();
let arcis_verification = arcis_proof_data.verify_proof();
assert_eq!(arcis_verification.is_ok(), is_valid_proof);
}
}