use alloc::{vec, vec::Vec};
use core::marker::PhantomData;
use ark_ff::{BigInteger, Field, Fp, FpConfig, PrimeField, SmallFp, SmallFpConfig};
use crate::{
codecs::{Decoding, Encoding},
error::VerificationError,
io::NargDeserialize,
VerificationResult,
};
/// Interprets `bytes` as the canonical big-endian encoding of a prime-field
/// element.
///
/// Returns `None` when the slice is longer than the modulus byte length, or
/// when the decoded integer is not reduced modulo `p` (`from_bigint` rejects
/// values greater than or equal to the modulus).
fn parse_canonical_prime_field<F: PrimeField>(bytes: &[u8]) -> Option<F> {
    let max_len = (F::MODULUS_BIT_SIZE as usize).div_ceil(8);
    if bytes.len() > max_len {
        return None;
    }
    // Expand into a big-endian bit vector: most-significant bit of each byte first.
    let mut bits = Vec::with_capacity(bytes.len() * 8);
    for byte in bytes {
        for shift in (0..8).rev() {
            bits.push((byte >> shift) & 1 == 1);
        }
    }
    F::from_bigint(F::BigInt::from_bits_be(&bits))
}
// `Unit` only needs an additive identity; for Montgomery-form `Fp` that is the
// config-provided zero constant.
impl<C: ark_ff::FpConfig<N>, const N: usize> crate::Unit for Fp<C, N> {
    const ZERO: Self = C::ZERO;
}
// Same `Unit` impl for the small-field representation: zero comes from the
// `SmallFpConfig`.
impl<P: SmallFpConfig> crate::Unit for SmallFp<P> {
    const ZERO: Self = P::ZERO;
}
/// Byte buffer used as the intermediate representation (`Decoding::Repr`) when
/// decoding a field element of type `F`.
///
/// `Default` sizes it to `(modulus_bytes + 32) * extension_degree`, and the
/// `Decoding` impls reduce each chunk modulo `p` via `from_be_bytes_mod_order`.
pub struct DecodingFieldBuffer<F: Field> {
    // Raw bytes; filled by callers through the `AsMut<[u8]>` impl.
    buf: Vec<u8>,
    // `F` appears only in the buffer's sizing, never as stored data.
    _phantom: PhantomData<F>,
}
/// Number of bytes a [`DecodingFieldBuffer`] holds for field `F`.
///
/// Per base-prime-field coordinate: the modulus byte length plus 32 slack
/// bytes. The slack is consumed by `from_be_bytes_mod_order` in the
/// `Decoding` impls — presumably to keep the modular reduction statistically
/// close to uniform (oversampling); confirm against the decoding callers.
pub fn decoding_field_buffer_size<F: Field>() -> usize {
    let per_coordinate = u64::from(F::BasePrimeField::MODULUS_BIT_SIZE.div_ceil(8)) + 32;
    (per_coordinate * F::extension_degree()) as usize
}
/// Generates a `NargDeserialize` impl that reads `extension_degree` canonical
/// big-endian base-prime-field encodings from the front of the buffer.
///
/// On success the consumed bytes are stripped from `buf`; on any failure
/// (short buffer, non-canonical coordinate, rejected element) `buf` is left
/// untouched and `VerificationError` is returned.
macro_rules! impl_deserialize {
    (impl [$($generics:tt)*] for $type:ty) => {
        impl<$($generics)*> NargDeserialize for $type {
            fn deserialize_from_narg(buf: &mut &[u8]) -> VerificationResult<Self> {
                let degree = <Self as Field>::extension_degree() as usize;
                let elem_bytes =
                    <Self as Field>::BasePrimeField::MODULUS_BIT_SIZE.div_ceil(8) as usize;
                let needed = degree * elem_bytes;
                // `get(..needed)` doubles as the length check: `None` exactly
                // when fewer than `needed` bytes are available.
                let Some(encoded) = buf.get(..needed) else {
                    return Err(VerificationError);
                };
                // Each coordinate must be a canonical (reduced) encoding;
                // the first non-canonical chunk aborts the whole parse.
                let base_elems = encoded
                    .chunks_exact(elem_bytes)
                    .map(parse_canonical_prime_field::<<Self as Field>::BasePrimeField>)
                    .collect::<Option<Vec<_>>>()
                    .ok_or(VerificationError)?;
                debug_assert_eq!(base_elems.len(), degree);
                let value =
                    Self::from_base_prime_field_elems(base_elems).ok_or(VerificationError)?;
                // Only advance the cursor once the element is fully accepted.
                *buf = &buf[needed..];
                Ok(value)
            }
        }
    };
}
/// Generates an `Encoding<[u8]>` impl that emits each base-prime-field
/// coordinate as a fixed-width, left-zero-padded big-endian block of
/// `modulus_bytes` bytes, concatenated in `to_base_prime_field_elements`
/// order.
macro_rules! impl_encoding {
    (impl [$($generics:tt)*] for $type:ty) => {
        impl<$($generics)*> Encoding<[u8]> for $type {
            fn encode(&self) -> impl AsRef<[u8]> {
                let elem_bytes =
                    <Self as Field>::BasePrimeField::MODULUS_BIT_SIZE.div_ceil(8) as usize;
                let degree = <Self as Field>::extension_degree() as usize;
                let mut out = Vec::with_capacity(elem_bytes * degree);
                for coordinate in self.to_base_prime_field_elements() {
                    let be = coordinate.into_bigint().to_bytes_be();
                    if be.len() >= elem_bytes {
                        // Drop leading bytes; they are zero for any reduced
                        // element, so this only trims the bigint's padding.
                        out.extend_from_slice(&be[be.len() - elem_bytes..]);
                    } else {
                        // Left-pad with zeros up to the fixed width.
                        out.resize(out.len() + (elem_bytes - be.len()), 0);
                        out.extend_from_slice(&be);
                    }
                }
                out
            }
        }
    };
}
/// Generates a `Decoding<[u8]>` impl that reduces each oversized chunk of the
/// [`DecodingFieldBuffer`] modulo the base-field prime.
macro_rules! impl_decoding {
    (impl [$($generics:tt)*] for $type:ty) => {
        impl<$($generics)*> Decoding<[u8]> for $type {
            type Repr = DecodingFieldBuffer<Self>;
            fn decode(repr: Self::Repr) -> Self {
                debug_assert_eq!(repr.buf.len(), decoding_field_buffer_size::<Self>());
                // Subtle: calling `decoding_field_buffer_size` on the *base
                // prime field* (extension degree 1) yields the per-coordinate
                // chunk size `modulus_bytes + 32`, so the buffer splits into
                // exactly `extension_degree` chunks.
                let base_field_size = decoding_field_buffer_size::<<Self as Field>::BasePrimeField>();
                // Chunks are wider than the modulus, so reduce mod p rather
                // than requiring a canonical encoding.
                let result = repr.buf.chunks(base_field_size)
                    .map(|chunk| <Self as Field>::BasePrimeField::from_be_bytes_mod_order(chunk))
                    .collect::<Vec<_>>();
                // Chunk count matches the extension degree by construction,
                // so reassembly cannot fail here.
                Self::from_base_prime_field_elems(result).unwrap()
            }
        }
    }
}
// Canonical big-endian deserialization for Montgomery `Fp`, its tower
// extensions, and `SmallFp`.
impl_deserialize!(impl [C: FpConfig<N>, const N: usize] for Fp<C, N>);
impl_deserialize!(impl [C: ark_ff::Fp2Config] for ark_ff::Fp2<C>);
impl_deserialize!(impl [C: ark_ff::Fp3Config] for ark_ff::Fp3<C>);
impl_deserialize!(impl [C: ark_ff::Fp4Config] for ark_ff::Fp4<C>);
impl_deserialize!(impl [C: ark_ff::Fp6Config] for ark_ff::Fp6<C>);
impl_deserialize!(impl [C: ark_ff::Fp12Config] for ark_ff::Fp12<C>);
impl_deserialize!(impl [P: SmallFpConfig] for SmallFp<P>);
// Matching fixed-width encodings for the same set of field types.
impl_encoding!(impl [C: FpConfig<N>, const N: usize] for Fp<C, N>);
impl_encoding!(impl [C: ark_ff::Fp2Config] for ark_ff::Fp2<C>);
impl_encoding!(impl [C: ark_ff::Fp3Config] for ark_ff::Fp3<C>);
impl_encoding!(impl [C: ark_ff::Fp4Config] for ark_ff::Fp4<C>);
impl_encoding!(impl [C: ark_ff::Fp6Config] for ark_ff::Fp6<C>);
impl_encoding!(impl [C: ark_ff::Fp12Config] for ark_ff::Fp12<C>);
impl_encoding!(impl [P: SmallFpConfig] for SmallFp<P>);
// Mod-order decodings from oversized buffers for the same set of field types.
impl_decoding!(impl [C: FpConfig<N>, const N: usize] for Fp<C, N>);
impl_decoding!(impl [C: ark_ff::Fp2Config] for ark_ff::Fp2<C>);
impl_decoding!(impl [C: ark_ff::Fp3Config] for ark_ff::Fp3<C>);
impl_decoding!(impl [C: ark_ff::Fp4Config] for ark_ff::Fp4<C>);
impl_decoding!(impl [C: ark_ff::Fp6Config] for ark_ff::Fp6<C>);
impl_decoding!(impl [C: ark_ff::Fp12Config] for ark_ff::Fp12<C>);
impl_decoding!(impl [P: SmallFpConfig] for SmallFp<P>);
#[allow(unused)]
// Searches for the largest bit count `n` such that truncating `b` to its `n`
// low bits leaves (by the inequality below) at least 128 bits of slack.
// NOTE(review): the exact statistical meaning of the bound is not evident from
// this function alone — it appears to relate sampling `n` random bits to
// near-uniformity mod `b`; confirm against the caller/specification.
fn random_bits_in_random_modp<const N: usize>(b: ark_ff::BigInt<N>) -> usize {
    use ark_ff::{BigInt, BigInteger};
    // Try the longest truncation first, shrinking until the bound holds.
    for n in (0..=b.num_bits()).rev() {
        // `r` = the value formed by the `n` low-order bits of `b`.
        let r_bits = &b.to_bits_le()[..n as usize];
        let r = BigInt::<N>::from_bits_le(r_bits);
        // Counts the bits left after stripping the leading run of ones from
        // the big-endian view of `r_bits` — presumably an estimate of
        // log2 of the gap above `r`; verify the intended quantity.
        let log2_a_minus_r = r_bits.iter().rev().skip_while(|&&bit| bit).count() as u32;
        // NOTE(review): this u32 expression can underflow (panic in debug,
        // wrap in release) when `r.num_bits() + log2_a_minus_r` exceeds
        // `b.num_bits() + n - 1`; check whether that case is reachable.
        if b.num_bits() + n - 1 - r.num_bits() - log2_a_minus_r >= 128 {
            return n as usize;
        }
    }
    0
}
impl<F: Field> Default for DecodingFieldBuffer<F> {
fn default() -> Self {
let base_field_modulus_bytes = u64::from(F::BasePrimeField::MODULUS_BIT_SIZE.div_ceil(8));
let len = (base_field_modulus_bytes + 32) * F::extension_degree();
Self {
buf: vec![0u8; len as usize],
_phantom: PhantomData,
}
}
}
impl<F: Field> AsMut<[u8]> for DecodingFieldBuffer<F> {
    /// Exposes the raw byte storage so callers can fill the buffer before
    /// handing it to `Decoding::decode`.
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.buf
    }
}
#[cfg(test)]
mod test_ark_ff {
    use ark_ff::{BigInteger, PrimeField};
    use crate::{
        codecs::Encoding,
        io::{NargDeserialize, NargSerialize},
    };
    // Small test fields covering 64-bit, 31-bit, and 16-bit moduli.
    ark_ff::define_field!(
        modulus = "18446744069414584321",
        generator = "7",
        name = Goldilocks,
    );
    ark_ff::define_field!(modulus = "2147483647", generator = "7", name = M31,);
    ark_ff::define_field!(modulus = "2013265921", generator = "31", name = BabyBear,);
    ark_ff::define_field!(modulus = "2130706433", generator = "3", name = KoalaBear,);
    ark_ff::define_field!(modulus = "65521", generator = "17", name = F16,);
    /// Serializes a handful of values (including `p - 1`) and checks that
    /// deserialization returns the same element and consumes every byte.
    fn roundtrip_testsuite<F>()
    where
        F: ark_ff::PrimeField
            + Encoding<[u8]>
            + crate::io::NargSerialize
            + crate::io::NargDeserialize,
    {
        for v in [0u64, 1, 42, 12345] {
            let original = F::from(v);
            let serialized = encode_to_vec(&original);
            let mut slice: &[u8] = &serialized;
            let deserialized = F::deserialize_from_narg(&mut slice)
                .unwrap_or_else(|_| panic!("failed to deserialize value {v}"));
            assert!(
                slice.is_empty(),
                "deserialize did not consume all bytes for value {v}"
            );
            assert_eq!(original, deserialized, "roundtrip mismatch for {v}");
        }
        // Largest canonical value: must still round-trip.
        let p_minus_1 = -F::ONE;
        let ser = encode_to_vec(&p_minus_1);
        let mut sl: &[u8] = &ser;
        let de = F::deserialize_from_narg(&mut sl).expect("p-1 should deserialize");
        assert!(sl.is_empty());
        assert_eq!(de, p_minus_1);
    }
    /// Serializes `x` into a fresh `Vec<u8>` via `NargSerialize`.
    fn encode_to_vec<F: Encoding<[u8]>>(x: &F) -> alloc::vec::Vec<u8> {
        let mut dst = alloc::vec::Vec::new();
        x.serialize_into_narg(&mut dst);
        dst
    }
    /// Encoding the same element twice must yield identical bytes.
    fn deterministic_encoding_testsuite<F: ark_ff::Field + Encoding<[u8]>>() {
        for v in [0u64, 1, 42, 12345] {
            let elem = F::from(v);
            let a = encode_to_vec(&elem);
            let b = encode_to_vec(&elem);
            assert_eq!(a, b, "encoding not deterministic for {v}");
        }
    }
    /// Distinct field elements (0, 1, p-1) must have distinct encodings.
    fn distinct_values_encode_differently<F: ark_ff::PrimeField + Encoding<[u8]>>() {
        let zero = encode_to_vec(&F::ZERO);
        let one = encode_to_vec(&F::ONE);
        let p_minus_1 = encode_to_vec(&(-F::ONE));
        assert_ne!(zero, one);
        assert_ne!(one, p_minus_1);
        assert_ne!(zero, p_minus_1);
    }
    /// Feeding the modulus itself (trimmed to the encoding width) must be
    /// rejected: `p` is not a canonical element encoding.
    fn reject_modulus<F: ark_ff::PrimeField + core::fmt::Debug + crate::io::NargDeserialize>() {
        let modulus_bytes = F::MODULUS.to_bytes_be();
        let field_size = F::MODULUS_BIT_SIZE.div_ceil(8) as usize;
        // Drop the bigint's zero-padding down to the fixed encoding width.
        let start = modulus_bytes.len().saturating_sub(field_size);
        let trimmed = &modulus_bytes[start..];
        let mut sl: &[u8] = trimmed;
        assert!(
            F::deserialize_from_narg(&mut sl).is_err(),
            "deserializing p should fail (modulus_bits={}, field_size={field_size}, trimmed={trimmed:?})",
            F::MODULUS_BIT_SIZE,
        );
    }
    /// Every single-bit corruption of an encoding must either fail to
    /// deserialize or produce a different element — never the original.
    fn bitflip_testsuite<F>()
    where
        F: ark_ff::PrimeField + Encoding<[u8]> + crate::io::NargDeserialize,
    {
        let original = F::from(42u64);
        let encoded = encode_to_vec(&original);
        for byte_idx in 0..encoded.len() {
            for bit in 0..8u8 {
                let mut flipped = encoded.clone();
                flipped[byte_idx] ^= 1 << bit;
                let mut sl: &[u8] = &flipped;
                if let Ok(v) = F::deserialize_from_narg(&mut sl) {
                    assert_ne!(
                        v, original,
                        "bit-flip at byte {byte_idx} bit {bit} decoded to same value"
                    );
                }
            }
        }
    }
    /// A buffer one byte shorter than the encoding width must be rejected.
    fn wrong_length_testsuite<F>()
    where
        F: ark_ff::PrimeField + Encoding<[u8]> + crate::io::NargDeserialize,
    {
        let encoded = encode_to_vec(&F::from(1u64));
        if !encoded.is_empty() {
            let short = &encoded[..encoded.len() - 1];
            let mut sl: &[u8] = short;
            assert!(
                F::deserialize_from_narg(&mut sl).is_err(),
                "truncated buffer should fail"
            );
        }
    }
    #[test]
    fn test_smallfp_roundtrip() {
        roundtrip_testsuite::<Goldilocks>();
        roundtrip_testsuite::<M31>();
        roundtrip_testsuite::<BabyBear>();
        roundtrip_testsuite::<KoalaBear>();
        roundtrip_testsuite::<F16>();
    }
    #[test]
    fn test_smallfp_deterministic_encoding() {
        deterministic_encoding_testsuite::<Goldilocks>();
        deterministic_encoding_testsuite::<M31>();
        deterministic_encoding_testsuite::<BabyBear>();
        deterministic_encoding_testsuite::<KoalaBear>();
        deterministic_encoding_testsuite::<F16>();
    }
    #[test]
    fn test_smallfp_distinct_values_encode_differently() {
        distinct_values_encode_differently::<Goldilocks>();
        distinct_values_encode_differently::<M31>();
        distinct_values_encode_differently::<BabyBear>();
        distinct_values_encode_differently::<KoalaBear>();
        distinct_values_encode_differently::<F16>();
    }
    #[test]
    fn test_smallfp_reject_modulus() {
        reject_modulus::<Goldilocks>();
        reject_modulus::<M31>();
        reject_modulus::<BabyBear>();
        reject_modulus::<KoalaBear>();
        reject_modulus::<F16>();
    }
    #[test]
    fn test_smallfp_bitflip() {
        bitflip_testsuite::<Goldilocks>();
        bitflip_testsuite::<M31>();
        bitflip_testsuite::<BabyBear>();
        bitflip_testsuite::<KoalaBear>();
        bitflip_testsuite::<F16>();
    }
    #[test]
    fn test_smallfp_wrong_length() {
        wrong_length_testsuite::<Goldilocks>();
        wrong_length_testsuite::<M31>();
        wrong_length_testsuite::<BabyBear>();
        wrong_length_testsuite::<KoalaBear>();
        wrong_length_testsuite::<F16>();
    }
    // Montgomery-backed (multi-limb) fields: BLS12-381 scalar and base fields.
    #[test]
    fn test_montfp_roundtrip() {
        roundtrip_testsuite::<ark_bls12_381::Fr>();
        roundtrip_testsuite::<ark_bls12_381::Fq>();
    }
    #[test]
    fn test_montfp_reject_modulus() {
        reject_modulus::<ark_bls12_381::Fr>();
        reject_modulus::<ark_bls12_381::Fq>();
    }
    #[test]
    fn test_montfp_bitflip() {
        bitflip_testsuite::<ark_bls12_381::Fr>();
    }
    // Degree-2 extension over Goldilocks to exercise the extension-field
    // codec paths with a SmallFp base field.
    pub struct GoldilocksFp2Config;
    impl ark_ff::Fp2Config for GoldilocksFp2Config {
        type Fp = Goldilocks;
        const NONRESIDUE: Self::Fp = ark_ff::SmallFp::from_raw(7);
        const FROBENIUS_COEFF_FP2_C1: &'static [Self::Fp] = &[
            ark_ff::SmallFp::from_raw(1),
            // p - 1 for the Goldilocks modulus.
            ark_ff::SmallFp::from_raw(18_446_744_069_414_584_320),
        ];
    }
    pub type GoldilocksFp2 = ark_ff::Fp2<GoldilocksFp2Config>;
    #[test]
    fn test_encoding_small_fp_goldilocks_fp2() {
        deterministic_encoding_testsuite::<GoldilocksFp2>();
    }
    // The secp256k1 scalar field is 256 bits, so `1` must encode as 31 zero
    // bytes followed by 0x01 (left-padded big-endian).
    #[test]
    fn test_prime_field_encoding_is_left_padded_big_endian() {
        let value = ark_secp256k1::Fr::from(1u64);
        let encoded = Encoding::<[u8]>::encode(&value);
        let bytes = encoded.as_ref();
        assert_eq!(bytes.len(), 32);
        assert!(bytes[..31].iter().all(|&byte| byte == 0));
        assert_eq!(bytes[31], 1);
    }
    #[test]
    fn test_prime_field_deserialize_rejects_modulus() {
        let modulus = ark_secp256k1::Fr::MODULUS.to_bytes_be();
        let mut slice = modulus.as_slice();
        assert!(ark_secp256k1::Fr::deserialize_from_narg(&mut slice).is_err());
        // On failure the cursor must not advance.
        assert_eq!(slice, modulus.as_slice());
    }
}