#[macro_use]
pub mod generator_serialization;
#[macro_use]
pub mod gate_serialization;
#[cfg(not(feature = "std"))]
use alloc::{collections::BTreeMap, sync::Arc, vec, vec::Vec};
use core::convert::Infallible;
use core::fmt::{Debug, Display, Formatter};
use core::mem::size_of;
use core::ops::Range;
#[cfg(feature = "std")]
use std::{collections::BTreeMap, sync::Arc};
pub use gate_serialization::default::DefaultGateSerializer;
pub use gate_serialization::GateSerializer;
pub use generator_serialization::default::DefaultGeneratorSerializer;
pub use generator_serialization::WitnessGeneratorSerializer;
use hashbrown::HashMap;
use qp_plonky2_core::{PolyFriZkConfig, ZkConfig, ZkMode};
use crate::field::extension::{Extendable, FieldExtension};
use crate::field::polynomial::PolynomialCoeffs;
use crate::field::types::{Field64, PrimeField64};
use crate::fri::oracle::PolynomialBatch;
use crate::fri::proof::{
CompressedFriProof, CompressedFriQueryRounds, FriBatchMaskProof, FriBatchMaskProofTarget,
FriBatchMaskQuery, FriBatchMaskQueryTarget, FriFinalPolys, FriFinalPolysTarget,
FriInitialTreeProof, FriInitialTreeProofTarget, FriProof, FriProofTarget, FriQueryRound,
FriQueryRoundTarget, FriQueryStep, FriQueryStepTarget,
};
use crate::fri::structure::{FriOracleLayout, FriOracleRepresentation};
use crate::fri::{
FriBatchMaskingParams, FriConfig, FriFinalPolyLayout, FriParams, FriReductionStrategy,
};
use crate::gadgets::polynomial::PolynomialCoeffsExtTarget;
use crate::gates::gate::GateRef;
use crate::gates::lookup::Lookup;
use crate::gates::selectors::SelectorsInfo;
use crate::hash::hash_types::{HashOutTarget, MerkleCapTarget, RichField};
use crate::hash::merkle_proofs::{MerkleProof, MerkleProofTarget};
use crate::hash::merkle_tree::{MerkleCap, MerkleTree};
use crate::iop::ext_target::ExtensionTarget;
use crate::iop::generator::WitnessGeneratorRef;
use crate::iop::target::{BoolTarget, Target};
use crate::iop::wire::Wire;
use crate::plonk::circuit_builder::LookupWire;
use crate::plonk::circuit_data::{
CircuitConfig, CircuitData, CommonCircuitData, ProverCircuitData, ProverOnlyCircuitData,
VerifierCircuitData, VerifierCircuitTarget, VerifierOnlyCircuitData,
};
use crate::plonk::config::{GenericConfig, GenericHashOut, Hasher};
use crate::plonk::plonk_common::{salt_size, PlonkOracle};
use crate::plonk::proof::{
CompressedProof, CompressedProofWithPublicInputs, OpeningSet, OpeningSetTarget, Proof,
ProofTarget, ProofWithPublicInputs, ProofWithPublicInputsTarget,
};
/// Error returned by every (de)serialization routine in this module.
///
/// Deliberately payload-free: callers only learn that the operation failed.
#[derive(Debug)]
pub struct IoError;

impl Display for IoError {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        // Render exactly like the derived `Debug` output ("IoError").
        write!(f, "{:?}", self)
    }
}

/// Shorthand for results produced by this module's readers and writers.
pub type IoResult<T> = Result<T, IoError>;
/// A [`Read`] source that can report how much input is still unread.
pub trait Remaining: Read {
    /// Number of bytes left to read.
    fn remaining(&self) -> usize;

    /// True once every byte has been consumed.
    fn is_empty(&self) -> bool {
        matches!(self.remaining(), 0)
    }
}
pub trait Read {
fn read_exact(&mut self, bytes: &mut [u8]) -> IoResult<()>;
/// Reads a bool encoded as a single byte; anything but 0/1 is rejected.
#[inline]
fn read_bool(&mut self) -> IoResult<bool> {
    match self.read_u8()? {
        0 => Ok(false),
        1 => Ok(true),
        _ => Err(IoError),
    }
}
/// Reads a target and wraps it as a `BoolTarget` without range-checking.
#[inline]
fn read_target_bool(&mut self) -> IoResult<BoolTarget> {
    let target = self.read_target()?;
    Ok(BoolTarget::new_unsafe(target))
}
/// Reads a length-prefixed vector of boolean targets.
#[inline]
fn read_target_bool_vec(&mut self) -> IoResult<Vec<BoolTarget>> {
    let length = self.read_usize()?;
    let mut out = Vec::with_capacity(length);
    for _ in 0..length {
        out.push(self.read_target_bool()?);
    }
    Ok(out)
}
/// Reads one raw byte.
#[inline]
fn read_u8(&mut self) -> IoResult<u8> {
    let mut byte = [0u8; size_of::<u8>()];
    self.read_exact(&mut byte)?;
    Ok(byte[0])
}
/// Reads a little-endian `u16`.
#[inline]
fn read_u16(&mut self) -> IoResult<u16> {
    let mut bytes = [0u8; size_of::<u16>()];
    self.read_exact(&mut bytes)?;
    Ok(u16::from_le_bytes(bytes))
}
/// Reads a little-endian `u32`.
#[inline]
fn read_u32(&mut self) -> IoResult<u32> {
    let mut bytes = [0u8; size_of::<u32>()];
    self.read_exact(&mut bytes)?;
    Ok(u32::from_le_bytes(bytes))
}
#[inline]
fn read_usize(&mut self) -> IoResult<usize> {
let mut buf = [0; core::mem::size_of::<u64>()];
self.read_exact(&mut buf)?;
Ok(u64::from_le_bytes(buf) as usize)
}
/// Reads a length-prefixed vector of `usize` values.
#[inline]
fn read_usize_vec(&mut self) -> IoResult<Vec<usize>> {
    let len = self.read_usize()?;
    (0..len)
        .map(|_| self.read_usize())
        .collect::<Result<Vec<_>, _>>()
}
/// Reads a base-field element from its canonical little-endian `u64`.
#[inline]
fn read_field<F>(&mut self) -> IoResult<F>
where
    F: Field64,
{
    let mut buf = [0; size_of::<u64>()];
    self.read_exact(&mut buf)?;
    Ok(F::from_canonical_u64(u64::from_le_bytes(buf)))
}
/// Reads `length` base-field elements; the count comes from the caller,
/// not the stream.
#[inline]
fn read_field_vec<F>(&mut self, length: usize) -> IoResult<Vec<F>>
where
    F: Field64,
{
    (0..length)
        .map(|_| self.read_field())
        .collect::<Result<Vec<_>, _>>()
}
/// Reads an extension-field element as its `D` base-field coefficients,
/// in basefield-array order.
#[inline]
fn read_field_ext<F, const D: usize>(&mut self) -> IoResult<F::Extension>
where
    F: Field64 + Extendable<D>,
{
    let mut arr = [F::ZERO; D];
    for a in arr.iter_mut() {
        *a = self.read_field()?;
    }
    Ok(<F::Extension as FieldExtension<D>>::from_basefield_array(
        arr,
    ))
}
/// Reads `length` extension-field elements.
#[inline]
fn read_field_ext_vec<F, const D: usize>(
    &mut self,
    length: usize,
) -> IoResult<Vec<F::Extension>>
where
    F: RichField + Extendable<D>,
{
    (0..length).map(|_| self.read_field_ext::<F, D>()).collect()
}
/// Reads a `Target`: a leading bool selects wire targets (row, column)
/// versus virtual targets (index).
#[inline]
fn read_target(&mut self) -> IoResult<Target> {
    let is_wire = self.read_bool()?;
    if is_wire {
        let row = self.read_usize()?;
        let column = self.read_usize()?;
        Ok(Target::wire(row, column))
    } else {
        let index = self.read_usize()?;
        Ok(Target::VirtualTarget { index })
    }
}
/// Reads an extension target as `D` individual targets.
#[inline]
fn read_target_ext<const D: usize>(&mut self) -> IoResult<ExtensionTarget<D>> {
    // Placeholder targets; every slot is overwritten below.
    let mut res = [Target::wire(0, 0); D];
    for r in res.iter_mut() {
        *r = self.read_target()?;
    }
    Ok(ExtensionTarget(res))
}
#[inline]
fn read_target_array<const N: usize>(&mut self) -> IoResult<[Target; N]> {
(0..N)
.map(|_| self.read_target())
.collect::<Result<Vec<_>, _>>()
.map(|v| v.try_into().unwrap())
}
/// Reads a length-prefixed vector of targets.
#[inline]
fn read_target_vec(&mut self) -> IoResult<Vec<Target>> {
    let count = self.read_usize()?;
    let mut targets = Vec::with_capacity(count);
    for _ in 0..count {
        targets.push(self.read_target()?);
    }
    Ok(targets)
}
/// Reads a length-prefixed vector of extension targets.
#[inline]
fn read_target_ext_vec<const D: usize>(&mut self) -> IoResult<Vec<ExtensionTarget<D>>> {
    let count = self.read_usize()?;
    let mut targets = Vec::with_capacity(count);
    for _ in 0..count {
        targets.push(self.read_target_ext::<D>()?);
    }
    Ok(targets)
}
/// Reads a hash digest of `H::HASH_SIZE` raw bytes.
#[inline]
fn read_hash<F, H>(&mut self) -> IoResult<H::Hash>
where
    F: RichField,
    H: Hasher<F>,
{
    let mut buf = vec![0; H::HASH_SIZE];
    self.read_exact(&mut buf)?;
    Ok(H::Hash::from_bytes(&buf))
}
/// Reads an in-circuit hash as its 4 constituent targets.
#[inline]
fn read_target_hash(&mut self) -> IoResult<HashOutTarget> {
    // Placeholder targets; every slot is overwritten below.
    let mut elements = [Target::wire(0, 0); 4];
    for e in elements.iter_mut() {
        *e = self.read_target()?;
    }
    Ok(HashOutTarget { elements })
}
/// Reads `length` digests; the count comes from the caller.
#[inline]
fn read_hash_vec<F, H>(&mut self, length: usize) -> IoResult<Vec<H::Hash>>
where
    F: RichField,
    H: Hasher<F>,
{
    (0..length)
        .map(|_| self.read_hash::<F, H>())
        .collect::<Result<Vec<_>, _>>()
}
/// Reads a Merkle cap of `2^cap_height` digests; the height is supplied
/// by the caller, not encoded in the stream.
#[inline]
fn read_merkle_cap<F, H>(&mut self, cap_height: usize) -> IoResult<MerkleCap<F, H>>
where
    F: RichField,
    H: Hasher<F>,
{
    let cap_length = 1 << cap_height;
    Ok(MerkleCap(
        (0..cap_length)
            .map(|_| self.read_hash::<F, H>())
            .collect::<Result<Vec<_>, _>>()?,
    ))
}
/// Reads a Merkle cap of in-circuit hashes; here the cap length IS
/// length-prefixed in the stream.
#[inline]
fn read_target_merkle_cap(&mut self) -> IoResult<MerkleCapTarget> {
    let length = self.read_usize()?;
    Ok(MerkleCapTarget(
        (0..length)
            .map(|_| self.read_target_hash())
            .collect::<Result<Vec<_>, _>>()?,
    ))
}
/// Reads a full Merkle tree: variable-length leaves, then internal
/// digests, then the cap (with its height prefix).
#[inline]
fn read_merkle_tree<F, H>(&mut self) -> IoResult<MerkleTree<F, H>>
where
    F: RichField,
    H: Hasher<F>,
{
    let leaves_len = self.read_usize()?;
    let mut leaves = Vec::with_capacity(leaves_len);
    for _ in 0..leaves_len {
        let leaf_len = self.read_usize()?;
        leaves.push(self.read_field_vec(leaf_len)?);
    }
    let digests_len = self.read_usize()?;
    let digests = self.read_hash_vec::<F, H>(digests_len)?;
    let cap_height = self.read_usize()?;
    let cap = self.read_merkle_cap::<F, H>(cap_height)?;
    Ok(MerkleTree {
        leaves,
        digests,
        cap,
    })
}
/// Reads all polynomial openings of a proof. Every vector length is
/// derived from `common_data`, so nothing here is length-prefixed; the
/// read order must match the corresponding writer exactly.
// NOTE(review): type parameter `C` is unused in this body — presumably
// kept for call-site symmetry with the other `::<F, C, D>` readers.
#[inline]
fn read_opening_set<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<OpeningSet<F, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    let constants = self.read_field_ext_vec::<F, D>(common_data.num_constants)?;
    let plonk_sigmas = self.read_field_ext_vec::<F, D>(config.num_routed_wires)?;
    let wires = self.read_field_ext_vec::<F, D>(config.num_wires)?;
    let plonk_zs = self.read_field_ext_vec::<F, D>(config.num_challenges)?;
    let plonk_zs_next = self.read_field_ext_vec::<F, D>(config.num_challenges)?;
    let lookup_zs = self.read_field_ext_vec::<F, D>(common_data.num_all_lookup_polys())?;
    let lookup_zs_next = self.read_field_ext_vec::<F, D>(common_data.num_all_lookup_polys())?;
    let partial_products = self
        .read_field_ext_vec::<F, D>(common_data.num_partial_products * config.num_challenges)?;
    let quotient_polys = self.read_field_ext_vec::<F, D>(
        common_data.quotient_degree_factor * config.num_challenges,
    )?;
    Ok(OpeningSet {
        constants,
        plonk_sigmas,
        wires,
        plonk_zs,
        plonk_zs_next,
        partial_products,
        quotient_polys,
        lookup_zs,
        lookup_zs_next,
    })
}
/// In-circuit analogue; every vector is length-prefixed instead of
/// being sized from common data. Read order must match the writer.
#[inline]
fn read_target_opening_set<const D: usize>(&mut self) -> IoResult<OpeningSetTarget<D>> {
    let constants = self.read_target_ext_vec::<D>()?;
    let plonk_sigmas = self.read_target_ext_vec::<D>()?;
    let wires = self.read_target_ext_vec::<D>()?;
    let plonk_zs = self.read_target_ext_vec::<D>()?;
    let plonk_zs_next = self.read_target_ext_vec::<D>()?;
    let lookup_zs = self.read_target_ext_vec::<D>()?;
    let next_lookup_zs = self.read_target_ext_vec::<D>()?;
    let partial_products = self.read_target_ext_vec::<D>()?;
    let quotient_polys = self.read_target_ext_vec::<D>()?;
    Ok(OpeningSetTarget {
        constants,
        plonk_sigmas,
        wires,
        plonk_zs,
        plonk_zs_next,
        lookup_zs,
        next_lookup_zs,
        partial_products,
        quotient_polys,
    })
}
/// Reads a Merkle proof; the sibling count is stored as a single byte,
/// so proofs are limited to depth 255 on the wire.
#[inline]
fn read_merkle_proof<F, H>(&mut self) -> IoResult<MerkleProof<F, H>>
where
    F: RichField,
    H: Hasher<F>,
{
    let length = self.read_u8()?;
    Ok(MerkleProof {
        siblings: (0..length)
            .map(|_| self.read_hash::<F, H>())
            .collect::<Result<_, _>>()?,
    })
}
/// In-circuit analogue of `read_merkle_proof` (same one-byte length).
#[inline]
fn read_target_merkle_proof(&mut self) -> IoResult<MerkleProofTarget> {
    let length = self.read_u8()?;
    Ok(MerkleProofTarget {
        siblings: (0..length)
            .map(|_| self.read_target_hash())
            .collect::<Result<_, _>>()?,
    })
}
/// Reads the initial-tree openings for one FRI query: one
/// (values, Merkle proof) pair per oracle, in the fixed order
/// constants/sigmas, wires, Zs/partial-products, quotient. The salted
/// oracles carry `salt` extra values; constants/sigmas is never salted.
#[inline]
fn read_fri_initial_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<FriInitialTreeProof<F, C::Hasher>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let salt = salt_size(common_data.fri_params.leaf_hiding);
    let mut evals_proofs = Vec::with_capacity(4);
    let constants_sigmas_v = self.read_field_vec(
        common_data.fri_oracle_layouts[PlonkOracle::CONSTANTS_SIGMAS.index].raw_polys,
    )?;
    let constants_sigmas_p = self.read_merkle_proof()?;
    evals_proofs.push((constants_sigmas_v, constants_sigmas_p));
    let wires_v = self.read_field_vec(
        common_data.fri_oracle_layouts[PlonkOracle::WIRES.index].raw_polys + salt,
    )?;
    let wires_p = self.read_merkle_proof()?;
    evals_proofs.push((wires_v, wires_p));
    let zs_partial_v = self.read_field_vec(
        common_data.fri_oracle_layouts[PlonkOracle::ZS_PARTIAL_PRODUCTS.index].raw_polys + salt,
    )?;
    let zs_partial_p = self.read_merkle_proof()?;
    evals_proofs.push((zs_partial_v, zs_partial_p));
    let quotient_v = self.read_field_vec(
        common_data.fri_oracle_layouts[PlonkOracle::QUOTIENT.index].raw_polys + salt,
    )?;
    let quotient_p = self.read_merkle_proof()?;
    evals_proofs.push((quotient_v, quotient_p));
    Ok(FriInitialTreeProof { evals_proofs })
}
/// In-circuit analogue; the oracle count is length-prefixed rather than
/// fixed at four.
#[inline]
fn read_target_fri_initial_proof(&mut self) -> IoResult<FriInitialTreeProofTarget> {
    let len = self.read_usize()?;
    let mut evals_proofs = Vec::with_capacity(len);
    for _ in 0..len {
        evals_proofs.push((self.read_target_vec()?, self.read_target_merkle_proof()?));
    }
    Ok(FriInitialTreeProofTarget { evals_proofs })
}
/// Reads one FRI query step of `arity` evaluations. Compressed proofs
/// store one fewer evaluation (presumably reconstructible by the
/// verifier — confirm against the writer).
#[inline]
fn read_fri_query_step<F, C, const D: usize>(
    &mut self,
    arity: usize,
    compressed: bool,
) -> IoResult<FriQueryStep<F, C::Hasher, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let evals = self.read_field_ext_vec::<F, D>(arity - usize::from(compressed))?;
    let merkle_proof = self.read_merkle_proof()?;
    Ok(FriQueryStep {
        evals,
        merkle_proof,
    })
}
/// In-circuit analogue of `read_fri_query_step`; lengths are prefixed.
#[inline]
fn read_target_fri_query_step<const D: usize>(&mut self) -> IoResult<FriQueryStepTarget<D>> {
    let evals = self.read_target_ext_vec::<D>()?;
    let merkle_proof = self.read_target_merkle_proof()?;
    Ok(FriQueryStepTarget {
        evals,
        merkle_proof,
    })
}
/// Reads all (uncompressed) FRI query rounds; the round count and the
/// per-round arities come from `common_data`, not the stream.
#[inline]
#[allow(clippy::type_complexity)]
fn read_fri_query_rounds<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<Vec<FriQueryRound<F, C::Hasher, D>>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    let mut fqrs = Vec::with_capacity(config.fri_config.num_query_rounds);
    for _ in 0..config.fri_config.num_query_rounds {
        let initial_trees_proof = self.read_fri_initial_proof::<F, C, D>(common_data)?;
        let steps = common_data
            .fri_params
            .reduction_arity_bits
            .iter()
            .map(|&ar| self.read_fri_query_step::<F, C, D>(1 << ar, false))
            .collect::<Result<_, _>>()?;
        fqrs.push(FriQueryRound {
            initial_trees_proof,
            steps,
        })
    }
    Ok(fqrs)
}
/// Reads FRI query rounds for an in-circuit proof. Unlike the native
/// variant, the round count and per-round step counts are explicit in
/// the stream rather than derived from common data.
#[inline]
fn read_target_fri_query_rounds<const D: usize>(
    &mut self,
) -> IoResult<Vec<FriQueryRoundTarget<D>>> {
    let round_count = self.read_usize()?;
    let mut rounds = Vec::with_capacity(round_count);
    for _ in 0..round_count {
        let initial_trees_proof = self.read_target_fri_initial_proof()?;
        let step_count = self.read_usize()?;
        let mut steps = Vec::with_capacity(step_count);
        for _ in 0..step_count {
            steps.push(self.read_target_fri_query_step::<D>()?);
        }
        rounds.push(FriQueryRoundTarget {
            initial_trees_proof,
            steps,
        });
    }
    Ok(rounds)
}
/// Reads a FRI proof. Cap counts, query counts, and final-polynomial
/// sizes are all derived from `common_data`.
#[inline]
fn read_fri_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<FriProof<F, C::Hasher, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    // One commit-phase cap per reduction round.
    let commit_phase_merkle_caps = (0..common_data.fri_params.reduction_arity_bits.len())
        .map(|_| self.read_merkle_cap(config.fri_config.cap_height))
        .collect::<Result<Vec<_>, _>>()?;
    let batch_mask_proof = self.read_optional_batch_mask_proof::<F, C, D>(common_data)?;
    let query_round_proofs = self.read_fri_query_rounds::<F, C, D>(common_data)?;
    let final_polys = FriFinalPolys {
        layout: common_data.fri_params.final_poly_layout.clone(),
        chunks: (0..common_data.fri_params.final_poly_chunks())
            .map(|_| {
                Ok(PolynomialCoeffs::new(self.read_field_ext_vec::<F, D>(
                    common_data.fri_params.final_poly_len(),
                )?))
            })
            .collect::<IoResult<Vec<_>>>()?,
    };
    let pow_witness = self.read_field()?;
    Ok(FriProof {
        commit_phase_merkle_caps,
        batch_mask_proof,
        query_round_proofs,
        final_polys,
        pow_witness,
    })
}
/// In-circuit analogue; all counts are length-prefixed in the stream.
#[inline]
fn read_target_fri_proof<const D: usize>(&mut self) -> IoResult<FriProofTarget<D>> {
    let length = self.read_usize()?;
    let commit_phase_merkle_caps = (0..length)
        .map(|_| self.read_target_merkle_cap())
        .collect::<Result<Vec<_>, _>>()?;
    let batch_mask_proof = self.read_optional_target_batch_mask_proof::<D>()?;
    let query_round_proofs = self.read_target_fri_query_rounds::<D>()?;
    let final_polys = FriFinalPolysTarget {
        chunks: (0..self.read_usize()?)
            .map(|_| Ok(PolynomialCoeffsExtTarget(self.read_target_ext_vec::<D>()?)))
            .collect::<IoResult<Vec<_>>>()?,
    };
    let pow_witness = self.read_target()?;
    Ok(FriProofTarget {
        commit_phase_merkle_caps,
        batch_mask_proof,
        query_round_proofs,
        final_polys,
        pow_witness,
    })
}
/// Decodes a `FriReductionStrategy` from a one-byte variant tag:
/// 0 = Fixed, 1 = ConstantArityBits, 2 = MinSize (with an inner
/// 0/1 option tag). Any other tag is an error.
fn read_fri_reduction_strategy(&mut self) -> IoResult<FriReductionStrategy> {
    match self.read_u8()? {
        0 => Ok(FriReductionStrategy::Fixed(self.read_usize_vec()?)),
        1 => {
            let arity_bits = self.read_usize()?;
            let final_poly_bits = self.read_usize()?;
            Ok(FriReductionStrategy::ConstantArityBits(
                arity_bits,
                final_poly_bits,
            ))
        }
        2 => match self.read_u8()? {
            0 => Ok(FriReductionStrategy::MinSize(None)),
            1 => {
                let max = self.read_usize()?;
                Ok(FriReductionStrategy::MinSize(Some(max)))
            }
            _ => Err(IoError),
        },
        _ => Err(IoError),
    }
}
/// Reads a `FriConfig`; field order must match the writer.
fn read_fri_config(&mut self) -> IoResult<FriConfig> {
    let rate_bits = self.read_usize()?;
    let cap_height = self.read_usize()?;
    let num_query_rounds = self.read_usize()?;
    let proof_of_work_bits = self.read_u32()?;
    let reduction_strategy = self.read_fri_reduction_strategy()?;
    Ok(FriConfig {
        rate_bits,
        cap_height,
        num_query_rounds,
        proof_of_work_bits,
        reduction_strategy,
    })
}
/// Reads the masking degrees used by `ZkMode::PolyFri`.
fn read_poly_fri_zk_config(&mut self) -> IoResult<PolyFriZkConfig> {
    Ok(PolyFriZkConfig {
        wire_mask_degree: self.read_usize()?,
        z_mask_degree: self.read_usize()?,
        fri_batch_mask_degree: self.read_usize()?,
    })
}
/// Reads a `ZkConfig`: a one-byte mode tag (0 = Disabled, 1 = PolyFri
/// with its sub-config, 2 = RowBlinding) plus the leaf-hiding flag.
fn read_zk_config(&mut self) -> IoResult<ZkConfig> {
    let mode = match self.read_u8()? {
        0 => ZkMode::Disabled,
        1 => ZkMode::PolyFri(self.read_poly_fri_zk_config()?),
        2 => ZkMode::RowBlinding,
        _ => return Err(IoError),
    };
    let leaf_hiding = self.read_bool()?;
    Ok(ZkConfig { mode, leaf_hiding })
}
/// Reads a `CircuitConfig`; field order must match the writer.
fn read_circuit_config(&mut self) -> IoResult<CircuitConfig> {
    let num_wires = self.read_usize()?;
    let num_routed_wires = self.read_usize()?;
    let num_constants = self.read_usize()?;
    let security_bits = self.read_usize()?;
    let num_challenges = self.read_usize()?;
    let max_quotient_degree_factor = self.read_usize()?;
    let use_base_arithmetic_gate = self.read_bool()?;
    let zk_config = self.read_zk_config()?;
    let fri_config = self.read_fri_config()?;
    Ok(CircuitConfig {
        num_wires,
        num_routed_wires,
        num_constants,
        security_bits,
        num_challenges,
        max_quotient_degree_factor,
        use_base_arithmetic_gate,
        zk_config,
        fri_config,
    })
}
/// Reads `FriParams`, including the optional batch-masking parameters
/// (bool-prefixed) and the final-polynomial layout (one-byte tag:
/// 0 = Single, 1 = Split).
fn read_fri_params(&mut self) -> IoResult<FriParams> {
    let config = self.read_fri_config()?;
    let reduction_arity_bits = self.read_usize_vec()?;
    let degree_bits = self.read_usize()?;
    let leaf_hiding = self.read_bool()?;
    let batch_masking = if self.read_bool()? {
        Some(FriBatchMaskingParams {
            mask_degree: self.read_usize()?,
        })
    } else {
        None
    };
    let final_poly_layout = match self.read_u8()? {
        0 => FriFinalPolyLayout::Single,
        1 => FriFinalPolyLayout::Split {
            chunk_degree_bits: self.read_usize()?,
            chunks: self.read_usize()?,
        },
        _ => return Err(IoError),
    };
    Ok(FriParams {
        config,
        reduction_arity_bits,
        degree_bits,
        leaf_hiding,
        batch_masking,
        final_poly_layout,
    })
}
/// Deserializes a gate via the supplied serializer; implementations
/// supply the byte source.
fn read_gate<F: RichField + Extendable<D>, const D: usize>(
    &mut self,
    gate_serializer: &dyn GateSerializer<F, D>,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<GateRef<F, D>>;
/// Deserializes a witness generator via the supplied serializer.
fn read_generator<F: RichField + Extendable<D>, const D: usize>(
    &mut self,
    generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<WitnessGeneratorRef<F, D>>;
/// Reads selector indices plus the list of selector-group ranges, each
/// stored as a (start, end) pair.
fn read_selectors_info(&mut self) -> IoResult<SelectorsInfo> {
    let selector_indices = self.read_usize_vec()?;
    let group_count = self.read_usize()?;
    let mut groups = Vec::with_capacity(group_count);
    for _ in 0..group_count {
        let start = self.read_usize()?;
        let end = self.read_usize()?;
        groups.push(start..end);
    }
    Ok(SelectorsInfo {
        selector_indices,
        groups,
    })
}
/// Reads a `FriOracleLayout`; representation uses a one-byte tag
/// (0 = Raw, 1 = SplitMask with its split power).
fn read_fri_oracle_layout(&mut self) -> IoResult<FriOracleLayout> {
    let raw_polys = self.read_usize()?;
    let logical_polys = self.read_usize()?;
    let representation = match self.read_u8()? {
        0 => FriOracleRepresentation::Raw,
        1 => FriOracleRepresentation::SplitMask {
            split_power: self.read_usize()?,
        },
        _ => return Err(IoError),
    };
    Ok(FriOracleLayout {
        raw_polys,
        logical_polys,
        representation,
    })
}
/// Reads a committed polynomial batch: each polynomial's coefficients
/// (length-prefixed), its Merkle tree, and the commitment parameters.
fn read_polynomial_batch<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
) -> IoResult<PolynomialBatch<F, C, D>> {
    let poly_len = self.read_usize()?;
    let mut polynomials = Vec::with_capacity(poly_len);
    for _ in 0..poly_len {
        let plen = self.read_usize()?;
        polynomials.push(PolynomialCoeffs::new(self.read_field_vec(plen)?));
    }
    let merkle_tree = self.read_merkle_tree()?;
    let degree_log = self.read_usize()?;
    let rate_bits = self.read_usize()?;
    let blinding = self.read_bool()?;
    Ok(PolynomialBatch {
        polynomials,
        merkle_tree,
        degree_log,
        rate_bits,
        blinding,
    })
}
/// Reads a `CommonCircuitData`. Gates are deserialized last, against a
/// partially built `CommonCircuitData`, because `read_gate` needs the
/// common data itself.
fn read_common_circuit_data<F: RichField + Extendable<D>, const D: usize>(
    &mut self,
    gate_serializer: &dyn GateSerializer<F, D>,
) -> IoResult<CommonCircuitData<F, D>> {
    let config = self.read_circuit_config()?;
    let trace_degree_bits = self.read_usize()?;
    let fri_params = self.read_fri_params()?;
    let public_initial_degree_bits = self.read_usize()?;
    let fri_oracle_layouts_len = self.read_usize()?;
    let fri_oracle_layouts = (0..fri_oracle_layouts_len)
        .map(|_| self.read_fri_oracle_layout())
        .collect::<IoResult<Vec<_>>>()?;
    let selectors_info = self.read_selectors_info()?;
    let quotient_degree_factor = self.read_usize()?;
    let num_gate_constraints = self.read_usize()?;
    let num_constants = self.read_usize()?;
    let num_public_inputs = self.read_usize()?;
    let k_is_len = self.read_usize()?;
    let k_is = self.read_field_vec(k_is_len)?;
    let num_partial_products = self.read_usize()?;
    let num_lookup_polys = self.read_usize()?;
    let num_lookup_selectors = self.read_usize()?;
    let length = self.read_usize()?;
    let mut luts = Vec::with_capacity(length);
    for _ in 0..length {
        luts.push(Arc::new(self.read_lut()?));
    }
    let gates_len = self.read_usize()?;
    let mut gates = Vec::with_capacity(gates_len);
    // Build the common data with an empty gate list first; `read_gate`
    // below requires a `&CommonCircuitData` argument.
    let mut common_data = CommonCircuitData {
        config,
        trace_degree_bits,
        fri_params,
        public_initial_degree_bits,
        fri_oracle_layouts,
        gates: vec![],
        selectors_info,
        quotient_degree_factor,
        num_gate_constraints,
        num_constants,
        num_public_inputs,
        k_is,
        num_partial_products,
        num_lookup_polys,
        num_lookup_selectors,
        luts,
    };
    for _ in 0..gates_len {
        let gate = self.read_gate::<F, D>(gate_serializer, &common_data)?;
        gates.push(gate);
    }
    common_data.gates = gates;
    Ok(common_data)
}
/// Reads a full `CircuitData`: common data first, then prover-only
/// data (which needs the common data), then verifier-only data.
fn read_circuit_data<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
    gate_serializer: &dyn GateSerializer<F, D>,
    generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
) -> IoResult<CircuitData<F, C, D>> {
    let common = self.read_common_circuit_data(gate_serializer)?;
    let prover_only = self.read_prover_only_circuit_data(generator_serializer, &common)?;
    let verifier_only = self.read_verifier_only_circuit_data()?;
    Ok(CircuitData {
        prover_only,
        verifier_only,
        common,
    })
}
/// Reads a `ProverOnlyCircuitData`: generators, the watch map, the
/// constants/sigmas commitment, sigma vectors, subgroup, public-input
/// targets, the copy-constraint representative map, an optional FFT
/// root table (bool-prefixed), the circuit digest, and lookup metadata.
fn read_prover_only_circuit_data<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
    generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<ProverOnlyCircuitData<F, C, D>> {
    let gen_len = self.read_usize()?;
    let mut generators = Vec::with_capacity(gen_len);
    for _ in 0..gen_len {
        generators.push(self.read_generator(generator_serializer, common_data)?);
    }
    // Map from watched target index to the generators watching it.
    let map_len = self.read_usize()?;
    let mut generator_indices_by_watches = BTreeMap::new();
    for _ in 0..map_len {
        let k = self.read_usize()?;
        generator_indices_by_watches.insert(k, self.read_usize_vec()?);
    }
    let constants_sigmas_commitment = self.read_polynomial_batch()?;
    let sigmas_len = self.read_usize()?;
    let mut sigmas = Vec::with_capacity(sigmas_len);
    for _ in 0..sigmas_len {
        let sigma_len = self.read_usize()?;
        sigmas.push(self.read_field_vec(sigma_len)?);
    }
    let subgroup_len = self.read_usize()?;
    let subgroup = self.read_field_vec(subgroup_len)?;
    let public_inputs = self.read_target_vec()?;
    let representative_map = self.read_usize_vec()?;
    // Optional FFT root table, encoded as a presence bool followed by
    // a table of variable-length rows.
    let is_some = self.read_bool()?;
    let fft_root_table = match is_some {
        true => {
            let table_len = self.read_usize()?;
            let mut table = Vec::with_capacity(table_len);
            for _ in 0..table_len {
                let len = self.read_usize()?;
                table.push(self.read_field_vec(len)?);
            }
            Some(table)
        }
        false => None,
    };
    let circuit_digest = self.read_hash::<F, <C as GenericConfig<D>>::Hasher>()?;
    let length = self.read_usize()?;
    let mut lookup_rows = Vec::with_capacity(length);
    for _ in 0..length {
        lookup_rows.push(LookupWire {
            last_lu_gate: self.read_usize()?,
            last_lut_gate: self.read_usize()?,
            first_lut_gate: self.read_usize()?,
        });
    }
    let length = self.read_usize()?;
    let mut lut_to_lookups = Vec::with_capacity(length);
    for _ in 0..length {
        lut_to_lookups.push(self.read_target_lut()?);
    }
    Ok(ProverOnlyCircuitData {
        generators,
        generator_indices_by_watches,
        constants_sigmas_commitment,
        sigmas,
        subgroup,
        public_inputs,
        representative_map,
        fft_root_table,
        circuit_digest,
        lookup_rows,
        lut_to_lookups,
    })
}
/// Reads a `ProverCircuitData`: common data, then prover-only data.
fn read_prover_circuit_data<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
    gate_serializer: &dyn GateSerializer<F, D>,
    generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
) -> IoResult<ProverCircuitData<F, C, D>> {
    let common = self.read_common_circuit_data(gate_serializer)?;
    let prover_only = self.read_prover_only_circuit_data(generator_serializer, &common)?;
    Ok(ProverCircuitData {
        prover_only,
        common,
    })
}
/// Reads a `VerifierOnlyCircuitData`: a height-prefixed constants/
/// sigmas cap plus the circuit digest.
fn read_verifier_only_circuit_data<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
) -> IoResult<VerifierOnlyCircuitData<C, D>> {
    let height = self.read_usize()?;
    let constants_sigmas_cap = self.read_merkle_cap(height)?;
    let circuit_digest = self.read_hash::<F, <C as GenericConfig<D>>::Hasher>()?;
    Ok(VerifierOnlyCircuitData {
        constants_sigmas_cap,
        circuit_digest,
    })
}
/// Reads a `VerifierCircuitData`. Note the wire order: verifier-only
/// data comes BEFORE common data, the reverse of `read_circuit_data`.
fn read_verifier_circuit_data<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    &mut self,
    gate_serializer: &dyn GateSerializer<F, D>,
) -> IoResult<VerifierCircuitData<F, C, D>> {
    let verifier_only = self.read_verifier_only_circuit_data()?;
    let common = self.read_common_circuit_data(gate_serializer)?;
    Ok(VerifierCircuitData {
        verifier_only,
        common,
    })
}
/// Reads the in-circuit verifier data: cap targets plus digest targets.
fn read_target_verifier_circuit(&mut self) -> IoResult<VerifierCircuitTarget> {
    let constants_sigmas_cap = self.read_target_merkle_cap()?;
    let circuit_digest = self.read_target_hash()?;
    Ok(VerifierCircuitTarget {
        constants_sigmas_cap,
        circuit_digest,
    })
}
/// Reads an uncompressed proof: the three commitment caps (wires,
/// Zs/partial-products, quotient), the opening set, then the FRI proof.
#[inline]
fn read_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<Proof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    let wires_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let plonk_zs_partial_products_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let quotient_polys_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let openings = self.read_opening_set::<F, C, D>(common_data)?;
    let opening_proof = self.read_fri_proof::<F, C, D>(common_data)?;
    Ok(Proof {
        wires_cap,
        plonk_zs_partial_products_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
    })
}
/// In-circuit analogue of `read_proof`; all sizes are self-describing.
#[inline]
fn read_target_proof<const D: usize>(&mut self) -> IoResult<ProofTarget<D>> {
    let wires_cap = self.read_target_merkle_cap()?;
    let plonk_zs_partial_products_cap = self.read_target_merkle_cap()?;
    let quotient_polys_cap = self.read_target_merkle_cap()?;
    let openings = self.read_target_opening_set::<D>()?;
    let opening_proof = self.read_target_fri_proof::<D>()?;
    Ok(ProofTarget {
        wires_cap,
        plonk_zs_partial_products_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
    })
}
/// Reads a proof followed by its length-prefixed public inputs.
// NOTE(review): the `Self: Remaining` bound is not used in this body
// (the public-input count is read from the stream) — confirm whether
// the bound is still required by callers.
#[inline]
fn read_proof_with_public_inputs<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<ProofWithPublicInputs<F, C, D>>
where
    Self: Remaining,
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let proof = self.read_proof(common_data)?;
    let pi_len = self.read_usize()?;
    let public_inputs = self.read_field_vec(pi_len)?;
    Ok(ProofWithPublicInputs {
        proof,
        public_inputs,
    })
}
/// In-circuit analogue of `read_proof_with_public_inputs`.
#[inline]
fn read_target_proof_with_public_inputs<const D: usize>(
    &mut self,
) -> IoResult<ProofWithPublicInputsTarget<D>> {
    let proof = self.read_target_proof()?;
    let public_inputs = self.read_target_vec()?;
    Ok(ProofWithPublicInputsTarget {
        proof,
        public_inputs,
    })
}
/// Reads compressed FRI query rounds. In compressed form, each distinct
/// query index stores its initial-tree proof and its per-round query
/// steps only once, keyed by index.
#[inline]
fn read_compressed_fri_query_rounds<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedFriQueryRounds<F, C::Hasher, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    // Query indices are stored as `u32` on the wire.
    let original_indices = (0..config.fri_config.num_query_rounds)
        .map(|_| self.read_u32().map(|i| i as usize))
        .collect::<Result<Vec<_>, _>>()?;
    // Sort + dedup: initial-tree proofs are serialized once per
    // distinct index, in ascending index order.
    let mut indices = original_indices.clone();
    indices.sort_unstable();
    indices.dedup();
    let mut pairs = Vec::new();
    for &i in &indices {
        pairs.push((i, self.read_fri_initial_proof::<F, C, D>(common_data)?));
    }
    let initial_trees_proofs = HashMap::from_iter(pairs);
    let mut steps = Vec::with_capacity(common_data.fri_params.reduction_arity_bits.len());
    for &a in &common_data.fri_params.reduction_arity_bits {
        // Each reduction shrinks indices by its arity bits; previously
        // distinct indices can coincide, so dedup again before reading
        // one step per remaining distinct index.
        indices.iter_mut().for_each(|x| {
            *x >>= a;
        });
        indices.dedup();
        let query_steps = (0..indices.len())
            .map(|_| self.read_fri_query_step::<F, C, D>(1 << a, true))
            .collect::<Result<Vec<_>, _>>()?;
        steps.push(
            indices
                .iter()
                .copied()
                .zip(query_steps)
                .collect::<HashMap<_, _>>(),
        );
    }
    Ok(CompressedFriQueryRounds {
        indices: original_indices,
        initial_trees_proofs,
        steps,
    })
}
/// Reads a compressed FRI proof; layout mirrors `read_fri_proof` except
/// the query rounds are compressed.
#[inline]
fn read_compressed_fri_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedFriProof<F, C::Hasher, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    let commit_phase_merkle_caps = (0..common_data.fri_params.reduction_arity_bits.len())
        .map(|_| self.read_merkle_cap(config.fri_config.cap_height))
        .collect::<Result<Vec<_>, _>>()?;
    let batch_mask_proof = self.read_optional_batch_mask_proof::<F, C, D>(common_data)?;
    let query_round_proofs = self.read_compressed_fri_query_rounds::<F, C, D>(common_data)?;
    let final_polys = FriFinalPolys {
        layout: common_data.fri_params.final_poly_layout.clone(),
        chunks: (0..common_data.fri_params.final_poly_chunks())
            .map(|_| {
                Ok(PolynomialCoeffs::new(self.read_field_ext_vec::<F, D>(
                    common_data.fri_params.final_poly_len(),
                )?))
            })
            .collect::<IoResult<Vec<_>>>()?,
    };
    let pow_witness = self.read_field()?;
    Ok(CompressedFriProof {
        commit_phase_merkle_caps,
        batch_mask_proof,
        query_round_proofs,
        final_polys,
        pow_witness,
    })
}
/// Reads an optional FRI batch-mask proof, prefixed by a presence bool.
/// Opening counts come from `common_data`.
fn read_optional_batch_mask_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<Option<FriBatchMaskProof<F, C::Hasher, D>>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    if !self.read_bool()? {
        return Ok(None);
    }
    let cap = self.read_merkle_cap(common_data.fri_params.config.cap_height)?;
    let query_openings = (0..common_data.fri_params.config.num_query_rounds)
        .map(|_| {
            Ok(FriBatchMaskQuery {
                values: self
                    .read_field_ext_vec::<F, D>(common_data.fri_params.final_poly_chunks())?,
                merkle_proof: self.read_merkle_proof()?,
            })
        })
        .collect::<IoResult<Vec<_>>>()?;
    Ok(Some(FriBatchMaskProof {
        cap,
        query_openings,
    }))
}
/// In-circuit analogue; the query count is length-prefixed instead.
fn read_optional_target_batch_mask_proof<const D: usize>(
    &mut self,
) -> IoResult<Option<FriBatchMaskProofTarget<D>>> {
    if !self.read_bool()? {
        return Ok(None);
    }
    let cap = self.read_target_merkle_cap()?;
    let query_openings = (0..self.read_usize()?)
        .map(|_| {
            Ok(FriBatchMaskQueryTarget {
                values: self.read_target_ext_vec::<D>()?,
                merkle_proof: self.read_target_merkle_proof()?,
            })
        })
        .collect::<IoResult<Vec<_>>>()?;
    Ok(Some(FriBatchMaskProofTarget {
        cap,
        query_openings,
    }))
}
/// Reads a compressed proof; mirrors `read_proof` with a compressed
/// FRI opening proof.
#[inline]
fn read_compressed_proof<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let config = &common_data.config;
    let wires_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let plonk_zs_partial_products_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let quotient_polys_cap = self.read_merkle_cap(config.fri_config.cap_height)?;
    let openings = self.read_opening_set::<F, C, D>(common_data)?;
    let opening_proof = self.read_compressed_fri_proof::<F, C, D>(common_data)?;
    Ok(CompressedProof {
        wires_cap,
        plonk_zs_partial_products_cap,
        quotient_polys_cap,
        openings,
        opening_proof,
    })
}
/// Reads a compressed proof plus public inputs. The public inputs are
/// NOT length-prefixed: every byte left in the source is consumed as
/// 8-byte field elements, hence the `Remaining` bound.
#[inline]
fn read_compressed_proof_with_public_inputs<F, C, const D: usize>(
    &mut self,
    common_data: &CommonCircuitData<F, D>,
) -> IoResult<CompressedProofWithPublicInputs<F, C, D>>
where
    Self: Remaining,
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let proof = self.read_compressed_proof(common_data)?;
    let public_inputs = self.read_field_vec(self.remaining() / size_of::<u64>())?;
    Ok(CompressedProofWithPublicInputs {
        proof,
        public_inputs,
    })
}
/// Reads a length-prefixed lookup table of `(input, output)` `u16` pairs.
#[inline]
fn read_lut(&mut self) -> IoResult<Vec<(u16, u16)>> {
    let length = self.read_usize()?;
    // Pairs are serialized back-to-back; collect short-circuits on the
    // first read error.
    (0..length)
        .map(|_| Ok((self.read_u16()?, self.read_u16()?)))
        .collect::<IoResult<Vec<_>>>()
}
/// Reads a length-prefixed lookup table of `Target` pairs.
#[inline]
fn read_target_lut(&mut self) -> IoResult<Lookup> {
    let length = self.read_usize()?;
    let mut pairs = Vec::with_capacity(length);
    for _ in 0..length {
        // Two targets per entry, read in serialized order.
        let a = self.read_target()?;
        let b = self.read_target()?;
        pairs.push((a, b));
    }
    Ok(pairs)
}
}
/// A sink for this crate's binary serialization format.
///
/// [`Write::write_all`] is the only required byte-output primitive; every
/// other `write_*` method has a default implementation built on top of it
/// (plus the two serializer hooks `write_gate` / `write_generator`, which
/// implementors must supply). All multi-byte integers are little-endian,
/// and `usize` is always widened to `u64` so the encoding is identical
/// across platforms.
///
/// Length-prefix convention: the `*_target_*` writers emit explicit
/// `usize` lengths, whereas most plain-value writers do not — their read
/// counterparts recover element counts from circuit parameters (see e.g.
/// `read_merkle_cap`, which takes the cap height as an argument).
pub trait Write {
    /// Error type produced by the concrete sink.
    type Error;

    /// Writes every byte of `bytes` to the sink. The single required
    /// output primitive.
    fn write_all(&mut self, bytes: &[u8]) -> IoResult<()>;

    /// Writes a `bool` as one byte: `1` for true, `0` for false.
    #[inline]
    fn write_bool(&mut self, x: bool) -> IoResult<()> {
        self.write_u8(u8::from(x))
    }

    /// Writes a `BoolTarget` as its underlying `Target`.
    #[inline]
    fn write_target_bool(&mut self, x: BoolTarget) -> IoResult<()> {
        self.write_target(x.target)
    }

    /// Writes a length-prefixed vector of `BoolTarget`s.
    #[inline]
    fn write_target_bool_vec(&mut self, v: &[BoolTarget]) -> IoResult<()> {
        self.write_usize(v.len())?;
        for &elem in v.iter() {
            self.write_target_bool(elem)?;
        }
        Ok(())
    }

    /// Writes a single byte.
    #[inline]
    fn write_u8(&mut self, x: u8) -> IoResult<()> {
        self.write_all(&[x])
    }

    /// Writes a `u16`, little-endian.
    #[inline]
    fn write_u16(&mut self, x: u16) -> IoResult<()> {
        self.write_all(&x.to_le_bytes())
    }

    /// Writes a `u32`, little-endian.
    #[inline]
    fn write_u32(&mut self, x: u32) -> IoResult<()> {
        self.write_all(&x.to_le_bytes())
    }

    /// Writes a `usize` as a little-endian `u64`, for a platform-independent
    /// fixed-width encoding.
    #[inline]
    fn write_usize(&mut self, x: usize) -> IoResult<()> {
        self.write_all(&(x as u64).to_le_bytes())
    }

    /// Writes a length-prefixed vector of `usize`s.
    #[inline]
    fn write_usize_vec(&mut self, v: &[usize]) -> IoResult<()> {
        self.write_usize(v.len())?;
        for &elem in v.iter() {
            self.write_usize(elem)?;
        }
        Ok(())
    }

    /// Writes a field element as its canonical `u64`, little-endian.
    #[inline]
    fn write_field<F>(&mut self, x: F) -> IoResult<()>
    where
        F: PrimeField64,
    {
        self.write_all(&x.to_canonical_u64().to_le_bytes())
    }

    /// Writes field elements back-to-back with NO length prefix; the reader
    /// must know the count (cf. `read_field_vec(len)`).
    #[inline]
    fn write_field_vec<F>(&mut self, v: &[F]) -> IoResult<()>
    where
        F: PrimeField64,
    {
        for &a in v {
            self.write_field(a)?;
        }
        Ok(())
    }

    /// Writes an extension-field element as its `D` base-field coefficients.
    #[inline]
    fn write_field_ext<F, const D: usize>(&mut self, x: F::Extension) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
    {
        for &a in &x.to_basefield_array() {
            self.write_field(a)?;
        }
        Ok(())
    }

    /// Writes extension elements back-to-back with NO length prefix.
    #[inline]
    fn write_field_ext_vec<F, const D: usize>(&mut self, v: &[F::Extension]) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
    {
        for &a in v {
            self.write_field_ext::<F, D>(a)?;
        }
        Ok(())
    }

    /// Writes a `Target` as a one-byte tag plus payload:
    /// `true` → wire `(row, column)`, `false` → virtual target `index`.
    #[inline]
    fn write_target(&mut self, x: Target) -> IoResult<()> {
        match x {
            Target::Wire(Wire { row, column }) => {
                self.write_bool(true)?;
                self.write_usize(row)?;
                self.write_usize(column)?;
            }
            Target::VirtualTarget { index } => {
                self.write_bool(false)?;
                self.write_usize(index)?;
            }
        };
        Ok(())
    }

    /// Writes the `D` component targets of an extension target, no prefix.
    #[inline]
    fn write_target_ext<const D: usize>(&mut self, x: ExtensionTarget<D>) -> IoResult<()> {
        for &elem in x.0.iter() {
            self.write_target(elem)?;
        }
        Ok(())
    }

    /// Writes a fixed-size target array; `N` is compile-time so no length
    /// prefix is needed.
    #[inline]
    fn write_target_array<const N: usize>(&mut self, v: &[Target; N]) -> IoResult<()> {
        for &elem in v.iter() {
            self.write_target(elem)?;
        }
        Ok(())
    }

    /// Writes a length-prefixed vector of targets.
    #[inline]
    fn write_target_vec(&mut self, v: &[Target]) -> IoResult<()> {
        self.write_usize(v.len())?;
        for &elem in v.iter() {
            self.write_target(elem)?;
        }
        Ok(())
    }

    /// Writes a length-prefixed vector of extension targets.
    #[inline]
    fn write_target_ext_vec<const D: usize>(&mut self, v: &[ExtensionTarget<D>]) -> IoResult<()> {
        self.write_usize(v.len())?;
        for &elem in v.iter() {
            self.write_target_ext(elem)?;
        }
        Ok(())
    }

    /// Writes a hash via its canonical byte representation.
    #[inline]
    fn write_hash<F, H>(&mut self, h: H::Hash) -> IoResult<()>
    where
        F: RichField,
        H: Hasher<F>,
    {
        self.write_all(&h.to_bytes())
    }

    /// Writes each element target of a hash target, no prefix.
    #[inline]
    fn write_target_hash(&mut self, h: &HashOutTarget) -> IoResult<()> {
        for r in h.elements.iter() {
            self.write_target(*r)?;
        }
        Ok(())
    }

    /// Writes a length-prefixed vector of hashes.
    #[inline]
    fn write_hash_vec<F, H>(&mut self, v: &[H::Hash]) -> IoResult<()>
    where
        F: RichField,
        H: Hasher<F>,
    {
        self.write_usize(v.len())?;
        for &elem in v.iter() {
            self.write_hash::<F, H>(elem)?;
        }
        Ok(())
    }

    /// Writes a Merkle cap with NO length prefix; the reader derives the
    /// size from the configured cap height.
    #[inline]
    fn write_merkle_cap<F, H>(&mut self, cap: &MerkleCap<F, H>) -> IoResult<()>
    where
        F: RichField,
        H: Hasher<F>,
    {
        for &a in &cap.0 {
            self.write_hash::<F, H>(a)?;
        }
        Ok(())
    }

    /// Writes a target Merkle cap WITH a length prefix (no cap height is
    /// available when deserializing targets).
    #[inline]
    fn write_target_merkle_cap(&mut self, cap: &MerkleCapTarget) -> IoResult<()> {
        self.write_usize(cap.0.len())?;
        for a in &cap.0 {
            self.write_target_hash(a)?;
        }
        Ok(())
    }

    /// Writes a full Merkle tree: leaf count, each leaf as (len, values),
    /// the length-prefixed digest list, then cap height and cap.
    #[inline]
    fn write_merkle_tree<F, H>(&mut self, tree: &MerkleTree<F, H>) -> IoResult<()>
    where
        F: RichField,
        H: Hasher<F>,
    {
        self.write_usize(tree.leaves.len())?;
        for i in 0..tree.leaves.len() {
            self.write_usize(tree.leaves[i].len())?;
            self.write_field_vec(&tree.leaves[i])?;
        }
        self.write_hash_vec::<F, H>(&tree.digests)?;
        self.write_usize(tree.cap.height())?;
        self.write_merkle_cap(&tree.cap)?;
        Ok(())
    }

    /// Writes the nine opening vectors of an `OpeningSet` in a fixed order;
    /// the reader must use the same order and counts.
    #[inline]
    fn write_opening_set<F, const D: usize>(&mut self, os: &OpeningSet<F, D>) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
    {
        self.write_field_ext_vec::<F, D>(&os.constants)?;
        self.write_field_ext_vec::<F, D>(&os.plonk_sigmas)?;
        self.write_field_ext_vec::<F, D>(&os.wires)?;
        self.write_field_ext_vec::<F, D>(&os.plonk_zs)?;
        self.write_field_ext_vec::<F, D>(&os.plonk_zs_next)?;
        self.write_field_ext_vec::<F, D>(&os.lookup_zs)?;
        self.write_field_ext_vec::<F, D>(&os.lookup_zs_next)?;
        self.write_field_ext_vec::<F, D>(&os.partial_products)?;
        self.write_field_ext_vec::<F, D>(&os.quotient_polys)
    }

    /// Target analogue of `write_opening_set`, same field order.
    /// (The target struct names its field `next_lookup_zs`.)
    #[inline]
    fn write_target_opening_set<const D: usize>(
        &mut self,
        os: &OpeningSetTarget<D>,
    ) -> IoResult<()> {
        self.write_target_ext_vec::<D>(&os.constants)?;
        self.write_target_ext_vec::<D>(&os.plonk_sigmas)?;
        self.write_target_ext_vec::<D>(&os.wires)?;
        self.write_target_ext_vec::<D>(&os.plonk_zs)?;
        self.write_target_ext_vec::<D>(&os.plonk_zs_next)?;
        self.write_target_ext_vec::<D>(&os.lookup_zs)?;
        self.write_target_ext_vec::<D>(&os.next_lookup_zs)?;
        self.write_target_ext_vec::<D>(&os.partial_products)?;
        self.write_target_ext_vec::<D>(&os.quotient_polys)
    }

    /// Writes a Merkle proof: sibling count as a single byte, then siblings.
    ///
    /// # Panics
    /// Panics if the proof has more than 255 siblings.
    #[inline]
    fn write_merkle_proof<F, H>(&mut self, p: &MerkleProof<F, H>) -> IoResult<()>
    where
        F: RichField,
        H: Hasher<F>,
    {
        let length = p.siblings.len();
        self.write_u8(
            length
                .try_into()
                .expect("Merkle proof length must fit in u8."),
        )?;
        for &h in &p.siblings {
            self.write_hash::<F, H>(h)?;
        }
        Ok(())
    }

    /// Target analogue of `write_merkle_proof`; same u8 length prefix.
    ///
    /// # Panics
    /// Panics if the proof has more than 255 siblings.
    #[inline]
    fn write_target_merkle_proof(&mut self, pt: &MerkleProofTarget) -> IoResult<()> {
        let length = pt.siblings.len();
        self.write_u8(
            length
                .try_into()
                .expect("Merkle proof length must fit in u8."),
        )?;
        for h in &pt.siblings {
            self.write_target_hash(h)?;
        }
        Ok(())
    }

    /// Writes the per-oracle (evals, proof) pairs of a FRI initial tree
    /// proof with NO outer count — contrast the target version below.
    #[inline]
    fn write_fri_initial_proof<F, C, const D: usize>(
        &mut self,
        fitp: &FriInitialTreeProof<F, C::Hasher>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        for (v, p) in &fitp.evals_proofs {
            self.write_field_vec(v)?;
            self.write_merkle_proof(p)?;
        }
        Ok(())
    }

    /// Target analogue of `write_fri_initial_proof`, with an explicit count.
    #[inline]
    fn write_target_fri_initial_proof(
        &mut self,
        fitpt: &FriInitialTreeProofTarget,
    ) -> IoResult<()> {
        self.write_usize(fitpt.evals_proofs.len())?;
        for (v, p) in &fitpt.evals_proofs {
            self.write_target_vec(v)?;
            self.write_target_merkle_proof(p)?;
        }
        Ok(())
    }

    /// Writes a FRI query step: evaluations (no count) then its Merkle proof.
    #[inline]
    fn write_fri_query_step<F, C, const D: usize>(
        &mut self,
        fqs: &FriQueryStep<F, C::Hasher, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        self.write_field_ext_vec::<F, D>(&fqs.evals)?;
        self.write_merkle_proof(&fqs.merkle_proof)
    }

    /// Target analogue of `write_fri_query_step`.
    #[inline]
    fn write_target_fri_query_step<const D: usize>(
        &mut self,
        fqst: &FriQueryStepTarget<D>,
    ) -> IoResult<()> {
        self.write_target_ext_vec(&fqst.evals)?;
        self.write_target_merkle_proof(&fqst.merkle_proof)
    }

    /// Writes all FRI query rounds with no round/step counts (the reader
    /// derives both from the FRI parameters).
    #[inline]
    fn write_fri_query_rounds<F, C, const D: usize>(
        &mut self,
        fqrs: &[FriQueryRound<F, C::Hasher, D>],
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        for fqr in fqrs {
            self.write_fri_initial_proof::<F, C, D>(&fqr.initial_trees_proof)?;
            for fqs in &fqr.steps {
                self.write_fri_query_step::<F, C, D>(fqs)?;
            }
        }
        Ok(())
    }

    /// Target analogue of `write_fri_query_rounds`, with explicit round and
    /// step counts.
    #[inline]
    fn write_target_fri_query_rounds<const D: usize>(
        &mut self,
        fqrst: &[FriQueryRoundTarget<D>],
    ) -> IoResult<()> {
        self.write_usize(fqrst.len())?;
        for fqr in fqrst {
            self.write_target_fri_initial_proof(&fqr.initial_trees_proof)?;
            self.write_usize(fqr.steps.len())?;
            for fqs in &fqr.steps {
                self.write_target_fri_query_step::<D>(fqs)?;
            }
        }
        Ok(())
    }

    /// Writes a FRI proof: commit-phase caps (no count), optional batch-mask
    /// proof, query rounds, final-poly chunks (no count), PoW witness.
    #[inline]
    fn write_fri_proof<F, C, const D: usize>(
        &mut self,
        fp: &FriProof<F, C::Hasher, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        for cap in &fp.commit_phase_merkle_caps {
            self.write_merkle_cap(cap)?;
        }
        self.write_optional_batch_mask_proof::<F, C, D>(&fp.batch_mask_proof)?;
        self.write_fri_query_rounds::<F, C, D>(&fp.query_round_proofs)?;
        for chunk in &fp.final_polys.chunks {
            self.write_field_ext_vec::<F, D>(&chunk.coeffs)?;
        }
        self.write_field(fp.pow_witness)
    }

    /// Target analogue of `write_fri_proof`; unlike the value version it
    /// prefixes both the cap count and the final-poly chunk count.
    #[inline]
    fn write_target_fri_proof<const D: usize>(&mut self, fpt: &FriProofTarget<D>) -> IoResult<()> {
        self.write_usize(fpt.commit_phase_merkle_caps.len())?;
        for cap in &fpt.commit_phase_merkle_caps {
            self.write_target_merkle_cap(cap)?;
        }
        self.write_optional_target_batch_mask_proof::<D>(&fpt.batch_mask_proof)?;
        self.write_target_fri_query_rounds::<D>(&fpt.query_round_proofs)?;
        self.write_usize(fpt.final_polys.chunks.len())?;
        for chunk in &fpt.final_polys.chunks {
            // Target chunks are tuple structs; `.0` is the coefficient vec.
            self.write_target_ext_vec::<D>(&chunk.0)?;
        }
        self.write_target(fpt.pow_witness)
    }

    /// Writes a reduction strategy as a tag byte plus payload:
    /// 0 = Fixed(seq), 1 = ConstantArityBits(arity, final_poly),
    /// 2 = MinSize(Option<max>) with an inner presence byte.
    fn write_fri_reduction_strategy(
        &mut self,
        reduction_strategy: &FriReductionStrategy,
    ) -> IoResult<()> {
        match reduction_strategy {
            FriReductionStrategy::Fixed(seq) => {
                self.write_u8(0)?;
                self.write_usize_vec(seq.as_slice())?;
                Ok(())
            }
            FriReductionStrategy::ConstantArityBits(arity_bits, final_poly_bits) => {
                self.write_u8(1)?;
                self.write_usize(*arity_bits)?;
                self.write_usize(*final_poly_bits)?;
                Ok(())
            }
            FriReductionStrategy::MinSize(max) => {
                self.write_u8(2)?;
                if let Some(max) = max {
                    self.write_u8(1)?;
                    self.write_usize(*max)?;
                } else {
                    self.write_u8(0)?;
                }
                Ok(())
            }
        }
    }

    /// Writes a `FriConfig` field by field. The exhaustive destructuring
    /// forces a compile error here if the struct gains a field, keeping the
    /// wire format in sync.
    fn write_fri_config(&mut self, config: &FriConfig) -> IoResult<()> {
        let FriConfig {
            rate_bits,
            cap_height,
            num_query_rounds,
            proof_of_work_bits,
            reduction_strategy,
        } = &config;
        self.write_usize(*rate_bits)?;
        self.write_usize(*cap_height)?;
        self.write_usize(*num_query_rounds)?;
        self.write_u32(*proof_of_work_bits)?;
        self.write_fri_reduction_strategy(reduction_strategy)?;
        Ok(())
    }

    /// Writes `FriParams`: config, arity bits, degree bits, hiding flag,
    /// optional batch-masking degree (presence byte first), then the
    /// final-poly layout as a tag byte (0 = Single, 1 = Split).
    fn write_fri_params(&mut self, fri_params: &FriParams) -> IoResult<()> {
        let FriParams {
            config,
            reduction_arity_bits,
            degree_bits,
            leaf_hiding,
            batch_masking,
            final_poly_layout,
        } = fri_params;
        self.write_fri_config(config)?;
        self.write_usize_vec(reduction_arity_bits.as_slice())?;
        self.write_usize(*degree_bits)?;
        self.write_bool(*leaf_hiding)?;
        self.write_bool(batch_masking.is_some())?;
        if let Some(batch_masking) = batch_masking {
            self.write_usize(batch_masking.mask_degree)?;
        }
        match final_poly_layout {
            FriFinalPolyLayout::Single => {
                self.write_u8(0)?;
            }
            FriFinalPolyLayout::Split {
                chunk_degree_bits,
                chunks,
            } => {
                self.write_u8(1)?;
                self.write_usize(*chunk_degree_bits)?;
                self.write_usize(*chunks)?;
            }
        }
        Ok(())
    }

    /// Writes the three mask-degree fields of a `PolyFriZkConfig`.
    fn write_poly_fri_zk_config(&mut self, config: &PolyFriZkConfig) -> IoResult<()> {
        self.write_usize(config.wire_mask_degree)?;
        self.write_usize(config.z_mask_degree)?;
        self.write_usize(config.fri_batch_mask_degree)?;
        Ok(())
    }

    /// Writes a `ZkConfig`: mode tag (0 = Disabled, 1 = PolyFri + payload,
    /// 2 = RowBlinding), then the leaf-hiding flag.
    fn write_zk_config(&mut self, zk_config: &ZkConfig) -> IoResult<()> {
        match &zk_config.mode {
            ZkMode::Disabled => self.write_u8(0)?,
            ZkMode::PolyFri(config) => {
                self.write_u8(1)?;
                self.write_poly_fri_zk_config(config)?;
            }
            ZkMode::RowBlinding => self.write_u8(2)?,
        }
        self.write_bool(zk_config.leaf_hiding)?;
        Ok(())
    }

    /// Writes a `CircuitConfig` field by field; the exhaustive destructuring
    /// keeps this in sync with the struct definition.
    fn write_circuit_config(&mut self, config: &CircuitConfig) -> IoResult<()> {
        let CircuitConfig {
            num_wires,
            num_routed_wires,
            num_constants,
            security_bits,
            num_challenges,
            max_quotient_degree_factor,
            use_base_arithmetic_gate,
            zk_config,
            fri_config,
        } = config;
        self.write_usize(*num_wires)?;
        self.write_usize(*num_routed_wires)?;
        self.write_usize(*num_constants)?;
        self.write_usize(*security_bits)?;
        self.write_usize(*num_challenges)?;
        self.write_usize(*max_quotient_degree_factor)?;
        self.write_bool(*use_base_arithmetic_gate)?;
        self.write_zk_config(zk_config)?;
        self.write_fri_config(fri_config)?;
        Ok(())
    }

    /// Writes one gate. Required method: implementors hand the concrete
    /// sink to the gate serializer.
    fn write_gate<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        gate: &GateRef<F, D>,
        gate_serializer: &dyn GateSerializer<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> IoResult<()>;

    /// Writes one witness generator. Required method, same pattern as
    /// `write_gate`.
    fn write_generator<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        generator: &WitnessGeneratorRef<F, D>,
        generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> IoResult<()>;

    /// Writes selector info: index vector, then each group's `Range` as its
    /// start/end pair.
    fn write_selectors_info(&mut self, selectors_info: &SelectorsInfo) -> IoResult<()> {
        let SelectorsInfo {
            selector_indices,
            groups,
        } = selectors_info;
        self.write_usize_vec(selector_indices.as_slice())?;
        self.write_usize(groups.len())?;
        for group in groups.iter() {
            self.write_usize(group.start)?;
            self.write_usize(group.end)?;
        }
        Ok(())
    }

    /// Writes a committed polynomial batch: polynomial count, each
    /// coefficient vector as (len, values), the Merkle tree, then the
    /// degree/rate/blinding metadata.
    fn write_polynomial_batch<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        poly_batch: &PolynomialBatch<F, C, D>,
    ) -> IoResult<()> {
        self.write_usize(poly_batch.polynomials.len())?;
        for i in 0..poly_batch.polynomials.len() {
            self.write_usize(poly_batch.polynomials[i].coeffs.len())?;
            self.write_field_vec(&poly_batch.polynomials[i].coeffs)?;
        }
        self.write_merkle_tree(&poly_batch.merkle_tree)?;
        self.write_usize(poly_batch.degree_log)?;
        self.write_usize(poly_batch.rate_bits)?;
        self.write_bool(poly_batch.blinding)?;
        Ok(())
    }

    /// Writes a FRI oracle layout: poly counts, then the representation as
    /// a tag byte (0 = Raw, 1 = SplitMask + split_power).
    fn write_fri_oracle_layout(&mut self, layout: &FriOracleLayout) -> IoResult<()> {
        self.write_usize(layout.raw_polys)?;
        self.write_usize(layout.logical_polys)?;
        match layout.representation {
            FriOracleRepresentation::Raw => self.write_u8(0)?,
            FriOracleRepresentation::SplitMask { split_power } => {
                self.write_u8(1)?;
                self.write_usize(split_power)?;
            }
        }
        Ok(())
    }

    /// Writes `CommonCircuitData`. Gates are serialized LAST even though
    /// destructured first — NOTE(review): presumably so the reader can
    /// reconstruct the rest of the common data before decoding gates;
    /// confirm against the read side.
    fn write_common_circuit_data<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        common_data: &CommonCircuitData<F, D>,
        gate_serializer: &dyn GateSerializer<F, D>,
    ) -> IoResult<()> {
        let CommonCircuitData {
            config,
            trace_degree_bits,
            fri_params,
            public_initial_degree_bits,
            fri_oracle_layouts,
            gates,
            selectors_info,
            quotient_degree_factor,
            num_gate_constraints,
            num_constants,
            num_public_inputs,
            k_is,
            num_partial_products,
            num_lookup_polys,
            num_lookup_selectors,
            luts,
        } = common_data;
        self.write_circuit_config(config)?;
        self.write_usize(*trace_degree_bits)?;
        self.write_fri_params(fri_params)?;
        self.write_usize(*public_initial_degree_bits)?;
        self.write_usize(fri_oracle_layouts.len())?;
        for layout in fri_oracle_layouts {
            self.write_fri_oracle_layout(layout)?;
        }
        self.write_selectors_info(selectors_info)?;
        self.write_usize(*quotient_degree_factor)?;
        self.write_usize(*num_gate_constraints)?;
        self.write_usize(*num_constants)?;
        self.write_usize(*num_public_inputs)?;
        self.write_usize(k_is.len())?;
        self.write_field_vec(k_is.as_slice())?;
        self.write_usize(*num_partial_products)?;
        self.write_usize(*num_lookup_polys)?;
        self.write_usize(*num_lookup_selectors)?;
        self.write_usize(luts.len())?;
        for lut in luts.iter() {
            self.write_lut(lut)?;
        }
        self.write_usize(gates.len())?;
        for gate in gates.iter() {
            self.write_gate::<F, D>(gate, gate_serializer, common_data)?;
        }
        Ok(())
    }

    /// Writes full circuit data in order: common, prover-only, verifier-only.
    fn write_circuit_data<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        circuit_data: &CircuitData<F, C, D>,
        gate_serializer: &dyn GateSerializer<F, D>,
        generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
    ) -> IoResult<()> {
        self.write_common_circuit_data(&circuit_data.common, gate_serializer)?;
        self.write_prover_only_circuit_data(
            &circuit_data.prover_only,
            generator_serializer,
            &circuit_data.common,
        )?;
        self.write_verifier_only_circuit_data(&circuit_data.verifier_only)
    }

    /// Writes `ProverOnlyCircuitData` field by field, in the exact order the
    /// corresponding reader expects.
    fn write_prover_only_circuit_data<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        prover_only_circuit_data: &ProverOnlyCircuitData<F, C, D>,
        generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> IoResult<()> {
        let ProverOnlyCircuitData {
            generators,
            generator_indices_by_watches,
            constants_sigmas_commitment,
            sigmas,
            subgroup,
            public_inputs,
            representative_map,
            fft_root_table,
            circuit_digest,
            lookup_rows,
            lut_to_lookups,
        } = prover_only_circuit_data;
        self.write_usize(generators.len())?;
        for generator in generators.iter() {
            self.write_generator::<F, D>(generator, generator_serializer, common_data)?;
        }
        // Map entries as (key, index-vec) pairs, preceded by the entry count.
        self.write_usize(generator_indices_by_watches.len())?;
        for (k, v) in generator_indices_by_watches {
            self.write_usize(*k)?;
            self.write_usize_vec(v)?;
        }
        self.write_polynomial_batch(constants_sigmas_commitment)?;
        self.write_usize(sigmas.len())?;
        for i in 0..sigmas.len() {
            self.write_usize(sigmas[i].len())?;
            self.write_field_vec(&sigmas[i])?;
        }
        self.write_usize(subgroup.len())?;
        self.write_field_vec(subgroup)?;
        self.write_target_vec(public_inputs)?;
        self.write_usize_vec(representative_map)?;
        // Optional FFT root table: presence byte, then (len, values) rows.
        match fft_root_table {
            Some(table) => {
                self.write_bool(true)?;
                self.write_usize(table.len())?;
                for i in 0..table.len() {
                    self.write_usize(table[i].len())?;
                    self.write_field_vec(&table[i])?;
                }
            }
            None => self.write_bool(false)?,
        }
        self.write_hash::<F, <C as GenericConfig<D>>::Hasher>(*circuit_digest)?;
        self.write_usize(lookup_rows.len())?;
        for wire in lookup_rows.iter() {
            // Order: last_lu_gate, last_lut_gate, first_lut_gate.
            self.write_usize(wire.last_lu_gate)?;
            self.write_usize(wire.last_lut_gate)?;
            self.write_usize(wire.first_lut_gate)?;
        }
        self.write_usize(lut_to_lookups.len())?;
        for tlut in lut_to_lookups.iter() {
            self.write_target_lut(tlut)?;
        }
        Ok(())
    }

    /// Writes prover circuit data: common data followed by prover-only data.
    fn write_prover_circuit_data<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        prover_circuit_data: &ProverCircuitData<F, C, D>,
        gate_serializer: &dyn GateSerializer<F, D>,
        generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
    ) -> IoResult<()> {
        self.write_common_circuit_data(&prover_circuit_data.common, gate_serializer)?;
        self.write_prover_only_circuit_data(
            &prover_circuit_data.prover_only,
            generator_serializer,
            &prover_circuit_data.common,
        )
    }

    /// Writes `VerifierOnlyCircuitData`: cap height, the cap itself, then
    /// the circuit digest.
    fn write_verifier_only_circuit_data<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        verifier_only_circuit_data: &VerifierOnlyCircuitData<C, D>,
    ) -> IoResult<()> {
        let VerifierOnlyCircuitData {
            constants_sigmas_cap,
            circuit_digest,
        } = verifier_only_circuit_data;
        self.write_usize(constants_sigmas_cap.height())?;
        self.write_merkle_cap(constants_sigmas_cap)?;
        self.write_hash::<F, <C as GenericConfig<D>>::Hasher>(*circuit_digest)?;
        Ok(())
    }

    /// Writes verifier circuit data: verifier-only first, then common data
    /// (note the order differs from `write_circuit_data`).
    fn write_verifier_circuit_data<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        const D: usize,
    >(
        &mut self,
        verifier_circuit_data: &VerifierCircuitData<F, C, D>,
        gate_serializer: &dyn GateSerializer<F, D>,
    ) -> IoResult<()> {
        self.write_verifier_only_circuit_data(&verifier_circuit_data.verifier_only)?;
        self.write_common_circuit_data(&verifier_circuit_data.common, gate_serializer)
    }

    /// Writes a `VerifierCircuitTarget`: target cap, then target digest.
    fn write_target_verifier_circuit(
        &mut self,
        verifier_circuit: &VerifierCircuitTarget,
    ) -> IoResult<()> {
        let VerifierCircuitTarget {
            constants_sigmas_cap,
            circuit_digest,
        } = verifier_circuit;
        self.write_target_merkle_cap(constants_sigmas_cap)?;
        self.write_target_hash(circuit_digest)?;
        Ok(())
    }

    /// Writes an uncompressed proof: three caps, the opening set, then the
    /// FRI proof.
    #[inline]
    fn write_proof<F, C, const D: usize>(&mut self, proof: &Proof<F, C, D>) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        self.write_merkle_cap(&proof.wires_cap)?;
        self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?;
        self.write_merkle_cap(&proof.quotient_polys_cap)?;
        self.write_opening_set(&proof.openings)?;
        self.write_fri_proof::<F, C, D>(&proof.opening_proof)
    }

    /// Target analogue of `write_proof`.
    #[inline]
    fn write_target_proof<const D: usize>(&mut self, proof: &ProofTarget<D>) -> IoResult<()> {
        self.write_target_merkle_cap(&proof.wires_cap)?;
        self.write_target_merkle_cap(&proof.plonk_zs_partial_products_cap)?;
        self.write_target_merkle_cap(&proof.quotient_polys_cap)?;
        self.write_target_opening_set(&proof.openings)?;
        self.write_target_fri_proof::<D>(&proof.opening_proof)
    }

    /// Writes a proof plus a length-prefixed public-input vector. (The
    /// compressed variant below omits the length; its reader infers the
    /// count from the remaining byte budget.)
    #[inline]
    fn write_proof_with_public_inputs<F, C, const D: usize>(
        &mut self,
        proof_with_pis: &ProofWithPublicInputs<F, C, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        let ProofWithPublicInputs {
            proof,
            public_inputs,
        } = proof_with_pis;
        self.write_proof(proof)?;
        self.write_usize(public_inputs.len())?;
        self.write_field_vec(public_inputs)
    }

    /// Target analogue of `write_proof_with_public_inputs`.
    #[inline]
    fn write_target_proof_with_public_inputs<const D: usize>(
        &mut self,
        proof_with_pis: &ProofWithPublicInputsTarget<D>,
    ) -> IoResult<()> {
        let ProofWithPublicInputsTarget {
            proof,
            public_inputs,
        } = proof_with_pis;
        self.write_target_proof(proof)?;
        self.write_target_vec(public_inputs)
    }

    /// Writes compressed FRI query rounds: query indices as `u32`s, then
    /// map entries sorted by key so the byte stream is deterministic
    /// regardless of map iteration order.
    #[inline]
    fn write_compressed_fri_query_rounds<F, C, const D: usize>(
        &mut self,
        cfqrs: &CompressedFriQueryRounds<F, C::Hasher, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        for &i in &cfqrs.indices {
            self.write_u32(i as u32)?;
        }
        let mut initial_trees_proofs = cfqrs.initial_trees_proofs.iter().collect::<Vec<_>>();
        initial_trees_proofs.sort_by_key(|&x| x.0);
        for (_, itp) in initial_trees_proofs {
            self.write_fri_initial_proof::<F, C, D>(itp)?;
        }
        for h in &cfqrs.steps {
            let mut fri_query_steps = h.iter().collect::<Vec<_>>();
            fri_query_steps.sort_by_key(|&x| x.0);
            for (_, fqs) in fri_query_steps {
                self.write_fri_query_step::<F, C, D>(fqs)?;
            }
        }
        Ok(())
    }

    /// Writes a compressed FRI proof; same section order as
    /// `write_fri_proof` but with compressed query rounds.
    #[inline]
    fn write_compressed_fri_proof<F, C, const D: usize>(
        &mut self,
        fp: &CompressedFriProof<F, C::Hasher, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        for cap in &fp.commit_phase_merkle_caps {
            self.write_merkle_cap(cap)?;
        }
        self.write_optional_batch_mask_proof::<F, C, D>(&fp.batch_mask_proof)?;
        self.write_compressed_fri_query_rounds::<F, C, D>(&fp.query_round_proofs)?;
        for chunk in &fp.final_polys.chunks {
            self.write_field_ext_vec::<F, D>(&chunk.coeffs)?;
        }
        self.write_field(fp.pow_witness)
    }

    /// Writes an optional batch-mask proof: presence byte, then (when
    /// present) the cap and each query's values + Merkle proof. No query
    /// count is written — the reader uses `num_query_rounds`.
    fn write_optional_batch_mask_proof<F, C, const D: usize>(
        &mut self,
        batch_mask_proof: &Option<FriBatchMaskProof<F, C::Hasher, D>>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        self.write_bool(batch_mask_proof.is_some())?;
        if let Some(batch_mask_proof) = batch_mask_proof {
            self.write_merkle_cap(&batch_mask_proof.cap)?;
            for query_opening in &batch_mask_proof.query_openings {
                self.write_field_ext_vec::<F, D>(&query_opening.values)?;
                self.write_merkle_proof(&query_opening.merkle_proof)?;
            }
        }
        Ok(())
    }

    /// Target analogue of `write_optional_batch_mask_proof`, with an
    /// explicit query count.
    fn write_optional_target_batch_mask_proof<const D: usize>(
        &mut self,
        batch_mask_proof: &Option<FriBatchMaskProofTarget<D>>,
    ) -> IoResult<()> {
        self.write_bool(batch_mask_proof.is_some())?;
        if let Some(batch_mask_proof) = batch_mask_proof {
            self.write_target_merkle_cap(&batch_mask_proof.cap)?;
            self.write_usize(batch_mask_proof.query_openings.len())?;
            for query_opening in &batch_mask_proof.query_openings {
                self.write_target_ext_vec::<D>(&query_opening.values)?;
                self.write_target_merkle_proof(&query_opening.merkle_proof)?;
            }
        }
        Ok(())
    }

    /// Writes a compressed proof; same layout as `write_proof` but with a
    /// compressed FRI proof.
    #[inline]
    fn write_compressed_proof<F, C, const D: usize>(
        &mut self,
        proof: &CompressedProof<F, C, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        self.write_merkle_cap(&proof.wires_cap)?;
        self.write_merkle_cap(&proof.plonk_zs_partial_products_cap)?;
        self.write_merkle_cap(&proof.quotient_polys_cap)?;
        self.write_opening_set(&proof.openings)?;
        self.write_compressed_fri_proof::<F, C, D>(&proof.opening_proof)
    }

    /// Writes a compressed proof plus public inputs WITHOUT a length prefix;
    /// `read_compressed_proof_with_public_inputs` recovers the count from
    /// the remaining bytes.
    #[inline]
    fn write_compressed_proof_with_public_inputs<F, C, const D: usize>(
        &mut self,
        proof_with_pis: &CompressedProofWithPublicInputs<F, C, D>,
    ) -> IoResult<()>
    where
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
    {
        let CompressedProofWithPublicInputs {
            proof,
            public_inputs,
        } = proof_with_pis;
        self.write_compressed_proof(proof)?;
        self.write_field_vec(public_inputs)
    }

    /// Writes a length-prefixed lookup table of `u16` pairs.
    #[inline]
    fn write_lut(&mut self, lut: &[(u16, u16)]) -> IoResult<()> {
        self.write_usize(lut.len())?;
        for (a, b) in lut.iter() {
            self.write_u16(*a)?;
            self.write_u16(*b)?;
        }
        Ok(())
    }

    /// Writes a length-prefixed lookup table of `Target` pairs.
    #[inline]
    fn write_target_lut(&mut self, lut: &[(Target, Target)]) -> IoResult<()> {
        self.write_usize(lut.len())?;
        for (a, b) in lut.iter() {
            self.write_target(*a)?;
            self.write_target(*b)?;
        }
        Ok(())
    }
}
/// In-memory sink: serialization simply appends to the vector.
impl Write for Vec<u8> {
    // Appending to a `Vec` cannot fail.
    type Error = Infallible;

    /// Appends `bytes` to the buffer; always succeeds.
    #[inline]
    fn write_all(&mut self, data: &[u8]) -> IoResult<()> {
        self.extend_from_slice(data);
        Ok(())
    }

    /// Delegates gate serialization to the caller-provided serializer.
    fn write_gate<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        gate_ref: &GateRef<F, D>,
        serializer: &dyn GateSerializer<F, D>,
        common: &CommonCircuitData<F, D>,
    ) -> IoResult<()> {
        serializer.write_gate(self, gate_ref, common)
    }

    /// Delegates generator serialization to the caller-provided serializer.
    fn write_generator<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        generator_ref: &WitnessGeneratorRef<F, D>,
        serializer: &dyn WitnessGeneratorSerializer<F, D>,
        common: &CommonCircuitData<F, D>,
    ) -> IoResult<()> {
        serializer.write_generator(self, generator_ref, common)
    }
}
/// A non-owning cursor over a byte slice, used as the `Read` source for
/// deserialization.
#[derive(Debug)]
pub struct Buffer<'a> {
    // The full underlying byte slice; never mutated.
    bytes: &'a [u8],
    // Index of the next unread byte in `bytes`.
    pos: usize,
}
impl<'a> Buffer<'a> {
    /// Wraps `bytes` in a reader whose cursor starts at the beginning.
    #[inline]
    pub const fn new(bytes: &'a [u8]) -> Self {
        Self { bytes, pos: 0 }
    }

    /// Number of bytes consumed so far.
    #[inline]
    pub const fn pos(&self) -> usize {
        self.pos
    }

    /// The full underlying slice, including already-consumed bytes.
    #[inline]
    pub const fn bytes(&self) -> &'a [u8] {
        self.bytes
    }

    /// The suffix of the buffer that has not been consumed yet.
    #[inline]
    pub fn unread_bytes(&self) -> &'a [u8] {
        &self.bytes[self.pos..]
    }
}
impl Remaining for Buffer<'_> {
    /// Bytes not yet consumed: total length minus the cursor position.
    fn remaining(&self) -> usize {
        self.bytes.len() - self.pos
    }
}
impl Read for Buffer<'_> {
    /// Copies exactly `out.len()` bytes from the cursor and advances it.
    /// Fails with `IoError` when fewer bytes remain, leaving the cursor
    /// untouched.
    #[inline]
    fn read_exact(&mut self, out: &mut [u8]) -> IoResult<()> {
        let needed = out.len();
        if self.remaining() < needed {
            return Err(IoError);
        }
        let start = self.pos;
        self.pos += needed;
        out.copy_from_slice(&self.bytes[start..self.pos]);
        Ok(())
    }

    /// Delegates gate deserialization to the caller-provided serializer.
    fn read_gate<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        gate_serializer: &dyn GateSerializer<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> IoResult<GateRef<F, D>> {
        gate_serializer.read_gate(self, common_data)
    }

    /// Delegates generator deserialization to the caller-provided serializer.
    fn read_generator<F: RichField + Extendable<D>, const D: usize>(
        &mut self,
        generator_serializer: &dyn WitnessGeneratorSerializer<F, D>,
        common_data: &CommonCircuitData<F, D>,
    ) -> IoResult<WitnessGeneratorRef<F, D>> {
        generator_serializer.read_generator(self, common_data)
    }
}