use bincode::Options;
use nova_snark::{
nova::{CompressedSNARK, ProverKey, PublicParams, VerifierKey},
provider::{ipa_pc, PallasEngine, VestaEngine},
spartan::snark::RelaxedR1CSSNARK,
traits::Engine,
};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::sync::Arc;
// Nova operates over a 2-cycle of curves: Pallas as the primary engine,
// Vesta as the secondary.
type E1 = PallasEngine;
type E2 = VestaEngine;
// Inner-product-argument polynomial-commitment evaluation engines, one per curve.
type EE1 = ipa_pc::EvaluationEngine<E1>;
type EE2 = ipa_pc::EvaluationEngine<E2>;
// Spartan-style SNARKs used by CompressedSNARK on each side of the cycle.
type S1 = RelaxedR1CSSNARK<E1, EE1>;
type S2 = RelaxedR1CSSNARK<E2, EE2>;
/// Scalar field of the primary engine; circuit values, seeds and Merkle
/// roots in this crate are elements of this field.
pub type FieldElement = <E1 as Engine>::Scalar;
// The proof-of-retrievability step circuit proved via Nova.
type C = crate::circuit::PorCircuit<FieldElement>;
/// 32-byte identifier of a [`Challenge`], produced by [`Challenge::id`] as a
/// SHA-256 digest over the challenge's distinguishing fields.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ChallengeID(pub [u8; 32]);
/// A proof of retrievability: the compressed Nova SNARK together with the
/// IDs of the challenges it answers. Framed serialization is provided by
/// [`Proof::to_bytes`] / [`Proof::from_bytes`].
#[derive(Serialize, Deserialize)]
pub struct Proof {
    pub compressed_snark: CompressedSNARK<E1, E2, C, S1, S2>,
    pub challenge_ids: Vec<ChallengeID>,
}
/// Wire framing for serialized [`Proof`]s:
/// `MAGIC (4 bytes) || VERSION (u16 LE) || payload length (u32 LE) || payload`.
mod proof_format {
    /// File-type marker at the start of every serialized proof.
    pub const MAGIC: &[u8] = b"NPOR";
    /// Current format version; bump on any incompatible layout change.
    pub const VERSION: u16 = 1;
    /// Total header size: magic + version (u16) + length (u32).
    /// Derived from the components so it cannot drift out of sync with them.
    pub const HEADER_SIZE: usize =
        MAGIC.len() + std::mem::size_of::<u16>() + std::mem::size_of::<u32>();
}
impl Proof {
pub fn to_bytes(&self) -> crate::Result<Vec<u8>> {
use crate::KontorPoRError;
let mut result = Vec::new();
result.extend_from_slice(proof_format::MAGIC);
result.extend_from_slice(&proof_format::VERSION.to_le_bytes());
let options = bincode::DefaultOptions::new()
.with_fixint_encoding()
.with_little_endian()
.reject_trailing_bytes();
let proof_bytes = options.serialize(self).map_err(|e| {
KontorPoRError::Serialization(format!("Failed to serialize proof: {}", e))
})?;
let length = proof_bytes.len() as u32;
result.extend_from_slice(&length.to_le_bytes());
result.extend_from_slice(&proof_bytes);
Ok(result)
}
pub fn from_bytes(bytes: &[u8]) -> crate::Result<Self> {
use crate::KontorPoRError;
if bytes.len() < proof_format::HEADER_SIZE {
return Err(KontorPoRError::Serialization(
"Proof bytes too short for header".to_string(),
));
}
let magic = &bytes[0..4];
if magic != proof_format::MAGIC {
return Err(KontorPoRError::Serialization(
"Invalid magic bytes in proof".to_string(),
));
}
let version = u16::from_le_bytes([bytes[4], bytes[5]]);
if version != proof_format::VERSION {
return Err(KontorPoRError::Serialization(format!(
"Unsupported proof format version: {}",
version
)));
}
let length = u32::from_le_bytes([bytes[6], bytes[7], bytes[8], bytes[9]]) as usize;
let expected_len = proof_format::HEADER_SIZE + length;
if bytes.len() < expected_len {
return Err(KontorPoRError::Serialization(
"Proof bytes truncated".to_string(),
));
}
if bytes.len() > expected_len {
return Err(KontorPoRError::Serialization(
"Proof bytes contain trailing data".to_string(),
));
}
let proof_bytes = &bytes[proof_format::HEADER_SIZE..expected_len];
let options = bincode::DefaultOptions::new()
.with_fixint_encoding()
.with_little_endian()
.reject_trailing_bytes();
let proof = options.deserialize(proof_bytes).map_err(|e| {
KontorPoRError::Serialization(format!("Failed to deserialize proof: {}", e))
})?;
Ok(proof)
}
}
/// Prover/verifier key pair for the compressed SNARK; both keys sit behind
/// `Arc`s, so cloning is a reference-count bump.
#[derive(Clone)]
pub struct KeyPair {
    // Proving key used to produce CompressedSNARKs.
    pub(crate) pk: Arc<ProverKey<E1, E2, C, S1, S2>>,
    // Verification key used to check them.
    pub(crate) vk: Arc<VerifierKey<E1, E2, C, S1, S2>>,
}
/// Everything needed to produce and verify proofs for one configuration:
/// Nova public parameters, the SNARK key pair, and the tree-depth settings.
///
/// The hand-written `Clone` impl was identical to what `#[derive(Clone)]`
/// generates (all heavyweight fields are `Arc`-wrapped), so it is derived.
/// Cloning only bumps reference counts and copies three `usize`s.
#[derive(Clone)]
pub struct PorParams {
    // Nova public parameters; expensive to generate, shared via Arc.
    pub(crate) pp: Arc<PublicParams<E1, E2, C>>,
    // SNARK prover/verifier key pair.
    pub(crate) keys: KeyPair,
    /// Merkle depth of an individual file tree.
    pub file_tree_depth: usize,
    /// Maximum tree depth these parameters support.
    pub max_supported_depth: usize,
    /// Depth of the aggregated tree spanning multiple files.
    pub aggregated_tree_depth: usize,
}
/// Public metadata describing a prepared (padded and Merkle-committed) file.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct FileMetadata {
    /// Merkle root committing to the file's contents.
    pub root: FieldElement,
    /// Application-level identifier; absorbed into [`Challenge::id`].
    pub file_id: String,
    /// Leaf count after padding; presumably a power of two (see `depth()`,
    /// which takes `trailing_zeros` as the log2) — confirm at the call site.
    pub padded_len: usize,
    /// Size of the original file in bytes, before padding/encoding.
    pub original_size: usize,
    /// Human-readable file name; not part of the challenge-ID transcript.
    pub filename: String,
}
impl FileMetadata {
    /// Number of raw data symbols needed for `original_size` bytes; a final
    /// partial chunk rounds up to a whole symbol.
    pub fn num_data_symbols(&self) -> usize {
        let chunk_size = crate::config::CHUNK_SIZE_BYTES;
        self.original_size.div_ceil(chunk_size)
    }

    /// Number of codewords required to cover every data symbol.
    pub fn num_codewords(&self) -> usize {
        let per_codeword = crate::config::DATA_SYMBOLS_PER_CODEWORD;
        self.num_data_symbols().div_ceil(per_codeword)
    }

    /// Total symbol count across all codewords after encoding.
    pub fn total_symbols(&self) -> usize {
        crate::config::TOTAL_SYMBOLS_PER_CODEWORD * self.num_codewords()
    }

    /// Tree depth implied by `padded_len`, with an empty file mapping to 0.
    /// NOTE(review): `trailing_zeros` equals log2 only when `padded_len` is a
    /// power of two — presumably guaranteed by the padding step; confirm.
    pub fn depth(&self) -> usize {
        match self.padded_len {
            0 => 0,
            len => len.trailing_zeros() as usize,
        }
    }
}
/// A file that has been ingested and committed: the full Merkle tree kept for
/// proof generation, plus its public identifier and root.
#[derive(Debug, Clone)]
pub struct PreparedFile {
    // Full Merkle tree over the file's symbols (crate-internal; needed to
    // open challenged leaves).
    pub(crate) tree: crate::merkle::MerkleTree,
    /// Application-level identifier for the file.
    pub file_id: String,
    /// Public Merkle root for this file.
    pub root: FieldElement,
}
/// Parameters of a proof-of-retrievability challenge issued against one file.
/// All fields except `file_metadata.filename` feed into [`Challenge::id`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Challenge {
    /// Metadata of the file being challenged.
    pub file_metadata: FileMetadata,
    /// Chain height at which the challenge was issued.
    pub block_height: u64,
    /// How many openings the prover must answer.
    pub num_challenges: usize,
    /// Field-element randomness seeding the challenge.
    pub seed: FieldElement,
    /// Identifier of the prover this challenge targets.
    pub prover_id: String,
}
impl Challenge {
    /// Assembles a challenge from its constituent parts.
    pub fn new(
        file_metadata: FileMetadata,
        block_height: u64,
        num_challenges: usize,
        seed: FieldElement,
        prover_id: String,
    ) -> Self {
        Self {
            prover_id,
            seed,
            num_challenges,
            block_height,
            file_metadata,
        }
    }

    /// Test-only convenience constructor with a fixed prover identifier.
    #[doc(hidden)]
    pub fn new_test(
        file_metadata: FileMetadata,
        block_height: u64,
        num_challenges: usize,
        seed: FieldElement,
    ) -> Self {
        Self::new(
            file_metadata,
            block_height,
            num_challenges,
            seed,
            "test_prover".to_owned(),
        )
    }

    /// Derives the deterministic 32-byte identifier for this challenge: the
    /// SHA-256 digest of a transcript containing a Poseidon domain tag and
    /// every field that distinguishes one challenge from another.
    ///
    /// NOTE(review): the variable-length fields (`file_id`, `prover_id`) are
    /// absorbed without a length prefix, so in principle different field
    /// combinations could yield the same transcript — confirm the protocol
    /// constrains these values enough to rule that out.
    pub fn id(&self) -> ChallengeID {
        use crate::poseidon::domain_tags;
        use ff::PrimeField;
        let tag: FieldElement = domain_tags::challenge_id();
        let depth_bits = self.file_metadata.padded_len.trailing_zeros() as u64;
        let mut transcript: Vec<u8> = Vec::new();
        transcript.extend_from_slice(tag.to_repr().as_ref());
        transcript.extend_from_slice(&self.block_height.to_le_bytes());
        transcript.extend_from_slice(self.seed.to_repr().as_ref());
        transcript.extend_from_slice(self.file_metadata.file_id.as_bytes());
        transcript.extend_from_slice(self.file_metadata.root.to_repr().as_ref());
        transcript.extend_from_slice(&depth_bits.to_le_bytes());
        transcript.extend_from_slice(&(self.num_challenges as u64).to_le_bytes());
        transcript.extend_from_slice(self.prover_id.as_bytes());
        // One-shot digest of the transcript; identical to the streamed
        // `update` calls by SHA-256's incremental-hashing property.
        let digest = Sha256::digest(&transcript);
        ChallengeID(digest.into())
    }
}