use crate::{
    channel::*, constraints::Constraints, polynomial::DensePolynomial, proof_of_work, Proof,
};
#[cfg(feature = "std")]
use std::error;
use std::{collections::BTreeMap, convert::TryInto, fmt, prelude::v1::*};
use zkp_hash::Hash;
use zkp_merkle_tree::{Commitment, Error as MerkleError, Proof as MerkleProof};
use zkp_primefield::{fft, geometric_series::root_series, FieldElement};
use zkp_u256::U256;

type Result<T> = std::result::Result<T, Error>;

// TODO - We could parametrize `RootUnavailable` with the size asked for, and the
// FRI error with which layer failed.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Error {
    RootUnavailable,
    InvalidPoW,
    InvalidLDECommitment,
    InvalidConstraintCommitment,
    InvalidFriCommitment,
    HashMapFailure,
    ProofTooLong,
    OodsCalculationFailure,
    OodsMismatch,
    FriCalculationFailure,
    Merkle(MerkleError),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use Error::*;
        match *self {
            RootUnavailable => write!(f, "The prime field doesn't have a root of this order"),
            InvalidPoW => write!(f, "The suggested proof of work failed to verify"),
            InvalidLDECommitment => write!(f, "The LDE merkle proof is incorrect"),
            InvalidConstraintCommitment => write!(f, "The constraint merkle proof is incorrect"),
            InvalidFriCommitment => write!(f, "A FRI layer commitment is incorrect"),
            HashMapFailure => {
                write!(
                    f,
                    "Verifier attempted to look up an empty entry in the hash map"
                )
            }
            ProofTooLong => write!(f, "The proof length doesn't match the specification"),
            OodsCalculationFailure => {
                write!(
                    f,
                    "The calculated oods value doesn't match the committed one"
                )
            }
            FriCalculationFailure => {
                write!(
                    f,
                    "The final FRI calculation suggests the committed polynomial isn't low degree"
                )
            }
            OodsMismatch => write!(f, "Calculated oods value doesn't match the committed one"),
            // This is a wrapper, so defer to the underlying types' implementation of `fmt`.
            Merkle(ref e) => std::fmt::Display::fmt(e, f),
        }
    }
}

impl From<MerkleError> for Error {
    fn from(err: MerkleError) -> Self {
        Self::Merkle(err)
    }
}

// False positives on the LaTeX math.
#[allow(clippy::doc_markdown)]
/// # Stark verify
///
/// ## Input
///
/// A `VerifierChannel` containing the proof.
/// A `ConstraintSystem` which captures the claim that is made.
/// A `ProofParams` object which configures the proof.
///
/// ## Verification process
///
/// ### Step 1: Read all commitments and draw random values
///
/// * Read the trace polynomial commitment.
/// * Draw the constraint combination coefficients $\alpha_i$ and $\beta_i$.
/// * Read the combined constraint polynomial commitment.
/// * Draw the deep point $z$.
/// * Read the deep values of the trace polynomials $T_i(z)$, $T_i(\omega \cdot
/// z)$.
/// * Read the deep values of the combined constraint polynomial
/// $A_i(z^{\mathrm{d}})$.
/// * Draw the coefficients for the final combination $\alpha_i$, $\beta_i$ and
/// $\gamma_i$.
/// * Read the final polynomial commitment.
/// * Draw the FRI folding coefficient.
/// * Repeatedly read the FRI layer commitments and folding coefficients.
/// * Read the final FRI polynomial.
///
/// ### Step 2: Verify proof of work
///
/// * Draw proof of work challenge.
/// * Read proof of work solution.
/// * Verify proof of work solution.
///
/// ### Step 3: Read query decommitments
///
/// * Draw query indices
/// * Read evaluations of trace polynomial
/// $T_0(x_0), T_1(x_0), \dots, T_0(x_1), T_1(x_1), \dots$
/// * Read and verify merkle decommitments for trace polynomial (sketched below)
/// * Read evaluations of the combined constraint polynomial
/// $A_0(x_0), A_1(x_0), \dots, A_0(x_1), A_1(x_1), \dots$
/// * Read and verify merkle decommitments for combined constraint polynomial
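///
/// Reading and checking one decommitment looks like this (a sketch of the
/// trace-polynomial case, mirroring the body of `verify`):
///
/// ```ignore
/// let lde_values: Vec<(usize, Vec<U256>)> = queries
///     .iter()
///     .map(|&index| (index, Replayable::<U256>::replay_many(&mut channel, trace_cols)))
///     .collect();
/// let lde_proof_length = lde_commitment.proof_size(&queries)?;
/// let lde_hashes = Replayable::<Hash>::replay_many(&mut channel, lde_proof_length);
/// let lde_proof = MerkleProof::from_hashes(&lde_commitment, &queries, &lde_hashes)?;
/// if lde_proof.verify(&lde_values).is_err() {
///     return Err(Error::InvalidLDECommitment);
/// }
/// ```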
///
/// ### Step 4: FRI decommitments and final layer verification
///
/// <!-- TODO -->
///
/// ### Step 5: Verify deep point evaluation
///
/// Using the disclosed values of $T_i(z)$ and $T_i(\omega \cdot z)$, compute
/// the combined constraint polynomial at the deep point $C(z)$.
///
/// $$
/// C(z) = \sum_i (\alpha_i + \beta_i \cdot z^{d_i}) \cdot C_i(z, T_0(z),
/// T_0(\omega \cdot z), T_1(z), \dots)
/// $$
///
/// Using the disclosed values of $A_i(z^{\mathrm{d}})$, compute $C'(z)$.
///
/// $$
/// C'(z) = \sum_i z^i \cdot A_i(z^{\mathrm{d}})
/// $$
///
/// Verify that $C(z) = C'(z)$.
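///
/// In the implementation this check happens at the very end of `verify`, using
/// the two helpers defined at the bottom of this file; roughly (the
/// intermediate names here are illustrative):
///
/// ```ignore
/// let (trace_values, constraint_values) = oods_values.split_at(2 * trace_cols);
/// let c_z = oods_value_from_trace_values(
///     &constraints, &constraint_coefficients, trace_values, &oods_point);
/// let c_prime_z = oods_value_from_constraint_values(constraint_values, &oods_point);
/// if c_z != c_prime_z {
///     return Err(Error::OodsMismatch);
/// }
/// ```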
///
/// ### Step 6: Compute first FRI layer values
///
/// Divide out the deep point from the trace and constraint decommitments
///
/// $$
/// T_i'(x_j) = \frac{T_i(x_j) - T_i(z)}{x_j - z}
/// $$
///
/// $$
/// T_i''(x_j) = \frac{T_i(x_j) - T_i(\omega \cdot z)}{x_j - \omega \cdot z}
/// $$
///
/// $$
/// A_i'(x_j) = \frac{A_i(x_j) - A_i(z^{\mathrm{d}})}{x_j - z^{\mathrm{d}}}
/// $$
///
/// and combine to create evaluations of the final polynomial $P(x_i)$.
///
/// $$
/// P(x_j) = \sum_i \left(\alpha_i \cdot T_i'(x_j) + \beta_i \cdot T_i''(x_j)
/// \right) + \sum_i \gamma_i \cdot A_i'(x_j)
/// $$
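///
/// Per query this combination is computed by `out_of_domain_element` near the
/// bottom of this file; schematically (a sketch with names shortened for
/// readability):
///
/// ```ignore
/// let mut p = FieldElement::ZERO;
/// for i in 0..trace_cols {
///     p += &coeff[2 * i] * (&trace[i] - &oods[2 * i]) / (&x - z);
///     p += &coeff[2 * i + 1] * (&trace[i] - &oods[2 * i + 1]) / (&x - &omega * z);
/// }
/// for (j, a) in constraint_values.iter().enumerate() {
///     p += &coeff[2 * trace_cols + j] * (a - &oods[2 * trace_cols + j])
///         / (&x - z.pow(constraint_values.len()));
/// }
/// ```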
///
/// ### Step 7: Verify FRI proof
///
/// * Draw coefficient
/// * Reduce layer $n$ times (see the folding sketch below)
/// * Read and verify decommitments
/// * Repeat
/// * Evaluate the final layer
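///
/// Each pairwise reduction combines $P(x)$ and $P(-x)$ into an evaluation of
/// the folded polynomial at $x^2$; see `fri_single_fold` below, which computes
///
/// ```ignore
/// (poly_at_x + poly_at_neg_x) + eval_point / x * (poly_at_x - poly_at_neg_x)
/// ```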
///
/// <!-- TODO: elaborate FRI verification -->
// TODO: Refactor into smaller function
#[allow(clippy::too_many_lines)]
pub fn verify(constraints: &Constraints, proof: &Proof) -> Result<()> {
    let proof = proof.as_bytes();
    let trace_length = constraints.trace_nrows();
    let trace_cols = constraints.trace_ncolumns();
    let eval_domain_size = trace_length * constraints.blowup;
    let eval_x = root_series(eval_domain_size).collect::<Vec<_>>();

    let mut channel = VerifierChannel::new(proof.to_vec());
    channel.initialize(constraints.channel_seed());

    // Get the low degree extension root commitment and the constraint root commitment
    // TODO: Make it work as channel.read()
    let low_degree_extension_root = Replayable::<Hash>::replay(&mut channel);
    let lde_commitment = Commitment::from_size_hash(eval_domain_size, &low_degree_extension_root)?;
    let mut constraint_coefficients: Vec<FieldElement> = Vec::with_capacity(2 * constraints.len());
    for _ in 0..constraints.len() {
        constraint_coefficients.push(channel.get_random());
        constraint_coefficients.push(channel.get_random());
    }
    let constraint_evaluated_root = Replayable::<Hash>::replay(&mut channel);
    let constraint_commitment =
        Commitment::from_size_hash(eval_domain_size, &constraint_evaluated_root)?;

    // Get the oods point (drawn from the channel) and the oods values (read from
    // the proof)
    let oods_point: FieldElement = channel.get_random();
    let constraints_trace_degree = constraints.degree();
    let mut oods_values: Vec<FieldElement> =
        Vec::with_capacity(2 * trace_cols + constraints_trace_degree);
    for _ in 0..(2 * trace_cols + constraints_trace_degree) {
        oods_values.push(Replayable::<FieldElement>::replay(&mut channel));
    }
    let mut oods_coefficients: Vec<FieldElement> =
        Vec::with_capacity(2 * trace_cols + constraints_trace_degree);
    for _ in 0..2 * trace_cols + constraints_trace_degree {
        oods_coefficients.push(channel.get_random());
    }

    let mut fri_commitments: Vec<Commitment> = Vec::with_capacity(constraints.fri_layout.len() + 1);
    let mut eval_points: Vec<FieldElement> = Vec::with_capacity(constraints.fri_layout.len() + 1);
    let mut fri_size = eval_domain_size >> constraints.fri_layout[0];
    // Get first fri root:
    fri_commitments.push(Commitment::from_size_hash(
        fri_size,
        &Replayable::<Hash>::replay(&mut channel),
    )?);
    // Get the remaining fri roots and eval points from the channel
    for &x in constraints.fri_layout.iter().skip(1) {
        fri_size >>= x;
        // TODO: When is x equal to zero?
        let eval_point = if x == 0 {
            FieldElement::ONE
        } else {
            channel.get_random()
        };
        eval_points.push(eval_point);
        fri_commitments.push(Commitment::from_size_hash(
            fri_size,
            &Replayable::<Hash>::replay(&mut channel),
        )?);
    }
    // Get the final eval point and the last layer polynomial coefficients
    eval_points.push(channel.get_random());
    let last_layer_coefficient: Vec<FieldElement> =
        Replayable::<FieldElement>::replay_many(&mut channel, fri_size / constraints.blowup);

    // Get the proof of work response from the proof and verify it.
    let pow_seed: proof_of_work::ChallengeSeed = channel.get_random();
    let pow_challenge = pow_seed.with_difficulty(constraints.pow_bits);
    let pow_response = Replayable::<proof_of_work::Response>::replay(&mut channel);
    if !pow_challenge.verify(pow_response) {
        return Err(Error::InvalidPoW);
    }

    // Get the query indices from the channel
    let queries = get_indices(
        constraints.num_queries,
        eval_domain_size.trailing_zeros(),
        &mut channel,
    );

    // Get values and check decommitment of low degree extension
    let lde_values: Vec<(usize, Vec<U256>)> = queries
        .iter()
        .map(|&index| {
            let held = Replayable::<U256>::replay_many(&mut channel, trace_cols);
            (index, held)
        })
        .collect();
    let lde_proof_length = lde_commitment.proof_size(&queries)?;
    let lde_hashes = Replayable::<Hash>::replay_many(&mut channel, lde_proof_length);
    let lde_proof = MerkleProof::from_hashes(&lde_commitment, &queries, &lde_hashes)?;
    // Note - we could express this as a merkle error instead, but this adds specificity
    if lde_proof.verify(&lde_values).is_err() {
        return Err(Error::InvalidLDECommitment);
    }

    // Get the values and check the constraint decommitment
    let mut constraint_values = Vec::with_capacity(queries.len());
    for query_index in &queries {
        constraint_values.push((
            *query_index,
            Replayable::<FieldElement>::replay_many(&mut channel, constraints_trace_degree),
        ));
    }
    let constraint_proof_length = constraint_commitment.proof_size(&queries)?;
    let constraint_hashes: Vec<Hash> =
        Replayable::<Hash>::replay_many(&mut channel, constraint_proof_length);
    let constraint_proof =
        MerkleProof::from_hashes(&constraint_commitment, &queries, &constraint_hashes)?;
    // Note - we could express this as a merkle error instead, but this adds specificity
    if constraint_proof.verify(&constraint_values).is_err() {
        return Err(Error::InvalidConstraintCommitment);
    }

    let coset_sizes = constraints
        .fri_layout
        .iter()
        .map(|k| 1_usize << k)
        .collect::<Vec<_>>();
    let mut fri_indices: Vec<usize> = queries
        .iter()
        .map(|x| x / coset_sizes[0])
        .collect();

    // Folded fri values from the previous layer
    let mut fri_folds: BTreeMap<usize, FieldElement> = BTreeMap::new();

    let mut previous_indices = queries.clone();
    let mut step = 1;
    let mut len = eval_domain_size;
    for (k, commitment) in fri_commitments.iter().enumerate() {
        let mut fri_layer_values = Vec::new();

        fri_indices.dedup();
        for i in &fri_indices {
            let mut coset: Vec<FieldElement> = Vec::new();
            for j in 0..coset_sizes[k] {
                let n = i * coset_sizes[k] + j;
                if let Ok(z) = previous_indices.binary_search(&n) {
                    if k > 0 {
                        coset.push(match fri_folds.get(&n) {
                            Some(x) => x.clone(),
                            None => return Err(Error::HashMapFailure),
                        });
                    } else {
                        let z_reverse = fft::permute_index(eval_domain_size, queries[z]);
                        coset.push(out_of_domain_element(
                            lde_values[z].1.as_slice(),
                            &constraint_values[z].1,
                            &eval_x[z_reverse],
                            &oods_point,
                            oods_values.as_slice(),
                            oods_coefficients.as_slice(),
                            eval_domain_size,
                            constraints.blowup,
                        )?);
                    }
                } else {
                    coset.push(Replayable::<FieldElement>::replay(&mut channel));
                }
            }
            fri_layer_values.push((*i, coset));
        }
        // Fold and record foldings
        let mut layer_folds = BTreeMap::new();
        for (i, coset) in &fri_layer_values {
            let _old_value = layer_folds.insert(
                *i,
                fri_fold(
                    coset.as_slice(),
                    &eval_points[k],
                    step,
                    (coset_sizes[k] / 2) * i,
                    len,
                    eval_x.as_slice(),
                ),
            );
        }

        let merkle_proof_length = commitment.proof_size(&fri_indices)?;
        let merkle_hashes = Replayable::<Hash>::replay_many(&mut channel, merkle_proof_length);
        let merkle_proof = MerkleProof::from_hashes(commitment, &fri_indices, &merkle_hashes)?;
        fri_folds = layer_folds;

        step <<= constraints.fri_layout[k];
        len /= coset_sizes[k];

        // Note - we could express this as a merkle error instead, but this adds specificity
        if merkle_proof.verify(&fri_layer_values).is_err() {
            return Err(Error::InvalidFriCommitment);
        };

        previous_indices = fri_indices.clone();
        if k + 1 < constraints.fri_layout.len() {
            fri_indices = fri_indices
                .iter()
                .map(|ind| ind / coset_sizes[k + 1])
                .collect();
        }
    }
    if !channel.at_end() {
        return Err(Error::ProofTooLong);
    }

    // Check that the calculated fri folded query values match the values of the
    // decommitted last layer polynomial.
    let interp_root = match FieldElement::root(len) {
        Some(x) => x,
        None => return Err(Error::RootUnavailable),
    };
    let last_layer_polynomial = DensePolynomial::new(&last_layer_coefficient);
    for key in &previous_indices {
        let calculated = fri_folds[key].clone();
        let x_pow = interp_root.pow(fft::permute_index(len, *key));
        let committed = last_layer_polynomial.evaluate(&x_pow);

        if committed != calculated {
            return Err(Error::OodsCalculationFailure);
        }
    }

    let (trace_values, constraint_values) = oods_values.split_at(2 * trace_cols);
    if oods_value_from_trace_values(
        &constraints,
        &constraint_coefficients,
        &trace_values,
        &oods_point,
    ) != oods_value_from_constraint_values(&constraint_values, &oods_point)
    {
        return Err(Error::OodsMismatch);
    }
    Ok(())
}

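/// Evaluates the random linear combination of the constraints at the oods
/// point, reading `T_i(z)` and `T_i(omega * z)` from the disclosed trace
/// values. This is the `C(z)` of step 5 in `verify`.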
fn oods_value_from_trace_values(
    constraints: &Constraints,
    coefficients: &[FieldElement],
    trace_values: &[FieldElement],
    oods_point: &FieldElement,
) -> FieldElement {
    let trace = |i: usize, j: isize| {
        let j: usize = j.try_into().unwrap();
        assert!(j == 0 || j == 1);
        trace_values[2 * i + j].clone()
    };
    constraints
        .combine(coefficients)
        .evaluate(oods_point, &trace)
}

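/// Evaluates `sum_i oods_point^i * A_i(z^d)` over the disclosed values of the
/// combined constraint polynomial. This is the `C'(z)` of step 5 in `verify`.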
fn oods_value_from_constraint_values(
    constraint_values: &[FieldElement],
    oods_point: &FieldElement,
) -> FieldElement {
    let mut result = FieldElement::ZERO;
    let mut power = FieldElement::ONE;
    for value in constraint_values {
        result += value * &power;
        power *= oods_point;
    }
    result
}

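/// Draws `num` pseudo-random query indices in `[0, 2^bits)` from the channel.
/// Each 256-bit draw yields four indices, one from each 64-bit limb, masked to
/// the low `bits` bits; the result is truncated to `num` and sorted ascending,
/// and may contain duplicates.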
// TODO: Clean up
#[allow(clippy::cast_possible_truncation)]
fn get_indices(num: usize, bits: u32, proof: &mut VerifierChannel) -> Vec<usize> {
    let mut query_indices = Vec::with_capacity(num + 3);
    while query_indices.len() < num {
        let val: U256 = proof.get_random();
        query_indices.push(((val.clone() >> (0x100 - 0x040)).c0 & (2_u64.pow(bits) - 1)) as usize);
        query_indices.push(((val.clone() >> (0x100 - 0x080)).c0 & (2_u64.pow(bits) - 1)) as usize);
        query_indices.push(((val.clone() >> (0x100 - 0x0C0)).c0 & (2_u64.pow(bits) - 1)) as usize);
        query_indices.push((val.c0 & (2_u64.pow(bits) - 1)) as usize);
    }
    query_indices.truncate(num);
    query_indices.sort_unstable();
    query_indices
}

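/// Folds a coset of evaluations of one FRI layer down to the single
/// corresponding value of the next committed layer, pairing adjacent entries
/// with `fri_single_fold` and squaring the folding coefficient after each
/// halving.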
fn fri_fold(
    coset: &[FieldElement],
    eval_point: &FieldElement,
    mut step: usize,
    mut index: usize,
    mut len: usize,
    eval_x: &[FieldElement],
) -> FieldElement {
    let mut mutable_eval_copy = eval_point.clone();
    let mut coset_full: Vec<FieldElement> = coset.to_vec();
    while coset_full.len() > 1 {
        let mut next_coset = Vec::with_capacity(coset.len() / 2);

        for (k, pair) in coset_full.chunks(2).enumerate() {
            let x = &eval_x[fft::permute_index(len / 2, index + k) * step];
            next_coset.push(fri_single_fold(&pair[0], &pair[1], x, &mutable_eval_copy));
        }
        len /= 2;
        index /= 2;
        step *= 2;
        mutable_eval_copy = mutable_eval_copy.square();
        coset_full = next_coset;
    }
    coset_full[0].clone()
}

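/// Returns `(P(x) + P(-x)) + (eval_point / x) * (P(x) - P(-x))`, which equals
/// `2 * (P_e(x^2) + eval_point * P_o(x^2))` when `P(x) = P_e(x^2) + x * P_o(x^2)`,
/// i.e. a single folding step evaluated at `x^2`.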
fn fri_single_fold(
    poly_at_x: &FieldElement,
    poly_at_neg_x: &FieldElement,
    x: &FieldElement,
    eval_point: &FieldElement,
) -> FieldElement {
    (poly_at_x + poly_at_neg_x) + eval_point / x * (poly_at_x - poly_at_neg_x)
}

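/// Computes the value of the first FRI layer at a single query location
/// (step 6 of `verify`): each disclosed trace and constraint value has the
/// corresponding oods value subtracted and the deep point divided out, and the
/// quotients are combined using the oods coefficients.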
// TODO - Make sure this is general
#[allow(clippy::too_many_arguments)]
fn out_of_domain_element(
    poly_points_u: &[U256],
    constraint_oods_values: &[FieldElement],
    x_cord: &FieldElement,
    oods_point: &FieldElement,
    oods_values: &[FieldElement],
    oods_coefficients: &[FieldElement],
    eval_domain_size: usize,
    blowup: usize,
) -> Result<FieldElement> {
    let poly_points: Vec<FieldElement> = poly_points_u
        .iter()
        .map(|i| FieldElement::from_montgomery(i.clone()))
        .collect();
    let x_transform = x_cord * FieldElement::GENERATOR;
    let omega = match FieldElement::root(eval_domain_size) {
        Some(x) => x,
        None => return Err(Error::RootUnavailable),
    };
    let g = omega.pow(blowup);
    let mut r = FieldElement::ZERO;

    for x in 0..poly_points.len() {
        r += &oods_coefficients[2 * x] * (&poly_points[x] - &oods_values[2 * x])
            / (&x_transform - oods_point);
        r += &oods_coefficients[2 * x + 1] * (&poly_points[x] - &oods_values[2 * x + 1])
            / (&x_transform - &g * oods_point);
    }
    for (i, constraint_oods_value) in constraint_oods_values.iter().enumerate() {
        r += &oods_coefficients[2 * poly_points.len() + i]
            * (constraint_oods_value - &oods_values[poly_points.len() * 2 + i])
            / (&x_transform - oods_point.pow(constraint_oods_values.len()));
    }
    Ok(r)
}

#[cfg(feature = "std")]
impl error::Error for Error {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        match *self {
            Self::Merkle(ref e) => Some(e),
            _ => None,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        prove,
        traits::tests::{Claim, Witness},
        Provable, Verifiable,
    };
    use zkp_macros_decl::u256h;

    #[test]
    fn verifier_fib_test() {
        let public = Claim {
            index: 1000,
            value: FieldElement::from(u256h!(
                "0142c45e5d743d10eae7ebb70f1526c65de7dbcdb65b322b6ddc36a812591e8f"
            )),
        };
        let private = Witness {
            secret: FieldElement::from(u256h!(
                "00000000000000000000000000000000000000000000000000000000cafebabe"
            )),
        };
        let constraints = public.constraints();
        let trace = public.trace(&private);
        let actual = prove(&constraints, &trace).unwrap();

        assert!(verify(&constraints, &actual).is_ok());
    }
}