concordium_base/id/utils.rs

//! A collection of auxiliary functions that don't belong anywhere else.

use super::{secret_sharing::Threshold, types::*};
use crate::{
    common::{
        to_bytes,
        types::{KeyIndex, TransactionTime},
        ParseResult,
    },
    curve_arithmetic::{multiexp, Curve, Field, Pairing, PrimeField, Value},
    elgamal::*,
    pedersen_commitment::Commitment,
};
use anyhow::bail;
use ed25519_dalek::Verifier;
use either::Either;
use rand::*;
use sha2::{Digest, Sha256};
use std::collections::{btree_map::BTreeMap, BTreeSet};

/// Given a list of commitments C_i = g^{a_i} h^{r_i}
/// and a point x (the share number), compute
/// g^{p(x)} h^{r(x)} where
/// p(x) = a_0 + a_1 x + ... + a_n x^n
/// r(x) = r_0 + r_1 x + ... + r_n x^n
/// This is the product of the commitments C_i^{x^i}, computed here as a
/// single multi-exponentiation.
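///
/// A minimal usage sketch (an `ignore`d doc example, not compiled with the
/// test suite). The curve operations `one_point`, `mul_by_scalar` and
/// `scalar_from_u64` are assumed here and may not match the exact names in
/// this crate:
///
/// ```ignore
/// // Commit to the coefficients of p(X) = 1 + 2 X + 3 X^2 with zero
/// // randomness, so that C_i = g^{a_i}.
/// fn check<C: Curve>() {
///     let g = C::one_point();
///     let coeffs = [1u64, 2, 3];
///     let commitments: Vec<Commitment<C>> = coeffs
///         .iter()
///         .map(|&a| Commitment(g.mul_by_scalar(&C::scalar_from_u64(a))))
///         .collect();
///     // Evaluate at the share number x = 5; p(5) = 1 + 10 + 75 = 86.
///     let x = C::scalar_from_u64(5);
///     let expected = Commitment(g.mul_by_scalar(&C::scalar_from_u64(86)));
///     assert_eq!(commitment_to_share(&x, &commitments), expected);
/// }
/// ```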
pub fn commitment_to_share<C: Curve>(
    share_number: &C::Scalar,
    coeff_commitments: &[Commitment<C>],
) -> Commitment<C> {
    let n = coeff_commitments.len();
    if n == 0 {
        return Commitment(C::zero_point());
    }
    let mut exponents = Vec::with_capacity(n);
    let mut exponent: C::Scalar = C::Scalar::one();
    for _ in 0..n {
        exponents.push(exponent);
        exponent.mul_assign(share_number);
    }
    Commitment(multiexp(coeff_commitments, &exponents))
}

/// Interpret the array as the coefficients of a polynomial, starting with the
/// constant (degree 0) term, and evaluate the polynomial at the given point.
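///
/// A minimal usage sketch (an `ignore`d doc example, not compiled with the
/// test suite). The `Coeff` wrapper and the `small` helper are local to the
/// example, and `ArkField<Fr>` is the same field used in the tests at the
/// bottom of this file:
///
/// ```ignore
/// use crate::curve_arithmetic::arkworks_instances::ArkField;
/// type F = ArkField<ark_bls12_381::Fr>;
///
/// // `evaluate_poly` expects coefficients that implement `AsRef<F>`.
/// struct Coeff(F);
/// impl AsRef<F> for Coeff {
///     fn as_ref(&self) -> &F { &self.0 }
/// }
///
/// // Build a small field element as a sum of ones.
/// fn small(n: u64) -> F {
///     let mut acc = F::zero();
///     for _ in 0..n {
///         acc.add_assign(&F::one());
///     }
///     acc
/// }
///
/// // p(X) = 1 + 2 X + 3 X^2, so p(2) = 1 + 2*2 + 3*4 = 17.
/// let coeffs = vec![Coeff(small(1)), Coeff(small(2)), Coeff(small(3))];
/// assert_eq!(evaluate_poly(&coeffs, &small(2)), small(17));
/// ```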
pub fn evaluate_poly<F: Field, R: AsRef<F>>(coeffs: &[R], point: &F) -> F {
    let mut eval: F = F::zero();
    // Horner's scheme, evaluated at `point`.
    for rand in coeffs.iter().rev() {
        eval.mul_assign(point);
        eval.add_assign(rand.as_ref());
    }
    eval
}

/// This function is used to encrypt the PRF key share in chunks,
/// where the chunks are in little-endian order.
/// The arguments are
/// - context - the global context,
/// - pk - the public key used for encryption,
/// - share - the share we want to encrypt.
///
/// The output is a 3-tuple consisting of
/// 8 ciphers, 8 randomness values, and 8 scalars.
/// The ciphers and randomness values come from the
/// chunked encryption itself.
/// The scalars are the chunks themselves (in little-endian order).
#[allow(clippy::type_complexity)]
pub fn encrypt_prf_share<C: Curve, R: Rng>(
    context: &GlobalContext<C>,
    pk: &PublicKey<C>,
    share: &Value<C>,
    csprng: &mut R,
) -> ([Cipher<C>; 8], [Randomness<C>; 8], [C::Scalar; 8]) {
    // The generator for encryption in the exponent is the second component of the
    // commitment key, the 'h'.
    let h = context.encryption_in_exponent_generator();
    // let mut ciphers = encrypt_in_chunks_given_generator(pk, share, CHUNK_SIZE, h,
    // csprng);
    let chunks = value_to_chunks::<C>(share, CHUNK_SIZE);
    let mut ciphers = pk.encrypt_exponent_vec_given_generator(&chunks, h, csprng);
    // these are guaranteed to exist because we used `ChunkSize::ThirtyTwo`. The
    // encryptions are in little-endian limbs, so the last one is the encryption
    // of the high bits.
    let (encryption_8, randomness_8) = ciphers.pop().unwrap();
    let (encryption_7, randomness_7) = ciphers.pop().unwrap();
    let (encryption_6, randomness_6) = ciphers.pop().unwrap();
    let (encryption_5, randomness_5) = ciphers.pop().unwrap();
    let (encryption_4, randomness_4) = ciphers.pop().unwrap();
    let (encryption_3, randomness_3) = ciphers.pop().unwrap();
    let (encryption_2, randomness_2) = ciphers.pop().unwrap();
    let (encryption_1, randomness_1) = ciphers.pop().unwrap();

    let enc = [
        encryption_1,
        encryption_2,
        encryption_3,
        encryption_4,
        encryption_5,
        encryption_6,
        encryption_7,
        encryption_8,
    ];
    let rand = [
        randomness_1,
        randomness_2,
        randomness_3,
        randomness_4,
        randomness_5,
        randomness_6,
        randomness_7,
        randomness_8,
    ];
    let chunks = [
        *chunks[0], *chunks[1], *chunks[2], *chunks[3], *chunks[4], *chunks[5], *chunks[6],
        *chunks[7],
    ];
    (enc, rand, chunks)
}

/// Encode attribute tags as bits of a big integer. The tags are encoded from
/// the least significant bit up, i.e., the LSB of the result is set if and
/// only if tag 0 is in the list. This function will fail if
/// - there are repeated attributes in the list,
/// - there are tags in the list which do not fit into the field capacity.
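///
/// A small usage sketch (an `ignore`d doc example, not compiled with the test
/// suite), using the same `ArkField<Fr>` field as the tests at the bottom of
/// this file and assuming `AttributeTag` values can be constructed directly
/// from their tag number:
///
/// ```ignore
/// use crate::curve_arithmetic::arkworks_instances::ArkField;
/// use ark_bls12_381::Fr;
///
/// // Tags 0 and 2 set bits 0 and 2 of the lowest limb, i.e. 0b101.
/// let tags = [AttributeTag(0), AttributeTag(2)];
/// let encoded: ArkField<Fr> = encode_tags(tags.iter()).expect("tags are in range");
/// assert_eq!(encoded.into_repr().to_vec()[0], 0b101);
///
/// // Repeated tags are rejected.
/// let dup = [AttributeTag(3), AttributeTag(3)];
/// let result: ParseResult<ArkField<Fr>> = encode_tags(dup.iter());
/// assert!(result.is_err());
/// ```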
pub fn encode_tags<'a, F: PrimeField, I: std::iter::IntoIterator<Item = &'a AttributeTag>>(
    i: I,
) -> ParseResult<F> {
    // Maximum tag that can be stored. Since F is a field its capacity is at
    // least 1, so the subtraction below cannot underflow.
    let max_tag = F::CAPACITY - 1;
    let f = F::zero();
    let mut limbs = f.into_repr().to_vec(); // this is an array of 64 bit limbs, with the least significant limb first
    for &AttributeTag(tag) in i.into_iter() {
        let idx = tag / 64;
        let place = tag % 64;
        if u32::from(tag) > max_tag || usize::from(idx) >= limbs.len() {
            bail!("Tag out of range: {}", tag)
        }
        let mask: u64 = 1 << place;
        if limbs[usize::from(idx)] & mask != 0 {
            bail!("Duplicate tag {}", tag)
        } else {
            limbs[usize::from(idx)] |= mask;
        }
    }
    // This should not fail (since we check capacity), but in case it does we just
    // propagate the error.
    Ok(F::from_repr(limbs.as_slice())?)
}

/// Encode anonymity revokers into a list of scalars.
/// The encoding is as follows.
/// Given a set of anonymity revoker identities, we encode them into one or
/// more scalars, packing identities into each scalar from the least
/// significant bits upward.
/// Each scalar is then shifted one bit to the left, and the least significant
/// bit signals whether this is the final scalar: it is 1 on the last scalar
/// and 0 on all preceding ones.
/// The field must be big enough to encode u64.
///
/// The identities are encoded in ascending order.
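///
/// A small usage sketch (an `ignore`d doc example, not compiled with the test
/// suite), mirroring the tests at the bottom of this file:
///
/// ```ignore
/// use crate::curve_arithmetic::arkworks_instances::ArkField;
/// use ark_bls12_381::Fr;
/// use std::collections::BTreeSet;
///
/// // With Fr (capacity 254 bits) each scalar holds up to 7 identities, so
/// // three identities fit into a single scalar, which is marked as the last
/// // one by its least significant bit being set.
/// let ars = (1..=3).map(ArIdentity::new).collect::<BTreeSet<_>>();
/// let scalars = encode_ars::<ArkField<Fr>>(&ars).expect("encoding should succeed");
/// assert_eq!(scalars.len(), 1);
/// ```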
pub fn encode_ars<F: PrimeField>(ars: &BTreeSet<ArIdentity>) -> Option<Vec<F>> {
    let max_bit: usize = (F::CAPACITY - 1) as usize;

    // Collect into an __ascending__ vector.
    let ars = ars.iter().copied().collect::<Vec<_>>();

    // NB: This 32 must be the same as the bit-size of ArIdentity.
    let num_ars_per_element = max_bit / 32;
    let chunks = ars.chunks(num_ars_per_element);
    let num_scalars = chunks.len();
    let mut scalars = Vec::with_capacity(num_scalars);
    let mut two = F::one();
    two.add_assign(&F::one());
    let two = two;

    for chunk in chunks {
        let mut f = F::zero().into_repr();
        for (i, &ar_id) in chunk.iter().enumerate() {
            let ar_id: u32 = ar_id.into();
            let x: u64 = if i % 2 == 0 {
                u64::from(ar_id)
            } else {
                u64::from(ar_id) << 32
            };
            f[i / 2] |= x;
        }
        let mut scalar = F::from_repr(f.as_slice()).ok()?;
        // shift one bit to the left.
        scalar.mul_assign(&two);
        scalars.push(scalar)
    }
    if num_scalars == 0 {
        scalars.push(F::zero())
    }
    // This should not fail since we've explicitly added an element to
    // make sure we have enough scalars, but we still just propagate the error.
    scalars.last_mut()?.add_assign(&F::one());
    Some(scalars)
}

/// Encode two YearMonth values and a threshold into a scalar.
/// The two dates are converted to u32, with created_at placed first and
/// valid_to after it. Thus created_at starts at the least significant bit and
/// valid_to occupies the next 32 bits. The threshold is stored in the
/// following byte.
///
/// NB: The field's capacity must be at least 128 bits.
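///
/// A sketch of the resulting limb layout (an `ignore`d doc example, not
/// compiled with the test suite). The `YearMonth { year, month }` and
/// `Threshold(2)` constructions are illustrative assumptions and may not
/// match the actual constructors:
///
/// ```ignore
/// use crate::curve_arithmetic::arkworks_instances::ArkField;
/// use ark_bls12_381::Fr;
///
/// let f: ArkField<Fr> = encode_public_credential_values(
///     YearMonth { year: 2021, month: 4 },
///     YearMonth { year: 2024, month: 4 },
///     Threshold(2),
/// )
/// .expect("the field capacity is large enough");
/// let limbs = f.into_repr().to_vec();
/// // limbs[0] holds created_at in its low 32 bits and valid_to in its high
/// // 32 bits; limbs[1] holds the threshold.
/// assert_eq!(limbs[1], 2);
/// ```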
pub fn encode_public_credential_values<F: PrimeField>(
    created_at: YearMonth,
    valid_to: YearMonth,
    threshold: Threshold,
) -> ParseResult<F> {
    let mut f = F::zero().into_repr();
    let ca: u32 = created_at.into();
    let vt: u32 = valid_to.into();
    let s = u64::from(vt) << 32 | u64::from(ca);
    f[0] = s; // limbs are little endian.
    let threshold: u8 = threshold.into();
    f[1] = u64::from(threshold);
    Ok(F::from_repr(f.as_slice())?)
}

/// This function verifies that the signatures inside
/// the AccountOwnershipProof are valid signatures of the given message.
/// The arguments are
///  - keys - the verification keys
///  - threshold - the signature threshold
///  - proof_acc_sk - the AccountOwnershipProof containing the signatures to be
///    verified
///  - msg - the message
pub fn verify_account_ownership_proof(
    keys: &BTreeMap<KeyIndex, VerifyKey>,
    threshold: SignatureThreshold,
    proof_acc_sk: &AccountOwnershipProof,
    msg: &[u8],
) -> bool {
    // We check all the keys that were provided, and check that enough of them
    // were provided compared to the threshold.
    // We also make sure that no more than 255 keys are provided, and that
    // - all keys are distinct,
    // - at least one key is provided,
    // - there are the same number of proofs and keys.
    let Ok(num_proofs) = proof_acc_sk.num_proofs() else {
        return false;
    };
    if num_proofs < threshold
        || keys.len() > 255
        || keys.is_empty()
        || usize::from(u8::from(num_proofs)) != keys.len()
    {
        return false;
    }
    // set of keys processed so far
    let mut processed = BTreeSet::new();
    for (idx, key) in keys.iter() {
        // insert returns true if the key was __not__ present
        if !processed.insert(key) {
            return false;
        }
        if let Some(sig) = proof_acc_sk.sigs.get(idx) {
            let VerifyKey::Ed25519VerifyKey(ref key) = key;
            if key.verify(msg, sig).is_err() {
                return false;
            }
        } else {
            return false;
        }
    }
    true
}

/// Compute the hash of the credential deployment that should be signed by the
/// account keys for deployment.
/// If `new_or_existing` is `Left` then this credential will create a new
/// account, and the transaction expiry should be signed.
/// Otherwise it is going to be deployed to the given account address, and the
/// address should be signed.
pub fn credential_hash_to_sign<
    P: Pairing,
    C: Curve<Scalar = P::ScalarField>,
    AttributeType: Attribute<C::Scalar>,
>(
    values: &CredentialDeploymentValues<C, AttributeType>,
    proofs: &IdOwnershipProofs<P, C>,
    new_or_existing: &Either<TransactionTime, AccountAddress>,
) -> Vec<u8> {
    let mut hasher = Sha256::new();
    hasher.update(to_bytes(&values));
    hasher.update(to_bytes(&proofs));
    // the serialization of Either has tag 0 for the left variant, and 1 for the
    // right
    hasher.update(to_bytes(&new_or_existing));
    let to_sign = &hasher.finalize();
    to_sign.to_vec()
}

/// Given two iterators ordered by keys, call the given function on the values
/// in increasing order of keys, wrapping values from the first iterator in
/// `Either::Left` and values from the second in `Either::Right`. That is,
/// essentially merge the two iterators into one ordered iterator and then map,
/// but all done inline.
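///
/// A small usage sketch (an `ignore`d doc example, not compiled with the test
/// suite):
///
/// ```ignore
/// use std::collections::BTreeMap;
///
/// let left: BTreeMap<u8, &str> = vec![(1, "a"), (4, "d")].into_iter().collect();
/// let right: BTreeMap<u8, &str> = vec![(2, "b"), (3, "c")].into_iter().collect();
/// let mut merged = Vec::new();
/// merge_iter(&left, &right, |v| match v {
///     Either::Left(s) => merged.push(format!("left: {}", s)),
///     Either::Right(s) => merged.push(format!("right: {}", s)),
/// });
/// // Values are visited in increasing key order: 1, 2, 3, 4.
/// assert_eq!(merged, vec!["left: a", "right: b", "right: c", "left: d"]);
/// ```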
pub fn merge_iter<'a, K: Ord + 'a, V1: 'a, V2: 'a, I1, I2, F>(i1: I1, i2: I2, mut f: F)
where
    I1: std::iter::IntoIterator<Item = (&'a K, &'a V1)>,
    I2: std::iter::IntoIterator<Item = (&'a K, &'a V2)>,
    F: FnMut(Either<&'a V1, &'a V2>), {
    let mut iter_1 = i1.into_iter().peekable();
    let mut iter_2 = i2.into_iter().peekable();
    while let (Some(&(tag_1, v_1)), Some(&(tag_2, v_2))) = (iter_1.peek(), iter_2.peek()) {
        if tag_1 < tag_2 {
            f(Either::Left(v_1));
            // advance the first iterator
            let _ = iter_1.next();
        } else {
            f(Either::Right(v_2));
            // advance the second iterator
            let _ = iter_2.next();
        }
    }
    for (_, v) in iter_1 {
        f(Either::Left(v))
    }
    for (_, v) in iter_2 {
        f(Either::Right(v))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{common::to_bytes, curve_arithmetic::arkworks_instances::ArkField};
    use ark_bls12_381::Fr;
    use rand::{thread_rng, Rng};
    use std::collections::BTreeMap;

    #[test]
    pub fn test_last_bit() {
        let ars = (1..10).map(ArIdentity::new).collect::<BTreeSet<_>>();
        let encoded = encode_ars::<ArkField<Fr>>(&ars).expect("Encoding should succeed.");
        // The capacity of Fr is 254 bits, so each scalar holds 7 AR identities and
        // the 9 identities require two scalars.
        assert_eq!(encoded.len(), 2, "Encoded ARs should fit into two scalars.");
        let s1 = to_bytes(&encoded[0]);
        let s2 = to_bytes(&encoded[1]);
        // last bit of the first one must be 0
        assert_eq!(s1[31] & 1u8, 0u8, "Last bit of the first scalar must be 0.");
        assert_eq!(
            s2[31] & 1u8,
            1u8,
            "Last bit of the second scalar must be 1."
        );
    }

    #[test]
    // Test that the encoding of anonymity revokers is injective.
    pub fn test_encoding_injective() {
        let mut csprng = thread_rng();
        let mut seen = BTreeMap::new();
        for n in 1..50 {
            let mut xs = vec![ArIdentity::new(1); n];
            for x in xs.iter_mut() {
                *x = ArIdentity::new(csprng.gen_range(1..100));
            }
            let set = xs.iter().copied().collect::<BTreeSet<_>>();
            let encoded = encode_ars::<ArkField<Fr>>(&set).expect("Encoding should succeed.");
            if let Some(set_ex) = seen.insert(encoded.clone(), set.clone()) {
                assert_eq!(set, set_ex);
            }
        }
    }
}