ark_scale/
lib.rs

1// -*- mode: rust; -*-
2//
3// Copyright (c) 2019 Web 3 Foundation
4//
5// Authors:
6// - Jeffrey Burdges <jeff@web3.foundation>
7
8#![cfg_attr(not(feature = "std"), no_std)]
9#![deny(unsafe_code)]
10#![doc = include_str!("../README.md")]
11
12use ark_std::{
13    borrow::Borrow,
14    fmt,
15    io::{self, Read, Write},
16    vec::Vec,
17};
18
19type ArkResult<T> = Result<T, io::Error>;
20use ark_serialize::{
21    CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Validate,
22};
23pub use ark_serialize::{self as ark_serialize};
24
25pub use scale_codec::{self as scale, MaxEncodedLen}; // max_encoded_len::ConstEncodedLen
26use scale::{Decode, Encode, EncodeLike, Input, Output};
27use scale_info::TypeInfo;
28// type ScaleResult<T> = Result<T,scale::Error>;
29
30pub mod rw;
31use rw::*;
32
33mod max_encoded_len;
34pub use max_encoded_len::*;
35
36#[cfg(feature = "hazmat")]
37pub mod hazmat;
38
39#[cfg(feature = "ff")]
40pub mod ff;
41
42#[cfg(feature = "ec")]
43pub mod ec;
44
45#[cfg(test)]
46mod tests;
47
48/*
49error: `(Compress, Validate)` is forbidden as the type of a const generic parameter
50   --> src/lib.rs:145:33
51    = note: the only supported types are integers, `bool` and `char`
52*/
53
/// Arkworks' serialization modes, morally (Compress, Validate) but
/// const generics only supports integers, `bool` and `char` right now.
///
/// Bit layout (see [`make_usage`], [`is_compressed`], [`is_validated`]):
/// bit 0 set means `Compress::No`; bit 1 set means `Validate::No`.
/// Only values `0..4` are meaningful.
pub type Usage = u8; // (Compress, Validate)
57
58/// Arkworks' serialization modes hack.
59pub const fn make_usage(compress: Compress, validate: Validate) -> Usage {
60    let c = match compress {
61        Compress::Yes => 0,
62        Compress::No => 1,
63    };
64    let v = match validate {
65        Validate::Yes => 0,
66        Validate::No => 2,
67    };
68    c | v
69}
70
71pub const fn is_compressed(u: Usage) -> Compress {
72    // u.0
73    assert!(u < 4);
74    if u & 1 == 1 {
75        Compress::No
76    } else {
77        Compress::Yes
78    }
79}
80
81pub const fn is_validated(u: Usage) -> Validate {
82    // u.1
83    assert!(u < 4);
84    if u & 2 == 2 {
85        Validate::No
86    } else {
87        Validate::Yes
88    }
89}
90
/// ArkScale usage for typical wire formats, like block data and gossip messages.  Always safe.
// WIRE == 0: compressed and validated, the conservative default for `ArkScale<T>`.
pub const WIRE: Usage = make_usage(Compress::Yes, Validate::Yes);

/// ArkScale usage which neither compresses nor validates inputs,
/// only for usage in host calls and on-chain storage where the runtime already performed
/// validation checks.
// HOST_CALL == 3: both "No" bits set; skips curve-point checks on decode.
pub const HOST_CALL: Usage = make_usage(Compress::No, Validate::No);
98
/// Arkworks type wrapped for serialization by Scale
///
/// The const parameter `U` fixes the (compress, validate) modes used by the
/// `Encode`/`Decode` impls below; it defaults to [`WIRE`].
#[derive(Clone, Eq, PartialEq, Debug)] // CanonicalSerialize, CanonicalDeserialize
#[repr(transparent)] // layout-identical to `T`; see the commented `decode_into` in `impl_decode_via_ark!`
pub struct ArkScale<T, const U: Usage = WIRE>(pub T);
103
104impl<T, const U: Usage> From<T> for ArkScale<T, U> {
105    fn from(t: T) -> ArkScale<T, U> {
106        ArkScale(t)
107    }
108}
109
110impl<T: CanonicalDeserialize, const U: Usage> Decode for ArkScale<T, U> {
111    fn decode<I: Input>(input: &mut I) -> Result<Self, scale::Error> {
112        <T as CanonicalDeserialize>::deserialize_with_mode(
113            InputAsRead(input),
114            is_compressed(U),
115            is_validated(U),
116        )
117        .map(|v| ArkScale(v))
118        .map_err(ark_error_to_scale_error)
119    }
120
121    // fn skip<I: Input>(input: &mut I) -> Result<(), Error> { ... }
122
123    // fn encoded_fixed_size() -> Option<usize> { ... }
124}
125
/// Panic message for the impossible case that arkworks serialization fails
/// inside SCALE's infallible `Encode::encode_to`.
// Fixed typo ("falures" -> "failures"); dropped the redundant `'static`
// lifetime, which is implied on a `const` of reference type.
const OOPS: &str =
    "Arkworks serialization failed, but Scale cannot handle serialization failures.  As ark_scale::rw::OutputAsWrite cannot fail, and ark_serialize_derive cannot introduce fresh failures, you have a non-derived `impl<..> ark_serialize::CanonicalSerialize` which fails, which violates usage conditions from ark-scale/README.md.";
    // You could usually verify this condition by reading results like
    // git clone https://github.com/arkworks-rs/algebra
    // cd algebra
    // grep -r --include '*.rs' 'CanonicalSerialize for' -A 10 ff* ec* poly/ | less
132
133
134impl<T: CanonicalSerialize, const U: Usage> EncodeLike for ArkScale<T, U> {}
135
136impl<T: CanonicalSerialize, const U: Usage> Encode for ArkScale<T, U> {
137    fn size_hint(&self) -> usize {
138        self.0.serialized_size(is_compressed(U))
139    }
140
141    fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
142        self.0
143            .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
144            .expect(OOPS);
145    }
146
147    // TODO:  Arkworks wants an io::Write, so we ignre the rule that
148    // value types override using_encoded.
149    // fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R;
150
151    fn encoded_size(&self) -> usize {
152        self.0.serialized_size(is_compressed(U))
153    }
154}
155
/// `scale-info` metadata: describe `ArkScale<T, U>` as a fixed-length byte
/// array, since the encoding is an opaque arkworks byte serialization rather
/// than a structured SCALE type.
impl<T: 'static + ArkScaleMaxEncodedLen, const U: Usage> TypeInfo for ArkScale<T, U> {
    type Identity = Self;

    fn type_info() -> scale_info::Type {
        let path = scale_info::Path::new("ArkScale", module_path!());
        let array_type_def = scale_info::TypeDefArray {
            // Array length is the maximum encoded size under this usage's
            // compression mode.
            // NOTE(review): `as u32` silently truncates if the length ever
            // exceeds `u32::MAX` — unlikely for arkworks types, but unchecked.
            len: T::max_encoded_len(is_compressed(U)) as u32,
            type_param: scale_info::MetaType::new::<u8>(),
        };
        let type_def = scale_info::TypeDef::Array(array_type_def);
        scale_info::Type { path, type_params: Vec::new(), type_def, docs: Vec::new() }
    }
}
169
170
/// Borrowed counterpart of [`ArkScale`]: wraps `&T` for encode-only use,
/// avoiding a clone of the underlying arkworks value.
// `Clone` is implemented manually below — presumably to avoid the `T: Clone`
// bound a derive would add; the field `&'a T` is copyable regardless of `T`.
#[derive(Copy,Debug)] // CanonicalSerialize
pub struct ArkScaleRef<'a, T, const U: Usage = WIRE>(pub &'a T);
173
/// Manual `Clone`: copying the inner reference needs no `T: Clone` bound,
/// which `#[derive(Clone)]` would have required.
impl<'a, T, const U: Usage> Clone for ArkScaleRef<'a, T, U> {
    fn clone(&self) -> Self {
        ArkScaleRef(self.0)
    }
}
179
180impl<'a, T, const U: Usage> From<&'a T> for ArkScaleRef<'a, T, U> {
181    fn from(t: &'a T) -> ArkScaleRef<'a, T, U> {
182        ArkScaleRef(t)
183    }
184}
185
186impl<'a, T: CanonicalSerialize, const U: Usage> Encode for ArkScaleRef<'a, T, U> {
187    fn size_hint(&self) -> usize {
188        self.0.serialized_size(is_compressed(U))
189    }
190
191    fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
192        self.0
193            .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
194            .expect(OOPS);
195    }
196
197    // TODO:  Arkworks wants an io::Write, so we ignre the rule that
198    // value types override using_encoded.
199    // fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R;
200
201    fn encoded_size(&self) -> usize {
202        self.0.serialized_size(is_compressed(U))
203    }
204}
205
206
207/// Arkworks' `CanonicalSerialize` cannot consume `Iterator`s directly,
208/// but `iter_ark_to_ark_bytes` serializes exactly like `Vec<T>`,
209/// `&'a [T]`, or `[T]` do with `CanonicalSerialize`.
210///
211/// Returns errors as `ark_serialize::SerializationError`.
212pub fn iter_ark_to_ark_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, SerializationError>
213where
214    T: CanonicalSerialize,
215    B: Borrow<T>,
216    I: IntoIterator<Item = B>,
217{
218    const LL: usize = 8;
219    let mut iter = iter.into_iter();
220    let len = iter.size_hint().0;
221    let first = iter.next();
222    let mut vec = if let Some(ref e) = first {
223        let size = e.borrow().serialized_size(is_compressed(usage));
224        Vec::with_capacity(LL + size * (1 + len))
225    } else {
226        Vec::with_capacity(LL)
227    };
228    vec.extend_from_slice(&[0u8; LL]);
229    if let Some(e) = first {
230        e.borrow()
231            .serialize_with_mode(&mut vec, is_compressed(usage))?;
232        let mut l = 1;
233        for e in iter {
234            e.borrow()
235                .serialize_with_mode(&mut vec, is_compressed(usage))?;
236            l += 1;
237        }
238        debug_assert_eq!(
239            l, len,
240            "Iterator::size_hint underestimate would slow down release execution."
241        );
242        // let err = |_| scale_error_to_ark_error(scale::Error::from("Arkworks cannot serialize more than 2^32 items."));
243        // let l = u32::try_from(l).map_err(err) ?;
244        (&mut vec)[0..LL].copy_from_slice(&(l as u64).to_le_bytes());
245    }
246    Ok(vec)
247}
248
249/// Arkworks' `CanonicalSerialize` cannot consume `Iterator`s directly,
250/// but `iter_ark_to_scale_bytes` serializes exactly like
251/// `ArkScale(Vec<T>)`, `ArkScale(&'a [T])`, or `ArkScale([T])` do
252/// under `parity_scale_codec::Encode`.
253///
254/// Returns errors as `parity_scale_codec::Error`.
255pub fn iter_ark_to_scale_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, scale::Error>
256where
257    T: CanonicalSerialize,
258    B: Borrow<T>,
259    I: IntoIterator<Item = B>,
260{
261    iter_ark_to_ark_bytes(iter, usage).map_err(ark_error_to_scale_error)
262}
263
264
265// We next provide helper macros for implementing scale upon
266// your own arkworks types.
267
268
/// Implement body of `scale::Decode` by delegation to `ArkScale`,
/// usable from polymorphic code.
///
/// Expand inside an `impl scale::Decode for YourType { ... }` block.
// NOTE(review): paths use the literal crate name `ark_scale`, so the call
// site must depend on this crate under that name — confirm `$crate` was
// deliberately avoided.
#[macro_export]
macro_rules! impl_decode_via_ark {
    () => {
        // Decode through the `ArkScale<Self>` wrapper, then unwrap the newtype.
        fn decode<I: ark_scale::scale::Input>(input: &mut I) -> Result<Self, ark_scale::scale::Error> {
            let a: ark_scale::ArkScale<Self> = <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::decode(input) ?;
            Ok(a.0)
        }

        /*
        fn decode_into<I: ark_scale::scale::Input>(input: &mut I, dst: &mut core::mem::MaybeUninit<Self>) -> Result<ark_scale::scale::DecodeFinished, ark_scale::scale::Error> {
            // safe thanks to #[repr(transparent)]
            <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::decode_into(input,dst)
        }
        */

        // Delegate `skip` so skipped bytes match what `decode` would consume.
        fn skip<I: ark_scale::scale::Input>(input: &mut I) -> Result<(), ark_scale::scale::Error> {
            <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::skip(input)
        }

        // Delegate so any fixed-size answer stays consistent with the wrapper.
        fn encoded_fixed_size() -> Option<usize> {
            <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::encoded_fixed_size()
        }
    }
}
295
/// Implement body of `scale::Encode` by delegation to `ArkScale`,
/// usable from polymorphic code.
///
/// Expand inside an `impl scale::Encode for YourType { ... }` block.
/// Every method wraps `self` in a borrowed [`ArkScaleRef`] and delegates,
/// avoiding any clone of the underlying value.
#[macro_export]
macro_rules! impl_encode_via_ark {
    () => {
        fn size_hint(&self) -> usize {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.size_hint()
        }

        fn encode_to<O: ark_scale::scale::Output + ?Sized>(&self, dest: &mut O) {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encode_to(dest)
        }

        // NOTE(review): `Vec` is unqualified, so the call site must have `Vec`
        // in scope (std prelude or `ark_std::vec::Vec`) — confirm for no_std users.
        fn encode(&self) -> Vec<u8> {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encode()
        }

        fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.using_encoded(f)
        }

        fn encoded_size(&self) -> usize {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encoded_size()
        }
    }
}
327
/// Implement `scale::{Encode,Decode}` by delegation to `ArkScale`,
/// but lacks support for polymorphic code.
///
/// You should manually provide `MaxEncodedLen` for weights.
/// ```ignore
/// impl_scale_via_ark!(MyArkworksType);
///
/// impl ark_scale::MaxEncodedLen for MyArkworksType
/// {
///     fn max_encoded_len() -> usize {
///         256
///     }
/// }
/// ```
#[macro_export]
macro_rules! impl_scale_via_ark {
    ($t:ty) => {
        // Fully qualify `Decode`/`Encode` so call sites need not import the
        // traits themselves — the `EncodeLike` impl below was already
        // qualified; this makes the macro self-contained.
        impl ark_scale::scale::Decode for $t {
            ark_scale::impl_decode_via_ark!();
        }

        impl ark_scale::scale::Encode for $t {
            ark_scale::impl_encode_via_ark!();
        }

        impl ark_scale::scale::EncodeLike for $t {}
    }
} // macro_rules! impl_scale_via_ark
356
/// Implement body of `scale::MaxEncodedLen` by delegation
/// to `ark_std::Zero` and `CanonicalSerialize`, usable
/// from polymorphic code.
// NOTE(review): `ark_serialize` and `ark_std` must resolve at the call site;
// consider routing through `ark_scale::ark_serialize` — confirm intent.
#[macro_export]
macro_rules! impl_body_max_encode_len {
    // No-argument form: delegate using `Self` as the type.
    () => {
        ark_scale::impl_body_max_encode_len!(Self);
    };
    ($t:ty) => {
        #[inline]
        fn max_encoded_len(compress: ark_serialize::Compress) -> usize {
            // Assumes every element of `$t` serializes to the same length as
            // its zero element — TODO confirm for variable-length types.
            use ark_serialize::{CanonicalSerialize}; 
            <$t as ark_std::Zero>::zero().serialized_size(compress)
        }
    };
}