1#![cfg_attr(not(feature = "std"), no_std)]
9#![deny(unsafe_code)]
10#![doc = include_str!("../README.md")]
11
12use ark_std::{
13 borrow::Borrow,
14 fmt,
15 io::{self, Read, Write},
16 vec::Vec,
17};
18
/// Crate-local shorthand for fallible arkworks-style I/O operations.
/// NOTE(review): not referenced in this portion of the file — presumably
/// used by the `rw` adapters; confirm before removing.
type ArkResult<T> = Result<T, io::Error>;
20use ark_serialize::{
21 CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Validate,
22};
23pub use ark_serialize::{self as ark_serialize};
24
25pub use scale_codec::{self as scale, MaxEncodedLen}; use scale::{Decode, Encode, EncodeLike, Input, Output};
27use scale_info::TypeInfo;
28pub mod rw;
31use rw::*;
32
33mod max_encoded_len;
34pub use max_encoded_len::*;
35
36#[cfg(feature = "hazmat")]
37pub mod hazmat;
38
39#[cfg(feature = "ff")]
40pub mod ff;
41
42#[cfg(feature = "ec")]
43pub mod ec;
44
45#[cfg(test)]
46mod tests;
47
48pub type Usage = u8; pub const fn make_usage(compress: Compress, validate: Validate) -> Usage {
60 let c = match compress {
61 Compress::Yes => 0,
62 Compress::No => 1,
63 };
64 let v = match validate {
65 Validate::Yes => 0,
66 Validate::No => 2,
67 };
68 c | v
69}
70
71pub const fn is_compressed(u: Usage) -> Compress {
72 assert!(u < 4);
74 if u & 1 == 1 {
75 Compress::No
76 } else {
77 Compress::Yes
78 }
79}
80
81pub const fn is_validated(u: Usage) -> Validate {
82 assert!(u < 4);
84 if u & 2 == 2 {
85 Validate::No
86 } else {
87 Validate::Yes
88 }
89}
90
/// Compressed and validated mode — intended for data crossing a trust
/// boundary (the default `Usage` for `ArkScale`).
pub const WIRE: Usage = make_usage(Compress::Yes, Validate::Yes);

/// Uncompressed and unvalidated mode — skips both costs; presumably only
/// for data from a trusted host call (see README) — use with care.
pub const HOST_CALL: Usage = make_usage(Compress::No, Validate::No);
98
/// Newtype wrapper giving any arkworks-serializable `T` a SCALE
/// `Encode`/`Decode` implementation, with the (de)serialization mode
/// fixed at the type level by `U` (defaults to [`WIRE`]).
#[derive(Clone, Eq, PartialEq, Debug)]
#[repr(transparent)]
pub struct ArkScale<T, const U: Usage = WIRE>(pub T);
103
104impl<T, const U: Usage> From<T> for ArkScale<T, U> {
105 fn from(t: T) -> ArkScale<T, U> {
106 ArkScale(t)
107 }
108}
109
110impl<T: CanonicalDeserialize, const U: Usage> Decode for ArkScale<T, U> {
111 fn decode<I: Input>(input: &mut I) -> Result<Self, scale::Error> {
112 <T as CanonicalDeserialize>::deserialize_with_mode(
113 InputAsRead(input),
114 is_compressed(U),
115 is_validated(U),
116 )
117 .map(|v| ArkScale(v))
118 .map_err(ark_error_to_scale_error)
119 }
120
121 }
125
/// Panic message used by the `Encode` impls below: SCALE encoding is
/// infallible, and `OutputAsWrite` cannot fail, so an arkworks
/// serialization error can only come from a hand-written
/// `CanonicalSerialize` impl violating this crate's usage conditions.
// Fixes: redundant `'static` lifetime on a `const` (clippy
// `redundant_static_lifetimes`) and the "falures" typo in the message.
const OOPS: &str =
    "Arkworks serialization failed, but Scale cannot handle serialization failures. As ark_scale::rw::OutputAsWrite cannot fail, and ark_serialize_derive cannot introduce fresh failures, you have a non-derived `impl<..> ark_serialize::CanonicalSerialize` which fails, which violates usage conditions from ark-scale/README.md.";
/// Marker: an `ArkScale<T, U>` encodes exactly like itself, so it may be
/// passed where SCALE expects an encode-equivalent value.
impl<T: CanonicalSerialize, const U: Usage> EncodeLike for ArkScale<T, U> {}
135
136impl<T: CanonicalSerialize, const U: Usage> Encode for ArkScale<T, U> {
137 fn size_hint(&self) -> usize {
138 self.0.serialized_size(is_compressed(U))
139 }
140
141 fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
142 self.0
143 .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
144 .expect(OOPS);
145 }
146
147 fn encoded_size(&self) -> usize {
152 self.0.serialized_size(is_compressed(U))
153 }
154}
155
156impl<T: 'static + ArkScaleMaxEncodedLen, const U: Usage> TypeInfo for ArkScale<T, U> {
157 type Identity = Self;
158
159 fn type_info() -> scale_info::Type {
160 let path = scale_info::Path::new("ArkScale", module_path!());
161 let array_type_def = scale_info::TypeDefArray {
162 len: T::max_encoded_len(is_compressed(U)) as u32,
163 type_param: scale_info::MetaType::new::<u8>(),
164 };
165 let type_def = scale_info::TypeDef::Array(array_type_def);
166 scale_info::Type { path, type_params: Vec::new(), type_def, docs: Vec::new() }
167 }
168}
169
170
171#[derive(Copy,Debug)] pub struct ArkScaleRef<'a, T, const U: Usage = WIRE>(pub &'a T);
173
174impl<'a, T, const U: Usage> Clone for ArkScaleRef<'a, T, U> {
175 fn clone(&self) -> Self {
176 ArkScaleRef(self.0)
177 }
178}
179
180impl<'a, T, const U: Usage> From<&'a T> for ArkScaleRef<'a, T, U> {
181 fn from(t: &'a T) -> ArkScaleRef<'a, T, U> {
182 ArkScaleRef(t)
183 }
184}
185
186impl<'a, T: CanonicalSerialize, const U: Usage> Encode for ArkScaleRef<'a, T, U> {
187 fn size_hint(&self) -> usize {
188 self.0.serialized_size(is_compressed(U))
189 }
190
191 fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
192 self.0
193 .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
194 .expect(OOPS);
195 }
196
197 fn encoded_size(&self) -> usize {
202 self.0.serialized_size(is_compressed(U))
203 }
204}
205
206
207pub fn iter_ark_to_ark_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, SerializationError>
213where
214 T: CanonicalSerialize,
215 B: Borrow<T>,
216 I: IntoIterator<Item = B>,
217{
218 const LL: usize = 8;
219 let mut iter = iter.into_iter();
220 let len = iter.size_hint().0;
221 let first = iter.next();
222 let mut vec = if let Some(ref e) = first {
223 let size = e.borrow().serialized_size(is_compressed(usage));
224 Vec::with_capacity(LL + size * (1 + len))
225 } else {
226 Vec::with_capacity(LL)
227 };
228 vec.extend_from_slice(&[0u8; LL]);
229 if let Some(e) = first {
230 e.borrow()
231 .serialize_with_mode(&mut vec, is_compressed(usage))?;
232 let mut l = 1;
233 for e in iter {
234 e.borrow()
235 .serialize_with_mode(&mut vec, is_compressed(usage))?;
236 l += 1;
237 }
238 debug_assert_eq!(
239 l, len,
240 "Iterator::size_hint underestimate would slow down release execution."
241 );
242 (&mut vec)[0..LL].copy_from_slice(&(l as u64).to_le_bytes());
245 }
246 Ok(vec)
247}
248
/// Like [`iter_ark_to_ark_bytes`], but maps any arkworks serialization
/// error into a SCALE `Error` for use in SCALE-facing code paths.
pub fn iter_ark_to_scale_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, scale::Error>
where
    T: CanonicalSerialize,
    B: Borrow<T>,
    I: IntoIterator<Item = B>,
{
    iter_ark_to_ark_bytes(iter, usage).map_err(ark_error_to_scale_error)
}
263
264
/// Expands to the body of a `scale::Decode` impl that decodes `Self` by
/// round-tripping through `ArkScale<Self>` (i.e. arkworks
/// deserialization drives SCALE decoding). The caller must depend on
/// this crate under the name `ark_scale`.
#[macro_export]
macro_rules! impl_decode_via_ark {
    () => {
        // Decode through ArkScale and unwrap the inner value.
        fn decode<I: ark_scale::scale::Input>(input: &mut I) -> Result<Self, ark_scale::scale::Error> {
            let a: ark_scale::ArkScale<Self> = <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::decode(input) ?;
            Ok(a.0)
        }

        // Skip a value by delegating to ArkScale's skip.
        fn skip<I: ark_scale::scale::Input>(input: &mut I) -> Result<(), ark_scale::scale::Error> {
            <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::skip(input)
        }

        // Fixed encoded size, if ArkScale reports one.
        fn encoded_fixed_size() -> Option<usize> {
            <ark_scale::ArkScale<Self> as ark_scale::scale::Decode>::encoded_fixed_size()
        }
    }
}
295
/// Expands to the body of a `scale::Encode` impl that encodes `Self` by
/// delegating every method to a borrowing `ArkScaleRef<Self>`. The
/// caller must depend on this crate under the name `ark_scale`.
/// NOTE(review): `encode` names `Vec` unqualified, so the call site must
/// have `Vec` in scope (e.g. std prelude or an `ark_std::vec::Vec`
/// import under `no_std`) — confirm against downstream users.
#[macro_export]
macro_rules! impl_encode_via_ark {
    () => {
        fn size_hint(&self) -> usize {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.size_hint()
        }

        fn encode_to<O: ark_scale::scale::Output + ?Sized>(&self, dest: &mut O) {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encode_to(dest)
        }

        fn encode(&self) -> Vec<u8> {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encode()
        }

        fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.using_encoded(f)
        }

        fn encoded_size(&self) -> usize {
            let a: ark_scale::ArkScaleRef<Self> = ark_scale::ArkScaleRef(self);
            a.encoded_size()
        }
    }
}
327
/// Implements `Decode`, `Encode`, and `EncodeLike` for `$t` by routing
/// all three through `ArkScale`/`ArkScaleRef`.
///
/// Fix: the `Decode`/`Encode` trait paths were unqualified, so the macro
/// only compiled if the call site happened to import them — while the
/// `EncodeLike` line was already fully qualified. All three now use
/// `ark_scale::scale::` paths, consistent with the sibling macros.
#[macro_export]
macro_rules! impl_scale_via_ark {
    ($t:ty) => {
        impl ark_scale::scale::Decode for $t {
            ark_scale::impl_decode_via_ark!();
        }

        impl ark_scale::scale::Encode for $t {
            ark_scale::impl_encode_via_ark!();
        }

        impl ark_scale::scale::EncodeLike for $t {}
    };
}

/// Expands to a `max_encoded_len(compress)` method body that measures
/// the serialized size of `$t`'s zero value; the no-argument form
/// applies it to `Self`.
/// NOTE(review): the expansion names `ark_serialize` and `ark_std`
/// unqualified, so callers must have both crates in scope — confirm.
#[macro_export]
macro_rules! impl_body_max_encode_len {
    () => {
        ark_scale::impl_body_max_encode_len!(Self);
    };
    ($t:ty) => {
        #[inline]
        fn max_encoded_len(compress: ark_serialize::Compress) -> usize {
            use ark_serialize::{CanonicalSerialize};
            <$t as ark_std::Zero>::zero().serialized_size(compress)
        }
    };
}