#![cfg_attr(not(feature = "std"), no_std)]
#![deny(unsafe_code)]
#![doc = include_str!("../README.md")]
use ark_serialize::{
    self, CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Validate,
};
use ark_std::{
    borrow::Borrow,
    fmt,
    io::{self, Read, Write},
    vec::Vec,
};
use parity_scale_codec::{self as scale, Decode, Encode, Input, Output};

/// `Result` alias over `ark_std::io::Error`.
type ArkResult<T> = Result<T, io::Error>;

/// Adapters between SCALE's `Input`/`Output` streams and arkworks'
/// `Read`/`Write` traits, plus error conversion.
pub mod rw;
use rw::*;
#[cfg(feature = "hazmat")]
pub mod hazmat;
#[cfg(test)]
mod tests;

/// Arkworks' serialization mode, packed into two bits:
/// bit 0 set means uncompressed, bit 1 set means unvalidated.
pub type Usage = u8;

/// Pack [`Compress`] and [`Validate`] choices into a [`Usage`] flag.
pub const fn make_usage(compress: Compress, validate: Validate) -> Usage {
    let c = match compress {
        Compress::Yes => 0,
        Compress::No => 1,
    };
    let v = match validate {
        Validate::Yes => 0,
        Validate::No => 2,
    };
    c | v
}

/// Extract the [`Compress`] choice from a [`Usage`] flag.
///
/// Panics if any bit above the low two is set.
pub const fn is_compressed(u: Usage) -> Compress {
    assert!(u < 4);
    if u & 1 == 1 {
        Compress::No
    } else {
        Compress::Yes
    }
}

/// Extract the [`Validate`] choice from a [`Usage`] flag.
///
/// Panics if any bit above the low two is set.
pub const fn is_validated(u: Usage) -> Validate {
    assert!(u < 4);
    if u & 2 == 2 {
        Validate::No
    } else {
        Validate::Yes
    }
}

/// [`Usage`] for untrusted wire data: compressed and validated.
pub const WIRE: Usage = make_usage(Compress::Yes, Validate::Yes);

/// [`Usage`] for trusted host calls: uncompressed and unvalidated, trading
/// larger encodings for faster (de)serialization.
pub const HOST_CALL: Usage = make_usage(Compress::No, Validate::No);
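
// A minimal sketch of the flag round trip, using only items defined above:
// `make_usage` packs the two enums into the low two bits, and
// `is_compressed` / `is_validated` recover them.
#[cfg(test)]
mod usage_flag_example {
    use super::*;

    #[test]
    fn usage_flags_round_trip() {
        let u = make_usage(Compress::No, Validate::Yes);
        assert!(matches!(is_compressed(u), Compress::No));
        assert!(matches!(is_validated(u), Validate::Yes));
        // WIRE packs (Yes, Yes) into 0; HOST_CALL packs (No, No) into 3.
        assert_eq!(WIRE, 0);
        assert_eq!(HOST_CALL, 3);
    }
}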

/// An arkworks type wrapped for SCALE encoding, parameterized by a
/// [`Usage`] flag (defaulting to [`WIRE`]).
pub struct ArkScale<T, const U: Usage = WIRE>(pub T);

impl<T, const U: Usage> From<T> for ArkScale<T, U> {
    fn from(t: T) -> ArkScale<T, U> {
        ArkScale(t)
    }
}

impl<T: CanonicalDeserialize, const U: Usage> Decode for ArkScale<T, U> {
    fn decode<I: Input>(input: &mut I) -> Result<Self, scale::Error> {
        <T as CanonicalDeserialize>::deserialize_with_mode(
            InputAsRead(input),
            is_compressed(U),
            is_validated(U),
        )
        .map(ArkScale)
        .map_err(ark_error_to_scale_error)
    }
}

const OOPS: &str =
    "Arkworks serialization failed, but SCALE cannot handle serialization failures.";

impl<T: CanonicalSerialize, const U: Usage> Encode for ArkScale<T, U> {
    fn size_hint(&self) -> usize {
        self.0.serialized_size(is_compressed(U))
    }

    fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
        self.0
            .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
            .expect(OOPS);
    }

    fn encoded_size(&self) -> usize {
        // Arkworks computes serialized sizes exactly, so this is not a hint.
        self.0.serialized_size(is_compressed(U))
    }
}
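
// A minimal sketch of a SCALE round trip through `ArkScale`. `Vec<u64>`
// stands in for a real arkworks type here, assuming ark-serialize's stock
// `CanonicalSerialize`/`CanonicalDeserialize` impls for primitive integers
// and `Vec`; curve points and field elements work the same way.
#[cfg(test)]
mod ark_scale_example {
    use super::*;

    #[test]
    fn scale_round_trip() {
        let values: Vec<u64> = [1, 2, 3].to_vec();
        // Encode with the default `WIRE` usage, then decode it back.
        let encoded = ArkScale::<Vec<u64>>::from(values.clone()).encode();
        let decoded = ArkScale::<Vec<u64>>::decode(&mut &encoded[..]).unwrap();
        assert_eq!(decoded.0, values);
    }
}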

/// An arkworks type borrowed for SCALE encoding, avoiding a clone of the
/// wrapped value.
pub struct ArkScaleRef<'a, T, const U: Usage = WIRE>(pub &'a T);

impl<'a, T, const U: Usage> From<&'a T> for ArkScaleRef<'a, T, U> {
    fn from(t: &'a T) -> ArkScaleRef<'a, T, U> {
        ArkScaleRef(t)
    }
}

impl<'a, T: CanonicalSerialize, const U: Usage> Encode for ArkScaleRef<'a, T, U> {
    fn size_hint(&self) -> usize {
        self.0.serialized_size(is_compressed(U))
    }

    fn encode_to<O: Output + ?Sized>(&self, dest: &mut O) {
        self.0
            .serialize_with_mode(OutputAsWrite(dest), is_compressed(U))
            .expect(OOPS);
    }

    fn encoded_size(&self) -> usize {
        // Arkworks computes serialized sizes exactly, so this is not a hint.
        self.0.serialized_size(is_compressed(U))
    }
}
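
// A minimal sketch showing that borrowing via `ArkScaleRef` produces the
// same bytes as the owning `ArkScale` wrapper; `Vec<u64>` again stands in
// for a real arkworks type.
#[cfg(test)]
mod ark_scale_ref_example {
    use super::*;

    #[test]
    fn ref_encoding_matches_owned() {
        let values: Vec<u64> = [7, 8, 9].to_vec();
        let by_ref = ArkScaleRef::<Vec<u64>>::from(&values).encode();
        let owned = ArkScale::<Vec<u64>>::from(values).encode();
        assert_eq!(by_ref, owned);
    }
}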

/// Serialize an iterator of arkworks values into bytes: an 8-byte
/// little-endian element count followed by each element's arkworks
/// serialization, back to back.
pub fn iter_ark_to_ark_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, SerializationError>
where
    T: CanonicalSerialize,
    B: Borrow<T>,
    I: IntoIterator<Item = B>,
{
    // Width of the length prefix.
    const LL: usize = 8;
    let mut iter = iter.into_iter();
    let len = iter.size_hint().0;
    let first = iter.next();
    // Size the buffer from the first element's serialized size, which is
    // exact for fixed-size types like curve points and field elements.
    // `len` is only a lower bound, so reserve one element of headroom.
    let mut vec = if let Some(ref e) = first {
        let size = e.borrow().serialized_size(is_compressed(usage));
        Vec::with_capacity(LL + size * (1 + len))
    } else {
        Vec::with_capacity(LL)
    };
    // Reserve the prefix; the true count overwrites these zeros below.
    vec.extend_from_slice(&[0u8; LL]);
    if let Some(e) = first {
        e.borrow()
            .serialize_with_mode(&mut vec, is_compressed(usage))?;
        let mut l = 1;
        for e in iter {
            e.borrow()
                .serialize_with_mode(&mut vec, is_compressed(usage))?;
            l += 1;
        }
        debug_assert_eq!(
            l, len,
            "Iterator::size_hint was inexact, which costs reallocations in release builds."
        );
        vec[0..LL].copy_from_slice(&(l as u64).to_le_bytes());
    }
    Ok(vec)
}

/// Serialize an iterator of arkworks values as in [`iter_ark_to_ark_bytes`],
/// but converting any failure into a SCALE error.
pub fn iter_ark_to_scale_bytes<T, B, I>(iter: I, usage: Usage) -> Result<Vec<u8>, scale::Error>
where
    T: CanonicalSerialize,
    B: Borrow<T>,
    I: IntoIterator<Item = B>,
{
    iter_ark_to_ark_bytes(iter, usage).map_err(ark_error_to_scale_error)
}
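
// A minimal sketch of the byte layout these helpers produce: an 8-byte
// little-endian count, then each element's serialization. `u64` stands in
// for a real arkworks type, assuming ark-serialize's stock impl for
// primitive integers (8 little-endian bytes each).
#[cfg(test)]
mod iter_bytes_example {
    use super::*;

    #[test]
    fn length_prefixed_layout() {
        let bytes = iter_ark_to_ark_bytes::<u64, _, _>([5u64, 6u64], WIRE).unwrap();
        // 8-byte element count prefix...
        assert_eq!(&bytes[0..8], &2u64.to_le_bytes());
        // ...followed by 8 bytes per serialized `u64`.
        assert_eq!(bytes.len(), 8 + 2 * 8);
        // The SCALE-error variant produces identical bytes.
        let scale_bytes = iter_ark_to_scale_bytes::<u64, _, _>([5u64, 6u64], WIRE).unwrap();
        assert_eq!(scale_bytes, bytes);
    }
}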