#![deny(missing_docs)]
use std::pin::Pin;
use bounded_vec::BoundedVec;
use codec::{Decode, Encode, Error as CodecError, Input};
use futures::Future;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use polkadot_primitives::{
BlakeTwo256, BlockNumber, CandidateCommitments, CandidateHash, ChunkIndex, CollatorPair,
CommittedCandidateReceiptError, CommittedCandidateReceiptV2 as CommittedCandidateReceipt,
CompactStatement, CoreIndex, EncodeAs, Hash, HashT, HeadData, Id as ParaId,
PersistedValidationData, SessionIndex, Signed, UncheckedSigned, ValidationCode,
ValidationCodeHash, MAX_CODE_SIZE, MAX_POV_SIZE,
};
pub use sp_consensus_babe::{
AllowedSlots as BabeAllowedSlots, BabeEpochConfiguration, Epoch as BabeEpoch,
Randomness as BabeRandomness,
};
pub use polkadot_parachain_primitives::primitives::{
BlockData, HorizontalMessages, UpwardMessages,
};
pub mod approval;
pub mod disputes;
pub use disputes::{
dispute_is_inactive, CandidateVotes, DisputeMessage, DisputeMessageCheckError, DisputeStatus,
InvalidDisputeVote, SignedDisputeStatement, Timestamp, UncheckedDisputeMessage,
ValidDisputeVote, ACTIVE_DURATION_SECS,
};
/// The version string of this node implementation.
pub const NODE_VERSION: &'static str = "1.22.0";
/// Maximum size in bytes of a single node in an erasure-chunk Merkle [`Proof`]
/// (512-byte payload plus headroom — TODO confirm exact derivation of the slack).
const MERKLE_NODE_MAX_SIZE: usize = 512 + 100;
/// Maximum number of nodes (proof depth) allowed in a [`Proof`].
const MERKLE_PROOF_MAX_DEPTH: usize = 8;
/// Maximum permitted size of decompressed validation code: four times the
/// maximum (compressed) code size, guarding against decompression bombs.
#[deprecated(
note = "`VALIDATION_CODE_BOMB_LIMIT` will be removed. Use `validation_code_bomb_limit`
runtime API to retrieve the value from the runtime"
)]
pub const VALIDATION_CODE_BOMB_LIMIT: usize = (MAX_CODE_SIZE * 4u32) as usize;
/// Maximum permitted size of a decompressed PoV: four times the maximum
/// (compressed) PoV size, guarding against decompression bombs.
pub const POV_BOMB_LIMIT: usize = (MAX_POV_SIZE * 4u32) as usize;
/// Number of blocks after finalization for which a disputed candidate is
/// still considered relevant (see [`dispute_is_inactive`] usage elsewhere).
pub const DISPUTE_CANDIDATE_LIFETIME_AFTER_FINALIZATION: BlockNumber = 10;
/// Maximum tolerated lag (in blocks) between the finalized and best block
/// — NOTE(review): exact consumer semantics not visible here; confirm at call sites.
pub const MAX_FINALITY_LAG: u32 = 500;
/// A non-zero number of sessions forming a rolling session window
/// (e.g. [`DISPUTE_WINDOW`]). Construct via [`new_session_window_size!`],
/// which rejects zero at compile time.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct SessionWindowSize(SessionIndex);
#[macro_export]
macro_rules! new_session_window_size {
(0) => {
compile_error!("Must be non zero");
};
(0_u32) => {
compile_error!("Must be non zero");
};
(0 as u32) => {
compile_error!("Must be non zero");
};
(0 as _) => {
compile_error!("Must be non zero");
};
($l:literal) => {
SessionWindowSize::unchecked_new($l as _)
};
}
pub const DISPUTE_WINDOW: SessionWindowSize = new_session_window_size!(6);
impl SessionWindowSize {
/// Get the inner session-count value of this window size.
pub fn get(self) -> SessionIndex {
self.0
}
/// Create a new instance WITHOUT checking the non-zero invariant.
/// Hidden: only intended as the expansion target of
/// [`new_session_window_size!`], which performs the check.
#[doc(hidden)]
pub const fn unchecked_new(size: SessionIndex) -> Self {
Self(size)
}
}
pub type BlockWeight = u32;
/// A statement a validator makes about a parachain candidate.
///
/// Codec indices start at 1 — TODO confirm why index 0 is skipped
/// (presumably reserved/removed variant for wire compatibility).
#[derive(Clone, PartialEq, Eq, Encode, Decode)]
pub enum Statement {
/// A statement seconding a candidate, carrying the full committed receipt.
#[codec(index = 1)]
Seconded(CommittedCandidateReceipt),
/// A statement that a candidate, referenced by hash, is valid.
#[codec(index = 2)]
Valid(CandidateHash),
}
impl std::fmt::Debug for Statement {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Statement::Seconded(seconded) => write!(f, "Seconded: {:?}", seconded.descriptor),
Statement::Valid(hash) => write!(f, "Valid: {:?}", hash),
}
}
}
impl Statement {
    /// The hash of the candidate this statement refers to.
    pub fn candidate_hash(&self) -> CandidateHash {
        match self {
            Self::Seconded(receipt) => receipt.hash(),
            Self::Valid(hash) => *hash,
        }
    }

    /// Transform into the compact, hash-only form used for signing checks.
    pub fn to_compact(&self) -> CompactStatement {
        match self {
            Self::Seconded(receipt) => CompactStatement::Seconded(receipt.hash()),
            Self::Valid(hash) => CompactStatement::Valid(*hash),
        }
    }

    /// Attach persisted validation data, producing a [`StatementWithPVD`].
    /// The PVD rides along only on `Seconded`; `Valid` is unchanged.
    pub fn supply_pvd(self, pvd: PersistedValidationData) -> StatementWithPVD {
        match self {
            Self::Seconded(receipt) => StatementWithPVD::Seconded(receipt, pvd),
            Self::Valid(hash) => StatementWithPVD::Valid(hash),
        }
    }
}
impl From<&'_ Statement> for CompactStatement {
// Delegates to `to_compact`: strips the heavy payload down to hashes.
fn from(stmt: &Statement) -> Self {
stmt.to_compact()
}
}
impl EncodeAs<CompactStatement> for Statement {
// A full `Statement` is signed/verified over the encoding of its compact
// form, so signatures on either representation are interchangeable.
fn encode_as(&self) -> Vec<u8> {
self.to_compact().encode()
}
}
/// A [`Statement`] with the persisted validation data attached to the
/// `Seconded` variant. Not `Encode`/`Decode`: the PVD is node-local context,
/// not part of the wire format (no codec derives here, unlike `Statement`).
#[derive(Clone, PartialEq, Eq)]
pub enum StatementWithPVD {
/// A seconding statement: full receipt plus its persisted validation data.
Seconded(CommittedCandidateReceipt, PersistedValidationData),
/// A validity statement, referencing the candidate by hash.
Valid(CandidateHash),
}
impl std::fmt::Debug for StatementWithPVD {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
StatementWithPVD::Seconded(seconded, _) => {
write!(f, "Seconded: {:?}", seconded.descriptor)
},
StatementWithPVD::Valid(hash) => write!(f, "Valid: {:?}", hash),
}
}
}
impl StatementWithPVD {
    /// The hash of the candidate this statement refers to.
    pub fn candidate_hash(&self) -> CandidateHash {
        match self {
            Self::Seconded(receipt, _) => receipt.hash(),
            Self::Valid(hash) => *hash,
        }
    }

    /// Transform into the compact, hash-only form; the PVD is dropped.
    pub fn to_compact(&self) -> CompactStatement {
        match self {
            Self::Seconded(receipt, _) => CompactStatement::Seconded(receipt.hash()),
            Self::Valid(hash) => CompactStatement::Valid(*hash),
        }
    }

    /// Discard the persisted validation data, yielding a plain [`Statement`].
    pub fn drop_pvd(self) -> Statement {
        match self {
            Self::Seconded(receipt, _) => Statement::Seconded(receipt),
            Self::Valid(hash) => Statement::Valid(hash),
        }
    }

    /// Convert a signed statement-with-PVD into a signed plain statement.
    /// Sound because both types `encode_as` the same compact form, so the
    /// signature remains valid.
    pub fn drop_pvd_from_signed(signed: SignedFullStatementWithPVD) -> SignedFullStatement {
        signed
            .convert_to_superpayload_with(StatementWithPVD::drop_pvd)
            .expect("persisted_validation_data doesn't affect encode_as; qed")
    }

    /// Convert a signed statement-with-PVD into a signed compact statement,
    /// relying on the shared `encode_as` representation.
    pub fn signed_to_compact(signed: SignedFullStatementWithPVD) -> Signed<CompactStatement> {
        signed
            .convert_to_superpayload_with(|stmt| stmt.to_compact())
            .expect("doesn't affect encode_as; qed")
    }
}
impl From<&'_ StatementWithPVD> for CompactStatement {
// Delegates to `to_compact`: drops both the receipt payload and the PVD.
fn from(stmt: &StatementWithPVD) -> Self {
stmt.to_compact()
}
}
impl EncodeAs<CompactStatement> for StatementWithPVD {
// Signed over the compact encoding, matching `Statement::encode_as`, so
// signatures are interchangeable across all three statement forms.
fn encode_as(&self) -> Vec<u8> {
self.to_compact().encode()
}
}
/// A checked signed full statement (signature covers the compact encoding).
pub type SignedFullStatement = Signed<Statement, CompactStatement>;
/// A signed full statement whose signature has not yet been verified.
pub type UncheckedSignedFullStatement = UncheckedSigned<Statement, CompactStatement>;
/// A checked signed statement with persisted validation data attached.
pub type SignedFullStatementWithPVD = Signed<StatementWithPVD, CompactStatement>;
/// The reason a candidate was found invalid during validation.
#[derive(Debug)]
pub enum InvalidCandidate {
/// Candidate execution failed; carries a human-readable description.
ExecutionError(String),
/// The outputs produced by validation were invalid.
InvalidOutputs,
/// Execution timed out.
Timeout,
/// Validation parameters were too large (size in bytes).
ParamsTooLarge(u64),
/// Validation code was too large (size in bytes).
CodeTooLarge(u64),
/// The PoV failed to decompress.
PoVDecompressionFailure,
/// Validation function returned a bad result.
BadReturn,
/// The relay parent was invalid.
BadParent,
/// The PoV hash did not match the expected one.
PoVHashMismatch,
/// A signature check failed.
BadSignature,
/// The para head hash did not match.
ParaHeadHashMismatch,
/// The validation code hash did not match.
CodeHashMismatch,
/// The commitments hash did not match.
CommitmentsHashMismatch,
/// The scheduling session was invalid — TODO confirm exact semantics.
InvalidSchedulingSession,
/// The relay-parent session was invalid — TODO confirm exact semantics.
InvalidRelayParentSession,
/// The candidate's UMP signals were invalid; carries the receipt error.
InvalidUMPSignals(CommittedCandidateReceiptError),
}
/// Outcome of validating a candidate.
#[derive(Debug)]
pub enum ValidationResult {
/// Candidate is valid: carries the resulting commitments and the
/// persisted validation data used during validation.
Valid(CandidateCommitments, PersistedValidationData),
/// Candidate is invalid, with the concrete reason.
Invalid(InvalidCandidate),
}
/// A Proof-of-Validity: the opaque block data a collator supplies so
/// validators can check a candidate.
#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)]
pub struct PoV {
/// The raw block data.
pub block_data: BlockData,
}
impl PoV {
/// Blake2-256 hash of this PoV (computed over its SCALE encoding via
/// `hash_of`).
pub fn hash(&self) -> Hash {
BlakeTwo256::hash_of(self)
}
}
/// A PoV that is either raw or already compressed.
/// Not compiled for `target_os = "unknown"` (e.g. wasm builds).
#[derive(Clone, Encode, Decode)]
#[cfg(not(target_os = "unknown"))]
pub enum MaybeCompressedPoV {
/// An uncompressed PoV.
Raw(PoV),
/// An already-compressed PoV.
Compressed(PoV),
}
#[cfg(not(target_os = "unknown"))]
impl std::fmt::Debug for MaybeCompressedPoV {
    /// Prints the variant name and the payload size instead of the raw bytes.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            MaybeCompressedPoV::Raw(pov) =>
                write!(f, "{} PoV ({} bytes)", "Raw", pov.block_data.0.len()),
            MaybeCompressedPoV::Compressed(pov) =>
                write!(f, "{} PoV ({} bytes)", "Compressed", pov.block_data.0.len()),
        }
    }
}
#[cfg(not(target_os = "unknown"))]
impl MaybeCompressedPoV {
pub fn into_compressed(self) -> PoV {
match self {
Self::Raw(raw) => maybe_compress_pov(raw),
Self::Compressed(compressed) => compressed,
}
}
}
/// The output of building a parachain collation.
#[derive(Debug, Clone, Encode, Decode)]
#[cfg(not(target_os = "unknown"))]
pub struct Collation<BlockNumber = polkadot_primitives::BlockNumber> {
/// Messages destined to be sent upward to the relay chain.
pub upward_messages: UpwardMessages,
/// Messages destined to other parachains (horizontal messages).
pub horizontal_messages: HorizontalMessages,
/// New validation code, if the collation updates it.
pub new_validation_code: Option<ValidationCode>,
/// The head data produced by this collation.
pub head_data: HeadData,
/// The proof of validity, possibly already compressed.
pub proof_of_validity: MaybeCompressedPoV,
/// The number of downward messages processed.
pub processed_downward_messages: u32,
/// The HRMP watermark block number.
pub hrmp_watermark: BlockNumber,
}
/// Signal delivered back to a collator once its collation was seconded.
#[derive(Debug)]
#[cfg(not(target_os = "unknown"))]
pub struct CollationSecondedSignal {
/// The relay-chain block hash the candidate was scheduled against
/// — NOTE(review): exact semantics vs. relay parent not visible here.
pub scheduling_parent: Hash,
/// The signed `Seconded` statement for the collation.
pub statement: SignedFullStatement,
}
/// Result of a collator's collation function.
#[cfg(not(target_os = "unknown"))]
pub struct CollationResult {
/// The collation that was produced.
pub collation: Collation,
/// Optional one-shot sender notified when the collation is seconded.
pub result_sender: Option<futures::channel::oneshot::Sender<CollationSecondedSignal>>,
}
#[cfg(not(target_os = "unknown"))]
impl CollationResult {
pub fn into_inner(
self,
) -> (Collation, Option<futures::channel::oneshot::Sender<CollationSecondedSignal>>) {
(self.collation, self.result_sender)
}
}
/// The collation function: given a relay-parent hash and persisted
/// validation data, asynchronously produces an optional [`CollationResult`]
/// (`None` if no collation could be built). Boxed, `Send + Sync`.
#[cfg(not(target_os = "unknown"))]
pub type CollatorFn = Box<
dyn Fn(
Hash,
&PersistedValidationData,
) -> Pin<Box<dyn Future<Output = Option<CollationResult>> + Send>>
+ Send
+ Sync,
>;
/// Configuration for the collation generator.
#[cfg(not(target_os = "unknown"))]
pub struct CollationGenerationConfig {
/// The collator's keypair, used for signing.
pub key: CollatorPair,
/// The collation function; `None` when collations are submitted
/// out-of-band (see [`SubmitCollationParams`]).
pub collator: Option<CollatorFn>,
/// The parachain this collator collates for.
pub para_id: ParaId,
}
#[cfg(not(target_os = "unknown"))]
impl std::fmt::Debug for CollationGenerationConfig {
// Hand-written because `CollatorPair` and `CollatorFn` are not `Debug`
// (and the key should not be printed anyway); elides all fields.
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "CollationGenerationConfig {{ ... }}")
}
}
/// Parameters for submitting an out-of-band, pre-built collation.
#[derive(Debug)]
pub struct SubmitCollationParams {
/// The relay parent the collation is built on top of.
pub relay_parent: Hash,
/// The collation itself.
pub collation: Collation,
/// The parent head data the collation builds on.
pub parent_head: HeadData,
/// Hash of the validation code the collation was built against.
pub validation_code_hash: ValidationCodeHash,
/// Optional one-shot sender notified when the collation is seconded.
pub result_sender: Option<futures::channel::oneshot::Sender<CollationSecondedSignal>>,
/// The core index the collation targets.
pub core_index: CoreIndex,
/// Optional scheduling parent — TODO confirm relation to `relay_parent`.
pub scheduling_parent: Option<Hash>,
}
/// The data made available for a candidate: the PoV plus the persisted
/// validation data needed to re-validate it.
#[derive(Clone, Encode, Decode, PartialEq, Eq, Debug)]
pub struct AvailableData {
/// The Proof-of-Validity (shared via `Arc` to avoid copying the payload).
pub pov: std::sync::Arc<PoV>,
/// The persisted validation data for the candidate.
pub validation_data: PersistedValidationData,
}
/// A Merkle proof for an erasure chunk: a bounded list (depth at most
/// `MERKLE_PROOF_MAX_DEPTH`) of bounded nodes (each at most
/// `MERKLE_NODE_MAX_SIZE` bytes, at least 1 byte).
#[derive(PartialEq, Eq, Clone, Debug, Hash)]
pub struct Proof(BoundedVec<BoundedVec<u8, 1, MERKLE_NODE_MAX_SIZE>, 1, MERKLE_PROOF_MAX_DEPTH>);
impl Proof {
    /// Iterate over the proof nodes as byte slices.
    pub fn iter(&self) -> impl Iterator<Item = &[u8]> {
        self.0.iter().map(|node| node.as_slice())
    }

    /// A minimal single-node dummy proof, useful for tests.
    pub fn dummy_proof() -> Proof {
        // One single-byte node trivially satisfies both bounds, so the
        // unwraps cannot fail.
        let node = BoundedVec::from_vec(vec![0]).unwrap();
        Proof(BoundedVec::from_vec(vec![node]).unwrap())
    }
}
/// Errors raised when constructing a [`Proof`] from unchecked input.
#[derive(thiserror::Error, Debug)]
pub enum MerkleProofError {
/// The proof had more nodes than `MERKLE_PROOF_MAX_DEPTH`.
#[error("Merkle max proof depth exceeded {0} > {} .", MERKLE_PROOF_MAX_DEPTH)]
MerkleProofDepthExceeded(usize),
/// A proof node was larger than `MERKLE_NODE_MAX_SIZE` bytes.
#[error("Merkle node max size exceeded {0} > {} .", MERKLE_NODE_MAX_SIZE)]
MerkleProofNodeSizeExceeded(usize),
}
impl TryFrom<Vec<Vec<u8>>> for Proof {
    type Error = MerkleProofError;

    /// Build a [`Proof`] from raw proof nodes, enforcing the depth and
    /// per-node size bounds.
    ///
    /// # Errors
    ///
    /// - [`MerkleProofError::MerkleProofDepthExceeded`] if there are more
    ///   than `MERKLE_PROOF_MAX_DEPTH` nodes.
    /// - [`MerkleProofError::MerkleProofNodeSizeExceeded`] if a node exceeds
    ///   `MERKLE_NODE_MAX_SIZE` bytes. NOTE: an empty node also fails the
    ///   lower bound (1) and is reported under this same variant.
    fn try_from(input: Vec<Vec<u8>>) -> Result<Self, Self::Error> {
        if input.len() > MERKLE_PROOF_MAX_DEPTH {
            return Err(Self::Error::MerkleProofDepthExceeded(input.len()));
        }
        // Pre-size: the depth check above bounds the element count.
        let mut out = Vec::with_capacity(input.len());
        for element in input {
            let length = element.len();
            let data: BoundedVec<u8, 1, MERKLE_NODE_MAX_SIZE> = BoundedVec::from_vec(element)
                .map_err(|_| Self::Error::MerkleProofNodeSizeExceeded(length))?;
            out.push(data);
        }
        // Typo fixed in the proof message ("deterined" -> "determined").
        Ok(Proof(BoundedVec::from_vec(out).expect("Buffer size is determined above. qed")))
    }
}
impl Decode for Proof {
fn decode<I: Input>(value: &mut I) -> Result<Self, CodecError> {
let temp: Vec<Vec<u8>> = Decode::decode(value)?;
let mut out = Vec::new();
for element in temp.into_iter() {
let bounded_temp: Result<BoundedVec<u8, 1, MERKLE_NODE_MAX_SIZE>, CodecError> =
BoundedVec::from_vec(element)
.map_err(|_| "Inner node exceeds maximum node size.".into());
out.push(bounded_temp?);
}
BoundedVec::from_vec(out)
.map(Self)
.map_err(|_| "Merkle proof depth exceeds maximum trie depth".into())
}
}
impl Encode for Proof {
// Upper-bound hint: every node at max size times the max depth.
fn size_hint(&self) -> usize {
MERKLE_NODE_MAX_SIZE * MERKLE_PROOF_MAX_DEPTH
}
// Encodes as a plain `Vec<Vec<u8>>`, matching what `Decode` expects;
// the bounds are a node-side invariant, not part of the wire format.
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
let temp = self.0.iter().map(|v| v.as_vec()).collect::<Vec<_>>();
temp.using_encoded(f)
}
}
impl Serialize for Proof {
// Serializes the SCALE encoding as an opaque byte string; the matching
// `Deserialize` impl decodes it back through `Decode`.
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_bytes(&self.encode())
}
}
impl<'de> Deserialize<'de> for Proof {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = Vec::<u8>::deserialize(deserializer)?;
let mut slice = s.as_slice();
Decode::decode(&mut slice).map_err(de::Error::custom)
}
}
/// A chunk of erasure-encoded availability data, with its Merkle proof.
#[derive(PartialEq, Eq, Clone, Encode, Decode, Serialize, Deserialize, Debug, Hash)]
pub struct ErasureChunk {
/// The raw chunk bytes.
pub chunk: Vec<u8>,
/// The index of this chunk within the erasure coding.
pub index: ChunkIndex,
/// Merkle proof of the chunk's inclusion.
pub proof: Proof,
}
impl ErasureChunk {
/// Borrow the chunk's Merkle proof.
pub fn proof(&self) -> &Proof {
&self.proof
}
}
/// Compress a PoV if compression is beneficial, bounded by
/// [`POV_BOMB_LIMIT`]; on failure the original bytes are returned unchanged
/// (best-effort, never errors).
#[cfg(not(target_os = "unknown"))]
pub fn maybe_compress_pov(pov: PoV) -> PoV {
    let BlockData(raw) = pov.block_data;
    let bytes = sp_maybe_compressed_blob::compress_weakly(&raw, POV_BOMB_LIMIT).unwrap_or(raw);
    PoV { block_data: BlockData(bytes) }
}