use std::{
any::{Any, TypeId},
collections::{BTreeMap, BTreeSet, HashMap},
convert::{TryFrom, TryInto},
iter::FromIterator,
net::{Ipv6Addr, SocketAddr, SocketAddrV6},
sync::Arc,
};
use either::Either;
use once_cell::sync::OnceCell;
use serde::Serialize;
use strum::{EnumIter, IntoEnumIterator};
use casper_types::{
account::AccountHash,
bytesrepr::Bytes,
crypto::{sign, PublicKey, Signature},
AccessRights, Approval, ApprovalsHash, AsymmetricType, Block, BlockHash, BlockHeader,
BlockHeaderV1, BlockHeaderV2, BlockHeaderWithSignatures, BlockSignatures, BlockSignaturesV2,
BlockV2, ChainNameDigest, ChunkWithProof, Deploy, DeployHash, DeployId, Digest, EraEndV1,
EraEndV2, EraId, EraReport, ExecutableDeployItem, FinalitySignature, FinalitySignatureId,
FinalitySignatureV2, PackageHash, ProtocolVersion, RewardedSignatures, RuntimeArgs, SecretKey,
SemVer, SingleBlockRewardedSignatures, TimeDiff, Timestamp, Transaction, TransactionHash,
TransactionId, TransactionRuntimeParams, TransactionV1, TransactionV1Hash, URef,
AUCTION_LANE_ID, INSTALL_UPGRADE_LANE_ID, KEY_HASH_LENGTH, MINT_LANE_ID, U512,
};
use crate::{
components::{
consensus::{max_rounds_per_era, utils::ValidatorMap},
fetcher::Tag,
},
protocol::Message,
types::{
transaction::transaction_v1_builder::TransactionV1Builder, BlockExecutionResultsOrChunk,
BlockPayload, FinalizedBlock, InternalEraReport, LegacyDeploy, SyncLeap, TrieOrChunk,
},
};
use casper_storage::block_store::types::ApprovalsHashes;
/// Highest valid Unicode code point; used to build maximum-size strings for specimen generation.
pub(crate) const HIGHEST_UNICODE_CODEPOINT: char = '\u{10FFFF}';
/// Lane ID under which large-Wasm ("standard") transactions are filed in specimen blocks.
const LARGE_WASM_LANE_ID: u8 = 3;
/// A memoization cache for expensive-to-construct specimen values, keyed by `TypeId`.
///
/// `get`/`set` only ever expose the first stored value per type, but the storage
/// keeps a whole `Vec` per type so sequence generators can stash multiple items.
#[derive(Debug, Default)]
pub(crate) struct Cache {
    // Type-indexed storage; the first element of each bucket is the memoized specimen.
    items: HashMap<TypeId, Vec<Box<dyn Any>>>,
}
impl Cache {
    /// Returns the memoized value for `T`, if one has been stored.
    pub(crate) fn get<T: Any>(&mut self) -> Option<&T> {
        match self.get_all::<T>().first() {
            None => None,
            Some(boxed) => Some(boxed.downcast_ref::<T>().expect("cache corrupted")),
        }
    }
    /// Stores `item` unless a value for `T` is already present, then returns a
    /// reference to the cached value (a pre-existing value wins over `item`).
    pub(crate) fn set<T: Any>(&mut self, item: T) -> &T {
        let bucket = self.get_all::<T>();
        if bucket.is_empty() {
            bucket.push(Box::new(item) as Box<dyn Any>);
        }
        self.get::<T>().expect("should not be empty")
    }
    /// Returns the (possibly empty) bucket of stored values for `T`, creating it on demand.
    fn get_all<T: Any>(&mut self) -> &mut Vec<Box<dyn Any>> {
        self.items.entry(TypeId::of::<T>()).or_default()
    }
}
/// An oracle for serialized-size estimates and chainspec-derived parameters.
pub(crate) trait SizeEstimator {
    /// Estimates the serialized size of `val` in bytes.
    fn estimate<T: Serialize>(&self, val: &T) -> usize;
    /// Returns the named parameter, converted to `T`.
    fn parameter<T: TryFrom<i64>>(&self, name: &'static str) -> T;
    /// Returns the named parameter interpreted as a boolean (non-zero means `true`).
    fn parameter_bool(&self, name: &'static str) -> bool {
        self.parameter::<i64>(name) != 0
    }
}
/// A type able to produce the largest possible specimen of itself.
pub(crate) trait LargestSpecimen: Sized {
    /// Returns the largest possible value of `Self`, using `cache` for memoization.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self;
}
/// A type able to produce a set of `count` distinct large values of itself.
pub(crate) trait LargeUniqueSequence<E>
where
    Self: Sized + Ord,
    E: SizeEstimator,
{
    /// Creates a set of exactly `count` distinct large specimens.
    fn large_unique_sequence(estimator: &E, count: usize, cache: &mut Cache) -> BTreeSet<Self>;
}
/// Returns the largest (by estimated serialized size) value produced by applying
/// `generator` to every variant of the discriminant enum `D`.
pub(crate) fn largest_variant<T, D, E, F>(estimator: &E, mut generator: F) -> T
where
    T: Serialize,
    D: IntoEnumIterator,
    E: SizeEstimator,
    F: FnMut(D) -> T,
{
    // Track the best candidate manually; on ties the later variant wins, matching
    // the tie-breaking behavior of `Iterator::max_by_key`.
    let mut best: Option<(usize, T)> = None;
    for discriminant in D::iter() {
        let candidate = generator(discriminant);
        let estimate = estimator.estimate(&candidate);
        match best {
            Some((best_estimate, _)) if estimate < best_estimate => {}
            _ => best = Some((estimate, candidate)),
        }
    }
    let (_, winner) = best.expect("should have at least one candidate");
    winner
}
/// Creates a `Vec` containing `count` largest specimens of `T`.
pub(crate) fn vec_of_largest_specimen<T: LargestSpecimen, E: SizeEstimator>(
    estimator: &E,
    count: usize,
    cache: &mut Cache,
) -> Vec<T> {
    (0..count)
        .map(|_| T::largest_specimen(estimator, cache))
        .collect()
}
/// Creates a `Vec` of largest specimens whose length is read from the chainspec
/// parameter `parameter_name`; negative parameter values are clamped to zero.
pub(crate) fn vec_prop_specimen<T: LargestSpecimen, E: SizeEstimator>(
    estimator: &E,
    parameter_name: &'static str,
    cache: &mut Cache,
) -> Vec<T> {
    let count: i32 = estimator.parameter(parameter_name);
    vec_of_largest_specimen(estimator, count.max(0) as usize, cache)
}
/// Builds a `BTreeMap` with a parameter-driven number of distinct keys (negative
/// counts are clamped to zero), each mapped to a largest specimen of the value type.
pub(crate) fn btree_map_distinct_from_prop<K, V, E>(
    estimator: &E,
    parameter_name: &'static str,
    cache: &mut Cache,
) -> BTreeMap<K, V>
where
    V: LargestSpecimen,
    K: Ord + LargeUniqueSequence<E> + Sized,
    E: SizeEstimator,
{
    let count: i32 = estimator.parameter(parameter_name);
    let keys = K::large_unique_sequence(estimator, count.max(0) as usize, cache);
    let mut map = BTreeMap::new();
    for key in keys {
        map.insert(key, V::largest_specimen(estimator, cache));
    }
    map
}
pub(crate) fn btree_set_distinct_from_prop<T, E>(
estimator: &E,
parameter_name: &'static str,
cache: &mut Cache,
) -> BTreeSet<T>
where
T: Ord + LargeUniqueSequence<E> + Sized,
E: SizeEstimator,
{
let mut count = estimator.parameter(parameter_name);
if count < 0 {
count = 0;
}
T::large_unique_sequence(estimator, count as usize, cache)
}
/// Builds a `BTreeSet` of exactly `count` distinct large specimens of `T`.
pub(crate) fn btree_set_distinct<T, E>(
    estimator: &E,
    count: usize,
    cache: &mut Cache,
) -> BTreeSet<T>
where
    T: Ord + LargeUniqueSequence<E> + Sized,
    E: SizeEstimator,
{
    T::large_unique_sequence(estimator, count, cache)
}
impl LargestSpecimen for SocketAddr {
    // An IPv6 socket address is the larger of the two address families.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        SocketAddr::V6(SocketAddrV6::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for SocketAddrV6 {
    // Maximal address, port, flowinfo and scope ID (in `SocketAddrV6::new` order).
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        SocketAddrV6::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for Ipv6Addr {
    // All segments set to the maximum, i.e. ffff:ffff:...:ffff.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        Ipv6Addr::new(
            0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff,
        )
    }
}
// For fixed-size primitives, the largest specimen is simply the maximum value.
impl LargestSpecimen for bool {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        true
    }
}
impl LargestSpecimen for u8 {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        u8::MAX
    }
}
impl LargestSpecimen for u16 {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        u16::MAX
    }
}
impl LargestSpecimen for u32 {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        u32::MAX
    }
}
impl LargestSpecimen for u64 {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        u64::MAX
    }
}
impl LargestSpecimen for u128 {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        u128::MAX
    }
}
impl<T: LargestSpecimen + Copy, const N: usize> LargestSpecimen for [T; N] {
    // Requires `T: Copy` so a single specimen can be replicated across the array.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        [LargestSpecimen::largest_specimen(estimator, cache); N]
    }
}
impl<T> LargestSpecimen for Option<T>
where
    T: LargestSpecimen,
{
    // `Some` is always at least as large as `None`.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        Some(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl<T> LargestSpecimen for Box<T>
where
    T: LargestSpecimen,
{
    // Boxing does not change the serialized form; delegate to the inner type.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        Box::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl<T> LargestSpecimen for Arc<T>
where
    T: LargestSpecimen,
{
    // Same as `Box`: the wrapper is transparent for serialization purposes.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        Arc::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
// Tuples are largest element-wise.
impl<T1, T2> LargestSpecimen for (T1, T2)
where
    T1: LargestSpecimen,
    T2: LargestSpecimen,
{
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        (
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl<T1, T2, T3> LargestSpecimen for (T1, T2, T3)
where
    T1: LargestSpecimen,
    T2: LargestSpecimen,
    T3: LargestSpecimen,
{
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        (
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl<L, R> LargestSpecimen for Either<L, R>
where
    L: LargestSpecimen + Serialize,
    R: LargestSpecimen + Serialize,
{
    /// Generates both sides and keeps whichever serializes larger; ties favor `Left`.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let left = L::largest_specimen(estimator, cache);
        let right = R::largest_specimen(estimator, cache);
        if estimator.estimate(&right) > estimator.estimate(&left) {
            Either::Right(right)
        } else {
            Either::Left(left)
        }
    }
}
impl LargestSpecimen for ProtocolVersion {
    // Built from the largest `SemVer` specimen.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        ProtocolVersion::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for URef {
    // All-maximal address bytes; access rights are fixed to READ_ADD_WRITE.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        URef::new(
            [LargestSpecimen::largest_specimen(estimator, cache); 32],
            AccessRights::READ_ADD_WRITE,
        )
    }
}
impl LargestSpecimen for AccountHash {
    // 32 maximal bytes.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        AccountHash::new([LargestSpecimen::largest_specimen(estimator, cache); 32])
    }
}
impl LargestSpecimen for SemVer {
    // Maximal major/minor/patch components.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        SemVer {
            major: LargestSpecimen::largest_specimen(estimator, cache),
            minor: LargestSpecimen::largest_specimen(estimator, cache),
            patch: LargestSpecimen::largest_specimen(estimator, cache),
        }
    }
}
impl LargestSpecimen for PublicKey {
    // Takes the first element of a one-element unique sequence, which is memoized
    // in `cache` by the `LargeUniqueSequence` impl below.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        PublicKey::large_unique_sequence(estimator, 1, cache)
            .into_iter()
            .next()
            .unwrap()
    }
}
impl<E> LargeUniqueSequence<E> for PublicKey
where
    E: SizeEstimator,
{
    /// Creates `count` distinct public keys, memoizing generated keys in `cache`
    /// so that repeated calls extend (rather than regenerate) the sequence.
    fn large_unique_sequence(estimator: &E, count: usize, cache: &mut Cache) -> BTreeSet<Self> {
        let data_vec = cache.get_all::<Self>();
        // Generates, for the given seed, the key variant that serializes largest.
        fn generate_key<E: SizeEstimator>(estimator: &E, seed: usize) -> PublicKey {
            #[derive(Copy, Clone, Debug, EnumIter)]
            enum PublicKeyDiscriminants {
                System,
                Ed25519,
                Secp256k1,
            }
            largest_variant::<PublicKey, PublicKeyDiscriminants, _, _>(estimator, |variant| {
                // Hash the seed so every variant gets valid, deterministic key material.
                let seed_bytes = Digest::hash(seed.to_be_bytes()).value();
                match variant {
                    PublicKeyDiscriminants::System => PublicKey::system(),
                    PublicKeyDiscriminants::Ed25519 => {
                        let ed25519_sec = SecretKey::ed25519_from_bytes(seed_bytes)
                            .expect("unable to create ed25519 key from seed bytes");
                        PublicKey::from(&ed25519_sec)
                    }
                    PublicKeyDiscriminants::Secp256k1 => {
                        let secp256k1_sec = SecretKey::secp256k1_from_bytes(seed_bytes)
                            .expect("unable to create secp256k1 key from seed bytes");
                        PublicKey::from(&secp256k1_sec)
                    }
                }
            })
        }
        // Extend the cached sequence until it holds at least `count` keys. The seed
        // is the current length, so entries are stable across successive calls.
        while data_vec.len() < count {
            let seed = data_vec.len();
            let key = generate_key(estimator, seed);
            data_vec.push(Box::new(key));
        }
        debug_assert!(data_vec.len() >= count);
        // The first `count` cached keys form the requested set; distinct seeds
        // should guarantee distinct keys (checked by the debug assert below).
        let output_set: BTreeSet<Self> = data_vec[..count]
            .iter()
            .map(|item| item.downcast_ref::<Self>().expect("cache corrupted"))
            .cloned()
            .collect();
        debug_assert_eq!(output_set.len(), count);
        output_set
    }
}
impl<E> LargeUniqueSequence<E> for Digest
where
    E: SizeEstimator,
{
    /// Produces `count` distinct digests by hashing the integers `0..count`.
    ///
    /// Uses big-endian encoding (matching the seed hashing in the `PublicKey`
    /// sequence generator above) so the generated sequence is identical on every
    /// platform, rather than depending on native endianness as `to_ne_bytes` does.
    fn large_unique_sequence(_estimator: &E, count: usize, _cache: &mut Cache) -> BTreeSet<Self> {
        (0..count).map(|n| Digest::hash(n.to_be_bytes())).collect()
    }
}
impl LargestSpecimen for Signature {
    /// Returns the largest of the signature variants, memoized in `cache` since
    /// key generation and signing are comparatively expensive.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        if let Some(item) = cache.get::<Self>() {
            return *item;
        }
        #[derive(Debug, Copy, Clone, EnumIter)]
        enum SignatureDiscriminants {
            System,
            Ed25519,
            Secp256k1,
        }
        *cache.set(largest_variant::<Self, SignatureDiscriminants, _, _>(
            estimator,
            |variant| match variant {
                SignatureDiscriminants::System => Signature::system(),
                SignatureDiscriminants::Ed25519 => {
                    // Sign a fixed one-byte message; only the signature's size matters.
                    let ed25519_sec = &SecretKey::generate_ed25519().expect("a correct secret");
                    sign([0_u8], ed25519_sec, &ed25519_sec.into())
                }
                SignatureDiscriminants::Secp256k1 => {
                    let secp256k1_sec = &SecretKey::generate_secp256k1().expect("a correct secret");
                    sign([0_u8], secp256k1_sec, &secp256k1_sec.into())
                }
            },
        ))
    }
}
impl LargestSpecimen for EraId {
    // Built from the maximal u64.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        EraId::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for Timestamp {
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        // Largest value that still formats as a human-readable date (per the
        // constant's name); NOTE(review): unit (seconds vs millis) not verifiable
        // here — confirm against `Timestamp::from`.
        const MAX_TIMESTAMP_HUMAN_READABLE: u64 = 253_402_300_799;
        Timestamp::from(MAX_TIMESTAMP_HUMAN_READABLE)
    }
}
impl LargestSpecimen for TimeDiff {
    // Built from the maximal u64 millisecond count.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        TimeDiff::from_millis(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for BlockHeaderV1 {
    /// All positional fields of `BlockHeaderV1::new` are largest specimens; the
    /// trailing `OnceCell` is pre-populated so its contents count towards the size.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        BlockHeaderV1::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            OnceCell::with_value(LargestSpecimen::largest_specimen(estimator, cache)),
        )
    }
}
impl LargestSpecimen for BlockHeaderV2 {
    /// Same approach as `BlockHeaderV1`, for the V2 constructor's argument list.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        BlockHeaderV2::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            OnceCell::with_value(LargestSpecimen::largest_specimen(estimator, cache)),
        )
    }
}
impl LargestSpecimen for BlockHeader {
    /// Generates both header versions and keeps the larger; ties favor V2.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let v1 = BlockHeaderV1::largest_specimen(estimator, cache);
        let v2 = BlockHeaderV2::largest_specimen(estimator, cache);
        if estimator.estimate(&v1) > estimator.estimate(&v2) {
            BlockHeader::V1(v1)
        } else {
            BlockHeader::V2(v2)
        }
    }
}
/// A largest-specimen wrapper for a V2 block header that carries no era end.
pub(crate) struct BlockHeaderWithoutEraEnd(BlockHeaderV2);
impl BlockHeaderWithoutEraEnd {
    /// Unwraps into a versioned `BlockHeader`.
    pub(crate) fn into_block_header(self) -> BlockHeader {
        BlockHeader::V2(self.0)
    }
}
impl LargestSpecimen for BlockHeaderWithoutEraEnd {
    /// Like the `BlockHeaderV2` specimen, but with `None` for the sixth argument
    /// (presumably the era end, per this type's name — confirm against
    /// `BlockHeaderV2::new`).
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        BlockHeaderWithoutEraEnd(BlockHeaderV2::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            None,
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            OnceCell::with_value(LargestSpecimen::largest_specimen(estimator, cache)),
        ))
    }
}
impl LargestSpecimen for EraEndV1 {
    /// Largest era report plus a `validator_count`-sized map.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        EraEndV1::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            btree_map_distinct_from_prop(estimator, "validator_count", cache),
        )
    }
}
impl LargestSpecimen for EraEndV2 {
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        // One rewards entry per validator, each holding two largest amounts.
        let rewards = {
            let count = estimator.parameter("validator_count");
            PublicKey::large_unique_sequence(estimator, count, cache)
                .into_iter()
                .map(|key| (key, vec_of_largest_specimen(estimator, 2, cache)))
                .collect()
        };
        // All remaining collections are sized by `validator_count`; the trailing
        // `1u8` is a fixed final argument (presumably the next era's gas price —
        // confirm against `EraEndV2::new`).
        EraEndV2::new(
            vec_prop_specimen(estimator, "validator_count", cache),
            vec_prop_specimen(estimator, "validator_count", cache),
            btree_map_distinct_from_prop(estimator, "validator_count", cache),
            rewards,
            1u8,
        )
    }
}
impl LargestSpecimen for InternalEraReport {
    // Both validator lists are sized by `validator_count`.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        InternalEraReport {
            equivocators: vec_prop_specimen(estimator, "validator_count", cache),
            inactive_validators: vec_prop_specimen(estimator, "validator_count", cache),
        }
    }
}
impl LargestSpecimen for BlockHeaderWithSignatures {
    // Largest header paired with a largest signature collection.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        BlockHeaderWithSignatures::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for BlockSignatures {
    /// Builds a V2 signature collection populated with `validator_count` distinct
    /// public keys, each paired with a largest-specimen signature.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let mut block_signatures = BlockSignaturesV2::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        );
        for (public_key, signature) in
            btree_map_distinct_from_prop(estimator, "validator_count", cache)
        {
            block_signatures.insert_signature(public_key, signature);
        }
        BlockSignatures::V2(block_signatures)
    }
}
impl LargestSpecimen for BlockV2 {
fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
let mint_hashes = vec![
TransactionHash::largest_specimen(estimator, cache);
estimator.parameter::<usize>("max_mint_per_block")
];
let auction_hashes = vec![
TransactionHash::largest_specimen(estimator, cache);
estimator.parameter::<usize>("max_auctions_per_block")
];
let install_upgrade_hashes =
vec![
TransactionHash::largest_specimen(estimator, cache);
estimator.parameter::<usize>("max_install_upgrade_transactions_per_block")
];
let standard_hashes = vec![
TransactionHash::largest_specimen(estimator, cache);
estimator
.parameter::<usize>("max_standard_transactions_per_block")
];
let transactions = {
let mut ret = BTreeMap::new();
ret.insert(MINT_LANE_ID, mint_hashes);
ret.insert(AUCTION_LANE_ID, auction_hashes);
ret.insert(INSTALL_UPGRADE_LANE_ID, install_upgrade_hashes);
ret.insert(3, standard_hashes);
ret
};
BlockV2::new(
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
transactions,
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
LargestSpecimen::largest_specimen(estimator, cache),
)
}
}
impl LargestSpecimen for Block {
    // The largest specimen is modeled with the V2 variant.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        Block::V2(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for FinalizedBlock {
    // All positional fields of `FinalizedBlock::new` are largest specimens.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        FinalizedBlock::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for FinalitySignature {
    // The largest specimen is modeled with the V2 variant.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        FinalitySignature::V2(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for FinalitySignatureV2 {
    // All positional fields of `FinalitySignatureV2::new` are largest specimens.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        FinalitySignatureV2::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for FinalitySignatureId {
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        FinalitySignatureId::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for EraReport<PublicKey> {
    // All collections are sized by the `validator_count` parameter.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        EraReport::new(
            vec_prop_specimen(estimator, "validator_count", cache),
            btree_map_distinct_from_prop(estimator, "validator_count", cache),
            vec_prop_specimen(estimator, "validator_count", cache),
        )
    }
}
impl LargestSpecimen for BlockHash {
    // Wraps the largest digest.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        BlockHash::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for ChainNameDigest {
    // A digest has a fixed size regardless of input, so the empty name suffices.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        ChainNameDigest::from_chain_name("")
    }
}
impl LargestSpecimen for Digest {
    // Fixed-size output, so hashing the empty string suffices.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        Digest::hash("")
    }
}
impl LargestSpecimen for BlockPayload {
    /// Builds a payload with every lane filled to its chainspec maximum, where each
    /// entry carries a full approvals set.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        // Take the largest transaction and replace its approvals with a distinct
        // set sized by `average_approvals_per_transaction_in_block`.
        let large_txn = match Transaction::largest_specimen(estimator, cache) {
            Transaction::Deploy(deploy) => {
                Transaction::Deploy(deploy.with_approvals(btree_set_distinct_from_prop(
                    estimator,
                    "average_approvals_per_transaction_in_block",
                    cache,
                )))
            }
            Transaction::V1(v1) => {
                Transaction::V1(v1.with_approvals(btree_set_distinct_from_prop(
                    estimator,
                    "average_approvals_per_transaction_in_block",
                    cache,
                )))
            }
        };
        // The payload stores (hash, approvals) pairs, cloned once per lane slot.
        let large_txn_hash_with_approvals = (large_txn.hash(), large_txn.approvals());
        let mut transactions = BTreeMap::new();
        transactions.insert(
            MINT_LANE_ID,
            vec![
                large_txn_hash_with_approvals.clone();
                estimator.parameter::<usize>("max_mint_per_block")
            ],
        );
        transactions.insert(
            AUCTION_LANE_ID,
            vec![
                large_txn_hash_with_approvals.clone();
                estimator.parameter::<usize>("max_auctions_per_block")
            ],
        );
        transactions.insert(
            LARGE_WASM_LANE_ID,
            vec![
                large_txn_hash_with_approvals.clone();
                estimator.parameter::<usize>("max_standard_transactions_per_block")
            ],
        );
        transactions.insert(
            INSTALL_UPGRADE_LANE_ID,
            vec![
                large_txn_hash_with_approvals;
                estimator.parameter::<usize>("max_install_upgrade_transactions_per_block")
            ],
        );
        BlockPayload::new(
            transactions,
            vec_prop_specimen(estimator, "max_accusations_per_block", cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for RewardedSignatures {
    // One largest per-block signature entry for each block in the maximum
    // signature-rewards window.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        RewardedSignatures::new(
            std::iter::repeat(LargestSpecimen::largest_specimen(estimator, cache))
                .take(estimator.parameter("signature_rewards_max_delay")),
        )
    }
}
impl LargestSpecimen for SingleBlockRewardedSignatures {
    // A fully-set bitfield: every one of `validator_count` validators signed.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, _cache: &mut Cache) -> Self {
        SingleBlockRewardedSignatures::pack(
            std::iter::repeat(1).take(estimator.parameter("validator_count")),
        )
    }
}
impl LargestSpecimen for DeployHash {
    // Wraps the largest digest.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        DeployHash::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for Approval {
    // Largest public key plus largest signature.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        Approval::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl<E> LargeUniqueSequence<E> for Approval
where
    Self: Sized + Ord,
    E: SizeEstimator,
{
    /// Creates `count` distinct approvals by pairing `count` distinct public keys
    /// with largest-specimen signatures.
    fn large_unique_sequence(estimator: &E, count: usize, cache: &mut Cache) -> BTreeSet<Self> {
        let mut approvals = BTreeSet::new();
        for public_key in PublicKey::large_unique_sequence(estimator, count, cache) {
            let signature = LargestSpecimen::largest_specimen(estimator, cache);
            approvals.insert(Approval::new(public_key, signature));
        }
        approvals
    }
}
impl LargestSpecimen for (TransactionHash, Option<BTreeSet<Approval>>) {
    /// Largest hash-plus-approvals pair, with the approvals set sized for a full block.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        // NOTE(review): these parameter names ("max_transfers_per_block",
        // "max_standard_per_block") differ from the lane-based parameter names used
        // elsewhere in this file — confirm the estimator still defines them.
        let max_items = estimator.parameter::<usize>("max_transfers_per_block")
            + estimator.parameter::<usize>("max_standard_per_block");
        // Both candidates are built identically; `>=` makes the first win on ties.
        let transaction = (
            TransactionHash::largest_specimen(estimator, cache),
            Some(btree_set_distinct(estimator, max_items, cache)),
        );
        let v1 = (
            TransactionHash::largest_specimen(estimator, cache),
            Some(btree_set_distinct(estimator, max_items, cache)),
        );
        if estimator.estimate(&transaction) > estimator.estimate(&v1) {
            transaction
        } else {
            v1
        }
    }
}
impl LargestSpecimen for Deploy {
    /// Builds a maximal signed deploy: largest header fields, a maximum-length
    /// chain name, and a `Transfer` item with empty runtime args.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let transfer = ExecutableDeployItem::Transfer {
            args: Default::default(),
        };
        Deploy::new_signed(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
            Default::default(),
            largest_chain_name(estimator),
            LargestSpecimen::largest_specimen(estimator, cache),
            transfer,
            &LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for DeployId {
    // Largest deploy hash paired with the largest approvals hash.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        DeployId::new(
            LargestSpecimen::largest_specimen(estimator, cache),
            LargestSpecimen::largest_specimen(estimator, cache),
        )
    }
}
impl LargestSpecimen for ApprovalsHash {
    // Both candidates are built the same way; `>=` makes the first win on ties.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let deploy_ah = ApprovalsHash(LargestSpecimen::largest_specimen(estimator, cache));
        let txn_v1_ah = ApprovalsHash(LargestSpecimen::largest_specimen(estimator, cache));
        if estimator.estimate(&deploy_ah) >= estimator.estimate(&txn_v1_ah) {
            deploy_ah
        } else {
            txn_v1_ah
        }
    }
}
impl LargestSpecimen for TransactionV1Hash {
    // Wraps the largest digest.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        TransactionV1Hash::new(LargestSpecimen::largest_specimen(estimator, cache))
    }
}
impl LargestSpecimen for TransactionV1 {
    /// Builds a signed V1 transaction carrying session bytes of the maximum
    /// transaction size plus a small fixed margin (40 bytes).
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let max_size_with_margin =
            estimator.parameter::<i32>("max_transaction_size").max(0) as usize + 10 * 4;
        TransactionV1Builder::new_session(
            true,
            Bytes::from(vec_of_largest_specimen(
                estimator,
                max_size_with_margin,
                cache,
            )),
            TransactionRuntimeParams::VmCasperV1,
        )
        .with_secret_key(&LargestSpecimen::largest_specimen(estimator, cache))
        .with_timestamp(LargestSpecimen::largest_specimen(estimator, cache))
        .with_ttl(LargestSpecimen::largest_specimen(estimator, cache))
        .with_chain_name(largest_chain_name(estimator))
        .build()
        .unwrap()
    }
}
// The three impls below compare the legacy-deploy and V1 forms and keep whichever
// serializes larger; `>=` makes the deploy variant win on ties.
impl LargestSpecimen for TransactionId {
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let deploy_hash =
            TransactionHash::Deploy(LargestSpecimen::largest_specimen(estimator, cache));
        let v1_hash = TransactionHash::V1(LargestSpecimen::largest_specimen(estimator, cache));
        let deploy = TransactionId::new(
            deploy_hash,
            LargestSpecimen::largest_specimen(estimator, cache),
        );
        let v1 = TransactionId::new(v1_hash, LargestSpecimen::largest_specimen(estimator, cache));
        if estimator.estimate(&deploy) >= estimator.estimate(&v1) {
            deploy
        } else {
            v1
        }
    }
}
impl LargestSpecimen for Transaction {
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let deploy = Transaction::Deploy(LargestSpecimen::largest_specimen(estimator, cache));
        let v1 = Transaction::V1(LargestSpecimen::largest_specimen(estimator, cache));
        if estimator.estimate(&deploy) >= estimator.estimate(&v1) {
            deploy
        } else {
            v1
        }
    }
}
impl LargestSpecimen for TransactionHash {
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let deploy_hash =
            TransactionHash::Deploy(LargestSpecimen::largest_specimen(estimator, cache));
        let v1_hash = TransactionHash::V1(LargestSpecimen::largest_specimen(estimator, cache));
        if estimator.estimate(&deploy_hash) >= estimator.estimate(&v1_hash) {
            deploy_hash
        } else {
            v1_hash
        }
    }
}
impl LargestSpecimen for ExecutableDeployItem {
    /// Module bytes of the maximum transaction size plus a small fixed margin
    /// (40 bytes), with empty runtime args.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let max_size_with_margin =
            estimator.parameter::<i32>("max_transaction_size").max(0) as usize + 10 * 4;
        ExecutableDeployItem::ModuleBytes {
            module_bytes: Bytes::from(vec_of_largest_specimen(
                estimator,
                max_size_with_margin,
                cache,
            )),
            args: RuntimeArgs::new(),
        }
    }
}
impl LargestSpecimen for U512 {
    // The maximum representable 512-bit value.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        U512::max_value()
    }
}
impl LargestSpecimen for PackageHash {
    // All-maximal hash bytes.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        PackageHash::new([LargestSpecimen::largest_specimen(estimator, cache); KEY_HASH_LENGTH])
    }
}
impl LargestSpecimen for ChunkWithProof {
    // A full-size chunk of 0xFF bytes at index 0.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        ChunkWithProof::new(&[0xFF; Self::CHUNK_SIZE_BYTES], 0)
            .expect("the chunk to be correctly created")
    }
}
impl LargestSpecimen for SecretKey {
    // A deterministic Ed25519 key, used wherever a concrete signing key is needed.
    fn largest_specimen<E: SizeEstimator>(_estimator: &E, _cache: &mut Cache) -> Self {
        SecretKey::ed25519_from_bytes([u8::MAX; 32]).expect("valid secret key bytes")
    }
}
impl<T: LargestSpecimen> LargestSpecimen for ValidatorMap<T> {
    /// Builds a validator map containing `validator_count` largest specimens.
    fn largest_specimen<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Self {
        let max_validators: usize = estimator.parameter("validator_count");
        ValidatorMap::from_iter(
            (0..max_validators).map(|_| T::largest_specimen(estimator, cache)),
        )
    }
}
/// Returns the largest possible fetcher get-request `Message` across all tags.
pub(crate) fn largest_get_request<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Message {
    largest_variant::<Message, Tag, _, _>(estimator, |variant| {
        // Each arm builds a request for the largest ID of the tagged item type.
        match variant {
            Tag::Transaction => Message::new_get_request::<Transaction>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::LegacyDeploy => Message::new_get_request::<LegacyDeploy>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::Block => Message::new_get_request::<Block>(&LargestSpecimen::largest_specimen(
                estimator, cache,
            )),
            Tag::BlockHeader => Message::new_get_request::<BlockHeader>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::TrieOrChunk => Message::new_get_request::<TrieOrChunk>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::FinalitySignature => Message::new_get_request::<FinalitySignature>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::SyncLeap => Message::new_get_request::<SyncLeap>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::ApprovalsHashes => Message::new_get_request::<ApprovalsHashes>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::BlockExecutionResults => Message::new_get_request::<BlockExecutionResultsOrChunk>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
        }
        .expect("did not expect new_get_request from largest deploy to fail")
    })
}
/// Returns the largest possible fetcher get-response `Message` across all tags.
pub(crate) fn largest_get_response<E: SizeEstimator>(estimator: &E, cache: &mut Cache) -> Message {
    largest_variant::<Message, Tag, _, _>(estimator, |variant| {
        // Each arm builds a response carrying the largest item of the tagged type.
        match variant {
            Tag::Transaction => Message::new_get_response::<Transaction>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::LegacyDeploy => Message::new_get_response::<LegacyDeploy>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::Block => Message::new_get_response::<Block>(&LargestSpecimen::largest_specimen(
                estimator, cache,
            )),
            Tag::BlockHeader => Message::new_get_response::<BlockHeader>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::TrieOrChunk => Message::new_get_response::<TrieOrChunk>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::FinalitySignature => Message::new_get_response::<FinalitySignature>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::SyncLeap => Message::new_get_response::<SyncLeap>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::ApprovalsHashes => Message::new_get_response::<ApprovalsHashes>(
                &LargestSpecimen::largest_specimen(estimator, cache),
            ),
            Tag::BlockExecutionResults => {
                Message::new_get_response::<BlockExecutionResultsOrChunk>(
                    &LargestSpecimen::largest_specimen(estimator, cache),
                )
            }
        }
        .expect("did not expect new_get_response from largest deploy to fail")
    })
}
/// Returns a maximum-length chain name, as bounded by the `network_name_limit`
/// chainspec parameter.
fn largest_chain_name<E: SizeEstimator>(estimator: &E) -> String {
    let limit: usize = estimator.parameter("network_name_limit");
    string_max_characters(limit)
}
/// Returns a string of `max_char` copies of the highest Unicode code point,
/// maximizing the encoded size per character.
fn string_max_characters(max_char: usize) -> String {
    (0..max_char).map(|_| HIGHEST_UNICODE_CODEPOINT).collect()
}
/// Computes the maximum number of consensus rounds per era from the estimator's
/// `minimum_era_height`, `era_duration_ms` and `minimum_round_length_ms` parameters.
pub(crate) fn estimator_max_rounds_per_era(estimator: &impl SizeEstimator) -> usize {
    let minimum_era_height = estimator.parameter("minimum_era_height");
    let era_duration_ms = TimeDiff::from_millis(estimator.parameter("era_duration_ms"));
    let minimum_round_length_ms =
        TimeDiff::from_millis(estimator.parameter("minimum_round_length_ms"));
    max_rounds_per_era(minimum_era_height, era_duration_ms, minimum_round_length_ms)
        .try_into()
        .expect("to be a valid `usize`")
}
#[cfg(test)]
mod tests {
    use super::Cache;
    /// Checks basic memoization: the first `set` per type wins and later `set`
    /// calls for the same type are ignored.
    #[test]
    fn memoization_cache_simple() {
        let mut cache = Cache::default();
        assert!(cache.get::<u32>().is_none());
        assert!(cache.get::<String>().is_none());
        cache.set::<u32>(1234);
        assert_eq!(cache.get::<u32>(), Some(&1234));
        cache.set::<String>("a string is not copy".to_owned());
        assert_eq!(
            cache.get::<String>().map(String::as_str),
            Some("a string is not copy")
        );
        assert_eq!(cache.get::<u32>(), Some(&1234));
        // A second `set` for an already-cached type must not overwrite the value.
        cache.set::<String>("this should not overwrite".to_owned());
        assert_eq!(
            cache.get::<String>().map(String::as_str),
            Some("a string is not copy")
        );
    }
}