#![forbid(unsafe_code)]
#![allow(clippy::too_many_arguments)]
#![warn(clippy::cast_possible_truncation)]
#[macro_use]
extern crate lazy_static;
pub use snarkvm_console_network_environment as environment;
pub use snarkvm_console_network_environment::*;
mod helpers;
pub use helpers::*;
mod canary_v0;
pub use canary_v0::*;
mod consensus_heights;
pub use consensus_heights::*;
mod mainnet_v0;
pub use mainnet_v0::*;
mod testnet_v0;
pub use testnet_v0::*;
/// A convenience prelude that re-exports the items downstream crates most
/// commonly need when working with a concrete network.
pub mod prelude {
    // Only exported under `wasm`, where the consensus heights may be injected
    // at runtime (see `get_or_init_consensus_version_heights` in this crate).
    #[cfg(feature = "wasm")]
    pub use crate::get_or_init_consensus_version_heights;
    pub use crate::{
        CANARY_V0_CONSENSUS_VERSION_HEIGHTS,
        CanaryV0,
        ConsensusVersion,
        MAINNET_V0_CONSENSUS_VERSION_HEIGHTS,
        MainnetV0,
        Network,
        TEST_CONSENSUS_VERSION_HEIGHTS,
        TESTNET_V0_CONSENSUS_VERSION_HEIGHTS,
        TestnetV0,
        consensus_config_value,
        consensus_config_value_by_version,
        // Re-export the environment prelude so users get both in one import.
        environment::prelude::*,
    };
}
pub use crate::environment::prelude::*;
use snarkvm_algorithms::{
AlgebraicSponge,
crypto_hash::PoseidonSponge,
snark::varuna::{CircuitProvingKey, CircuitVerifyingKey, VarunaHidingMode},
srs::{UniversalProver, UniversalVerifier},
};
use snarkvm_console_algorithms::{BHP512, BHP1024, Poseidon2, Poseidon4, Poseidon8};
use snarkvm_console_collections::merkle_tree::{MerklePath, MerkleTree};
use snarkvm_console_types::{Field, Group, Scalar};
use snarkvm_curves::PairingEngine;
use indexmap::IndexMap;
use std::sync::{Arc, OnceLock};
/// A Merkle tree built from the BHP-1024 and BHP-512 hash functions.
pub type BHPMerkleTree<N, const DEPTH: u8> = MerkleTree<N, BHP1024<N>, BHP512<N>, DEPTH>;
/// A Merkle tree built from the Poseidon-4 and Poseidon-2 hash functions.
pub type PoseidonMerkleTree<N, const DEPTH: u8> = MerkleTree<N, Poseidon4<N>, Poseidon2<N>, DEPTH>;
/// Shorthand for the base field of the network's pairing curve.
type Fq<N> = <<N as Environment>::PairingCurve as PairingEngine>::Fq;
/// The Poseidon sponge over `Fq`, used as the Fiat-Shamir transcript for Varuna.
pub type FiatShamir<N> = PoseidonSponge<Fq<N>, 2, 1>;
/// The parameter type of the Fiat-Shamir sponge.
pub type FiatShamirParameters<N> = <FiatShamir<N> as AlgebraicSponge<Fq<N>, 2>>::Parameters;
/// A Varuna circuit proving key, specialized to the network's pairing curve.
pub(crate) type VarunaProvingKey<N> = CircuitProvingKey<<N as Environment>::PairingCurve, VarunaHidingMode>;
/// A Varuna circuit verifying key, specialized to the network's pairing curve.
pub(crate) type VarunaVerifyingKey<N> = CircuitVerifyingKey<<N as Environment>::PairingCurve>;
/// Process-wide, write-once cache of the consensus version activation heights.
/// Populated on first access by `Network::CONSENSUS_VERSION_HEIGHTS` (or, under
/// `wasm`, by `get_or_init_consensus_version_heights`).
static CONSENSUS_VERSION_HEIGHTS: OnceLock<[(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS]> = OnceLock::new();
/// The `Network` trait bundles every consensus-critical constant, associated
/// type, and cryptographic primitive for one network instantiation
/// (`MainnetV0`, `TestnetV0`, or `CanaryV0`).
///
/// Several limits are tabled as `[(ConsensusVersion, value); N]` pairs: each
/// entry gives the value that takes effect at that consensus version, and the
/// `LATEST_*` helpers return the last (most recent) entry.
pub trait Network:
    'static
    + Environment
    + Copy
    + Clone
    + Debug
    + Eq
    + PartialEq
    + core::hash::Hash
    + Serialize
    + DeserializeOwned
    + for<'a> Deserialize<'a>
    + Send
    + Sync
{
    /// The unique numeric identifier of this network.
    const ID: u16;
    /// The full network name.
    const NAME: &'static str;
    /// The abbreviated network name.
    const SHORT_NAME: &'static str;
    /// The name of the function used for inclusion proofs.
    const INCLUSION_FUNCTION_NAME: &'static str;
    /// The Unix timestamp of the genesis block.
    const GENESIS_TIMESTAMP: i64;
    /// The coinbase target of the genesis block.
    const GENESIS_COINBASE_TARGET: u64;
    /// The proof target of the genesis block.
    const GENESIS_PROOF_TARGET: u64;
    /// log2 of the maximum number of solutions per block.
    const MAX_SOLUTIONS_AS_POWER_OF_TWO: u8 = 2;
    /// The maximum number of solutions per block (derived: `1 << MAX_SOLUTIONS_AS_POWER_OF_TWO`).
    const MAX_SOLUTIONS: usize = 1 << Self::MAX_SOLUTIONS_AS_POWER_OF_TWO;
    /// The initial token supply.
    const STARTING_SUPPLY: u64 = 1_500_000_000_000_000;
    /// The maximum token supply.
    const MAX_SUPPLY: u64 = 5_000_000_000_000_000;
    /// The block height at which the max-supply limit takes effect
    /// (lowered to 5 under the `test` feature for fast testing).
    #[cfg(not(feature = "test"))]
    const MAX_SUPPLY_LIMIT_HEIGHT: u32 = 263_527_685;
    #[cfg(feature = "test")]
    const MAX_SUPPLY_LIMIT_HEIGHT: u32 = 5;
    // --- Fee multipliers and deployment limits ---
    const DEPLOYMENT_FEE_MULTIPLIER: u64 = 1_000;
    const CONSTRUCTOR_FEE_MULTIPLIER: u64 = 100;
    const EXECUTION_STORAGE_FEE_SCALING_FACTOR: u64 = 5000;
    const EXECUTION_STORAGE_PENALTY_THRESHOLD: u64 = 5000;
    const SYNTHESIS_FEE_MULTIPLIER: u64 = 25;
    const MAX_DEPLOYMENT_VARIABLES: u64 = 1 << 21;
    const MAX_DEPLOYMENT_CONSTRAINTS: u64 = 1 << 21;
    const MAX_BATCH_PROOF_INSTANCES: usize = 128;
    /// The maximum fee a single transaction may carry.
    const MAX_FEE: u64 = 1_000_000_000_000_000;
    /// The per-transaction spend limit, versioned: 100M from V1, lowered to 4M at V10.
    const TRANSACTION_SPEND_LIMIT: [(ConsensusVersion, u64); 2] =
        [(ConsensusVersion::V1, 100_000_000), (ConsensusVersion::V10, 4_000_000)];
    /// The compute discount (ARC-0005).
    const ARC_0005_COMPUTE_DISCOUNT: u64 = 25;
    /// The anchor height, derived from the anchor time and block time.
    const ANCHOR_HEIGHT: u32 = Self::ANCHOR_TIME as u32 / Self::BLOCK_TIME as u32;
    /// The anchor time, in seconds.
    const ANCHOR_TIME: u16 = 25;
    /// The expected block time, in seconds.
    const BLOCK_TIME: u16 = 10;
    /// The number of blocks per epoch: one hour's worth of blocks in
    /// production, fixed at 10 under the `test` feature.
    #[cfg(not(feature = "test"))]
    const NUM_BLOCKS_PER_EPOCH: u32 = 3600 / Self::BLOCK_TIME as u32;
    #[cfg(feature = "test")]
    const NUM_BLOCKS_PER_EPOCH: u32 = 10;
    // --- Data layout limits ---
    const MAX_DATA_ENTRIES: usize = 32;
    const MAX_DATA_DEPTH: usize = 32;
    /// The maximum data size in field elements: 128 KiB expressed in bits,
    /// divided by the usable data bits per field element.
    #[allow(clippy::cast_possible_truncation)]
    const MAX_DATA_SIZE_IN_FIELDS: u32 = ((128 * 1024 * 8) / Field::<Self>::SIZE_IN_DATA_BITS) as u32;
    const MIN_STRUCT_ENTRIES: usize = 1;
    const MAX_STRUCT_ENTRIES: usize = Self::MAX_DATA_ENTRIES;
    const MIN_ARRAY_ELEMENTS: usize = 1;
    /// The maximum array length, versioned: 32 from V1, 512 at V11, 2048 at V14.
    const MAX_ARRAY_ELEMENTS: [(ConsensusVersion, usize); 3] =
        [(ConsensusVersion::V1, 32), (ConsensusVersion::V11, 512), (ConsensusVersion::V14, 2048)];
    const MIN_RECORD_ENTRIES: usize = 1;
    // Saturating add keeps this well-defined even for extreme overrides.
    const MAX_RECORD_ENTRIES: usize = Self::MIN_RECORD_ENTRIES.saturating_add(Self::MAX_DATA_ENTRIES);
    /// The maximum program size in bytes, versioned.
    const MAX_PROGRAM_SIZE: [(ConsensusVersion, usize); 2] = [
        (ConsensusVersion::V1, 100_000),
        (ConsensusVersion::V14, 512_000),
    ];
    // --- Program structure limits ---
    const MAX_MAPPINGS: usize = 31;
    const MAX_FUNCTIONS: usize = 31;
    const MAX_STRUCTS: usize = 10 * Self::MAX_FUNCTIONS;
    const MAX_RECORDS: usize = 10 * Self::MAX_FUNCTIONS;
    const MAX_CLOSURES: usize = 2 * Self::MAX_FUNCTIONS;
    const MAX_OPERANDS: usize = Self::MAX_INPUTS;
    const MAX_INSTRUCTIONS: usize = u16::MAX as usize;
    const MAX_COMMANDS: usize = u16::MAX as usize;
    /// The maximum number of mapping writes per finalize, versioned.
    const MAX_WRITES: [(ConsensusVersion, u16); 2] = [(ConsensusVersion::V1, 16), (ConsensusVersion::V14, 32)];
    const MAX_POSITIONS: usize = u8::MAX as usize;
    const MAX_INPUTS: usize = 16;
    const MAX_OUTPUTS: usize = 16;
    const MAX_IMPORTS: usize = 64;
    /// The maximum transaction size in bytes, versioned.
    const MAX_TRANSACTION_SIZE: [(ConsensusVersion, usize); 2] = [
        (ConsensusVersion::V1, 128_000),
        (ConsensusVersion::V14, 768_000),
    ];
    // --- Network-specific identifier types (Bech32-encoded field elements) ---
    type StateRoot: Bech32ID<Field<Self>>;
    type BlockHash: Bech32ID<Field<Self>>;
    type RatificationID: Bech32ID<Field<Self>>;
    type TransactionID: Bech32ID<Field<Self>>;
    type TransitionID: Bech32ID<Field<Self>>;
    type TransmissionChecksum: IntegerType;
    /// The network's canonical consensus-version activation heights.
    /// Access goes through `CONSENSUS_VERSION_HEIGHTS()` below, which caches
    /// (and, under test features, overrides) this table.
    const _CONSENSUS_VERSION_HEIGHTS: [(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS];
    /// The maximum number of certificates per round, versioned.
    const MAX_CERTIFICATES: [(ConsensusVersion, u16); 5];
    /// Returns the activation heights, caching the canonical table in the
    /// process-wide `OnceLock` on first access.
    #[allow(non_snake_case)]
    #[cfg(not(any(test, feature = "test", feature = "test_consensus_heights")))]
    fn CONSENSUS_VERSION_HEIGHTS() -> &'static [(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS] {
        CONSENSUS_VERSION_HEIGHTS.get_or_init(|| Self::_CONSENSUS_VERSION_HEIGHTS)
    }
    /// Test-mode variant: the table is loaded via `load_test_consensus_heights`
    /// instead of the canonical `_CONSENSUS_VERSION_HEIGHTS`.
    #[allow(non_snake_case)]
    #[cfg(any(test, feature = "test", feature = "test_consensus_heights"))]
    fn CONSENSUS_VERSION_HEIGHTS() -> &'static [(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS] {
        CONSENSUS_VERSION_HEIGHTS.get_or_init(load_test_consensus_heights)
    }
    /// The default test activation heights (re-exposed crate constant).
    #[allow(non_snake_case)]
    #[cfg(any(test, feature = "test", feature = "test_consensus_heights"))]
    const TEST_CONSENSUS_VERSION_HEIGHTS: [(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS] =
        TEST_CONSENSUS_VERSION_HEIGHTS;
    /// Returns the consensus version in effect at `seek_height`.
    ///
    /// Binary-searches the table by activation height (assumes heights are
    /// sorted ascending). An exact hit returns that entry's version; otherwise
    /// the version of the preceding entry applies. Errors only if
    /// `seek_height` is below the first entry's height.
    #[allow(non_snake_case)]
    fn CONSENSUS_VERSION(seek_height: u32) -> anyhow::Result<ConsensusVersion> {
        match Self::CONSENSUS_VERSION_HEIGHTS().binary_search_by(|(_, height)| height.cmp(&seek_height)) {
            Ok(index) => Ok(Self::CONSENSUS_VERSION_HEIGHTS()[index].0),
            Err(index) => {
                if index == 0 {
                    Err(anyhow!("Expected consensus version 1 to exist at height 0."))
                } else {
                    // `index` is the insertion point, so the previous entry is
                    // the latest version activated at or before `seek_height`.
                    Ok(Self::CONSENSUS_VERSION_HEIGHTS()[index - 1].0)
                }
            }
        }
    }
    /// Returns the activation height of the given consensus version.
    /// NOTE(review): indexes with `version as usize - 1`, i.e. assumes
    /// `ConsensusVersion` discriminants start at 1 — confirm in its definition.
    #[allow(non_snake_case)]
    fn CONSENSUS_HEIGHT(version: ConsensusVersion) -> Result<u32> {
        Ok(Self::CONSENSUS_VERSION_HEIGHTS().get(version as usize - 1).ok_or(anyhow!("Invalid consensus version"))?.1)
    }
    /// Returns the most recent (last-entry) maximum array length.
    #[allow(non_snake_case)]
    fn LATEST_MAX_ARRAY_ELEMENTS() -> usize {
        Self::MAX_ARRAY_ELEMENTS.last().expect("MAX_ARRAY_ELEMENTS must have at least one entry").1
    }
    /// Returns the most recent (last-entry) maximum certificate count.
    #[allow(non_snake_case)]
    fn LATEST_MAX_CERTIFICATES() -> u16 {
        Self::MAX_CERTIFICATES.last().expect("MAX_CERTIFICATES must have at least one entry").1
    }
    /// Returns the most recent (last-entry) maximum program size.
    #[allow(non_snake_case)]
    fn LATEST_MAX_PROGRAM_SIZE() -> usize {
        Self::MAX_PROGRAM_SIZE.last().expect("MAX_PROGRAM_SIZE must have at least one entry").1
    }
    /// Returns the most recent (last-entry) maximum write count.
    #[allow(non_snake_case)]
    fn LATEST_MAX_WRITES() -> u16 {
        Self::MAX_WRITES.last().expect("MAX_WRITES must have at least one entry").1
    }
    /// Returns the most recent (last-entry) maximum transaction size.
    #[allow(non_snake_case)]
    fn LATEST_MAX_TRANSACTION_SIZE() -> usize {
        Self::MAX_TRANSACTION_SIZE.last().expect("MAX_TRANSACTION_SIZE must have at least one entry").1
    }
    /// Returns the height at which the inclusion circuit upgrade activates.
    #[allow(non_snake_case)]
    fn INCLUSION_UPGRADE_HEIGHT() -> Result<u32>;
    /// Returns the serialized genesis block.
    fn genesis_bytes() -> &'static [u8];
    /// Returns the restrictions list as a string.
    fn restrictions_list_as_str() -> &'static str;
    // --- Credits program keys (v0 = pre-upgrade circuit versions) ---
    fn get_credits_v0_proving_key(function_name: String) -> Result<&'static Arc<VarunaProvingKey<Self>>>;
    fn get_credits_v0_verifying_key(function_name: String) -> Result<&'static Arc<VarunaVerifyingKey<Self>>>;
    fn get_credits_proving_key(function_name: String) -> Result<&'static Arc<VarunaProvingKey<Self>>>;
    fn get_credits_verifying_key(function_name: String) -> Result<&'static Arc<VarunaVerifyingKey<Self>>>;
    // --- Inclusion and translation keys; under `wasm`, proving keys may be
    // supplied as bytes since they cannot be bundled/loaded the usual way ---
    #[cfg(not(feature = "wasm"))]
    fn inclusion_v0_proving_key() -> &'static Arc<VarunaProvingKey<Self>>;
    #[cfg(feature = "wasm")]
    fn inclusion_v0_proving_key(bytes: Option<Vec<u8>>) -> &'static Arc<VarunaProvingKey<Self>>;
    fn inclusion_v0_verifying_key() -> &'static Arc<VarunaVerifyingKey<Self>>;
    #[cfg(not(feature = "wasm"))]
    fn inclusion_proving_key() -> &'static Arc<VarunaProvingKey<Self>>;
    #[cfg(feature = "wasm")]
    fn inclusion_proving_key(bytes: Option<Vec<u8>>) -> &'static Arc<VarunaProvingKey<Self>>;
    fn inclusion_verifying_key() -> &'static Arc<VarunaVerifyingKey<Self>>;
    #[cfg(not(feature = "wasm"))]
    fn translation_credits_proving_key() -> &'static Arc<VarunaProvingKey<Self>>;
    #[cfg(feature = "wasm")]
    fn translation_credits_proving_key(bytes: Option<Vec<u8>>) -> &'static Arc<VarunaProvingKey<Self>>;
    fn translation_credits_verifying_key() -> &'static Arc<VarunaVerifyingKey<Self>>;
    /// Returns the powers of the group generator.
    fn g_powers() -> &'static Vec<Group<Self>>;
    /// Returns `scalar * G` for the group generator `G`.
    fn g_scalar_multiply(scalar: &Scalar<Self>) -> Group<Self>;
    // --- Varuna SNARK machinery ---
    fn varuna_universal_prover() -> &'static UniversalProver<Self::PairingCurve>;
    fn varuna_universal_verifier() -> &'static UniversalVerifier<Self::PairingCurve>;
    fn varuna_fs_parameters() -> &'static FiatShamirParameters<Self>;
    // --- Domain-separation field constants ---
    fn commitment_domain() -> Field<Self>;
    fn encryption_domain() -> Field<Self>;
    fn graph_key_domain() -> Field<Self>;
    fn serial_number_domain() -> Field<Self>;
    // --- Commitments to a field element (BHP and Pedersen variants) ---
    fn commit_bhp256(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    fn commit_bhp512(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    fn commit_bhp768(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    fn commit_bhp1024(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    fn commit_ped64(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    fn commit_ped128(input: &[bool], randomizer: &Scalar<Self>) -> Result<Field<Self>>;
    // --- Commitments to a group element ---
    fn commit_to_group_bhp256(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    fn commit_to_group_bhp512(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    fn commit_to_group_bhp768(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    fn commit_to_group_bhp1024(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    fn commit_to_group_ped64(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    fn commit_to_group_ped128(input: &[bool], randomizer: &Scalar<Self>) -> Result<Group<Self>>;
    // --- Hashes to a field element ---
    fn hash_bhp256(input: &[bool]) -> Result<Field<Self>>;
    fn hash_bhp512(input: &[bool]) -> Result<Field<Self>>;
    fn hash_bhp768(input: &[bool]) -> Result<Field<Self>>;
    fn hash_bhp1024(input: &[bool]) -> Result<Field<Self>>;
    // --- Bit-oriented hashes (Keccak / SHA-3 families) ---
    fn hash_keccak256(input: &[bool]) -> Result<Vec<bool>>;
    fn hash_keccak384(input: &[bool]) -> Result<Vec<bool>>;
    fn hash_keccak512(input: &[bool]) -> Result<Vec<bool>>;
    fn hash_ped64(input: &[bool]) -> Result<Field<Self>>;
    fn hash_ped128(input: &[bool]) -> Result<Field<Self>>;
    // --- Poseidon hashes over field elements ---
    fn hash_psd2(input: &[Field<Self>]) -> Result<Field<Self>>;
    fn hash_psd4(input: &[Field<Self>]) -> Result<Field<Self>>;
    fn hash_psd8(input: &[Field<Self>]) -> Result<Field<Self>>;
    fn hash_sha3_256(input: &[bool]) -> Result<Vec<bool>>;
    fn hash_sha3_384(input: &[bool]) -> Result<Vec<bool>>;
    fn hash_sha3_512(input: &[bool]) -> Result<Vec<bool>>;
    // --- Multi-output Poseidon hashes ---
    fn hash_many_psd2(input: &[Field<Self>], num_outputs: u16) -> Vec<Field<Self>>;
    fn hash_many_psd4(input: &[Field<Self>], num_outputs: u16) -> Vec<Field<Self>>;
    fn hash_many_psd8(input: &[Field<Self>], num_outputs: u16) -> Vec<Field<Self>>;
    // --- Hashes to a group element ---
    fn hash_to_group_bhp256(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_bhp512(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_bhp768(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_bhp1024(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_ped64(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_ped128(input: &[bool]) -> Result<Group<Self>>;
    fn hash_to_group_psd2(input: &[Field<Self>]) -> Result<Group<Self>>;
    fn hash_to_group_psd4(input: &[Field<Self>]) -> Result<Group<Self>>;
    fn hash_to_group_psd8(input: &[Field<Self>]) -> Result<Group<Self>>;
    // --- Hashes to a scalar ---
    fn hash_to_scalar_psd2(input: &[Field<Self>]) -> Result<Scalar<Self>>;
    fn hash_to_scalar_psd4(input: &[Field<Self>]) -> Result<Scalar<Self>>;
    fn hash_to_scalar_psd8(input: &[Field<Self>]) -> Result<Scalar<Self>>;
    // --- Merkle tree construction and path verification ---
    fn merkle_tree_bhp<const DEPTH: u8>(leaves: &[Vec<bool>]) -> Result<BHPMerkleTree<Self, DEPTH>>;
    fn merkle_tree_psd<const DEPTH: u8>(leaves: &[Vec<Field<Self>>]) -> Result<PoseidonMerkleTree<Self, DEPTH>>;
    #[allow(clippy::ptr_arg)]
    fn verify_merkle_path_bhp<const DEPTH: u8>(
        path: &MerklePath<Self, DEPTH>,
        root: &Field<Self>,
        leaf: &Vec<bool>,
    ) -> bool;
    #[allow(clippy::ptr_arg)]
    fn verify_merkle_path_psd<const DEPTH: u8>(
        path: &MerklePath<Self, DEPTH>,
        root: &Field<Self>,
        leaf: &Vec<Field<Self>>,
    ) -> bool;
    // --- Hashers for dynamic records ---
    fn dynamic_record_leaf_hasher() -> &'static Poseidon8<Self>;
    fn dynamic_record_path_hasher() -> &'static Poseidon2<Self>;
}
/// Initializes (on first call) and returns the consensus version heights.
///
/// `heights` is an optional encoded override, handed to
/// `load_test_consensus_heights_inner`. Note it is resolved eagerly on every
/// call, but the result is only stored the first time; once the global cache
/// is initialized, later overrides are ignored and the cached table returned.
#[cfg(feature = "wasm")]
pub fn get_or_init_consensus_version_heights(
    heights: Option<String>,
) -> [(ConsensusVersion, u32); NUM_CONSENSUS_VERSIONS] {
    // Resolve the override eagerly (same call order as before), then publish
    // it into the write-once cache.
    let resolved = load_test_consensus_heights_inner(heights);
    *CONSENSUS_VERSION_HEIGHTS.get_or_init(move || resolved)
}