use alloc::boxed::Box;
use alloc::collections::{BTreeMap, BTreeSet};
use alloc::sync::Arc;
use alloc::vec::Vec;
use miden_protocol::Word;
use miden_protocol::account::delta::AccountUpdateDetails;
use miden_protocol::account::{AccountCode, AccountId, StorageSlot, StorageSlotContent};
use miden_protocol::address::NetworkId;
use miden_protocol::block::{BlockHeader, BlockNumber, ProvenBlock};
use miden_protocol::crypto::merkle::mmr::{Forest, Mmr, MmrProof};
use miden_protocol::crypto::merkle::smt::SmtProof;
use miden_protocol::note::{NoteHeader, NoteId, NoteScript, NoteTag, Nullifier};
use miden_protocol::transaction::{ProvenTransaction, TransactionInputs};
use miden_testing::{MockChain, MockChainNote};
use miden_tx::utils::sync::RwLock;
use crate::Client;
use crate::rpc::domain::account::{
AccountDetails,
AccountProof,
AccountStorageDetails,
AccountStorageMapDetails,
AccountStorageRequirements,
AccountUpdateSummary,
AccountVaultDetails,
FetchedAccount,
StorageMapEntries,
StorageMapEntry,
};
use crate::rpc::domain::account_vault::{AccountVaultInfo, AccountVaultUpdate};
use crate::rpc::domain::note::{
CommittedNote,
CommittedNoteMetadata,
FetchedNote,
NoteSyncBlock,
NoteSyncInfo,
};
use crate::rpc::domain::nullifier::NullifierUpdate;
use crate::rpc::domain::storage_map::{StorageMapInfo, StorageMapUpdate};
use crate::rpc::domain::sync::ChainMmrInfo;
use crate::rpc::domain::transaction::{TransactionRecord, TransactionsInfo};
use crate::rpc::{AccountStateAt, NodeRpcClient, RpcError, RpcStatusInfo};
/// Type alias for [`Client`] used in tests that run against the mock RPC API below.
pub type MockClient<AUTH> = Client<AUTH>;
/// In-memory mock of the node RPC API, backed by a shared [`MockChain`].
///
/// Cloning is cheap: all state lives behind `Arc<RwLock<..>>`, so clones observe
/// the same chain.
#[derive(Clone)]
pub struct MockRpcApi {
/// Account state commitments recorded per proven block
/// (block number -> account ID -> final state commitment).
account_commitment_updates: Arc<RwLock<BTreeMap<BlockNumber, BTreeMap<AccountId, Word>>>>,
/// The underlying mock chain holding blocks, notes, and account state.
pub mock_chain: Arc<RwLock<MockChain>>,
/// Entry/asset count above which account-proof responses are flagged as oversized.
oversize_threshold: usize,
}
impl Default for MockRpcApi {
    /// Builds the API over a brand-new, empty [`MockChain`].
    fn default() -> Self {
        let chain = MockChain::new();
        Self::new(chain)
    }
}
impl MockRpcApi {
/// Maximum number of blocks covered by a single paginated sync response page.
const PAGINATION_BLOCK_LIMIT: u32 = 5;
/// Creates a mock RPC API over the given chain, seeding the per-block
/// account-commitment index from the chain's already-proven blocks.
pub fn new(mock_chain: MockChain) -> Self {
    let commitments = build_account_updates(&mock_chain);
    Self {
        account_commitment_updates: Arc::new(RwLock::new(commitments)),
        mock_chain: Arc::new(RwLock::new(mock_chain)),
        // Default threshold for flagging oversized storage maps / vaults.
        oversize_threshold: 1000,
    }
}
#[must_use]
pub fn with_oversize_threshold(mut self, threshold: usize) -> Self {
self.oversize_threshold = threshold;
self
}
/// Returns a snapshot of the chain's current MMR.
pub fn get_mmr(&self) -> Mmr {
    let chain = self.mock_chain.read();
    chain.blockchain().as_mmr().clone()
}
/// Returns the block number of the latest proven block (the chain tip).
pub fn get_chain_tip_block_num(&self) -> BlockNumber {
    let chain = self.mock_chain.read();
    chain.latest_block_header().block_num()
}
pub fn prove_block(&self) {
let proven_block = self.mock_chain.write().prove_next_block().unwrap();
let mut account_commitment_updates = self.account_commitment_updates.write();
let block_num = proven_block.header().block_num();
let updates: BTreeMap<AccountId, Word> = proven_block
.body()
.updated_accounts()
.iter()
.map(|update| (update.account_id(), update.final_state_commitment()))
.collect();
if !updates.is_empty() {
account_commitment_updates.insert(block_num, updates);
}
}
/// Fetches the header of the block at `block_num` from the mock chain.
fn get_block_by_num(&self, block_num: BlockNumber) -> BlockHeader {
    let chain = self.mock_chain.read();
    chain.block_header(block_num.as_usize())
}
/// Builds one page of vault-sync results for `account_id` over the block range
/// `(block_from, page_end]`, where the page end is `block_from` plus
/// [`Self::PAGINATION_BLOCK_LIMIT`], capped at the requested end block and at
/// the chain tip.
///
/// NOTE(review): only assets *added* to the vault delta are reported; removed
/// assets are not surfaced — confirm this matches the real endpoint's behavior.
fn get_sync_account_vault_request(
&self,
block_from: BlockNumber,
block_to: Option<BlockNumber>,
account_id: AccountId,
) -> AccountVaultInfo {
let chain_tip = self.get_chain_tip_block_num();
// Cap the requested end block at the actual chain tip.
let target_block = block_to.unwrap_or(chain_tip).min(chain_tip);
// End of this page: at most PAGINATION_BLOCK_LIMIT blocks past `block_from`.
let page_end_block: BlockNumber = (block_from.as_u32() + Self::PAGINATION_BLOCK_LIMIT)
.min(target_block.as_u32())
.into();
let mut updates = vec![];
for block in self.mock_chain.read().proven_blocks() {
let block_number = block.header().block_num();
// Only blocks strictly after `block_from` and within this page are scanned.
if block_number <= block_from || block_number > page_end_block {
continue;
}
for update in block
.body()
.updated_accounts()
.iter()
.filter(|block_acc_update| block_acc_update.account_id() == account_id)
{
// Only delta-style updates carry vault changes we can report.
let AccountUpdateDetails::Delta(account_delta) = update.details().clone() else {
continue;
};
let vault_delta = account_delta.vault();
for asset in vault_delta.added_assets() {
let account_vault_update = AccountVaultUpdate {
block_num: block_number,
asset: Some(asset),
vault_key: asset.vault_key(),
};
updates.push(account_vault_update);
}
}
}
AccountVaultInfo {
chain_tip,
// Reported so the caller can resume pagination after this block.
block_number: page_end_block,
updates,
}
}
/// Collects transaction records for the given accounts over `(block_from, block_to]`
/// (upper bound defaults to the chain tip).
///
/// Output notes and erased note IDs are left empty — the mock does not track
/// them here.
fn get_sync_transactions_request(
    &self,
    block_from: BlockNumber,
    block_to: Option<BlockNumber>,
    account_ids: &[AccountId],
) -> TransactionsInfo {
    let chain_tip = self.get_chain_tip_block_num();
    let block_to = block_to.unwrap_or(chain_tip);
    let mut transaction_records = Vec::new();
    let chain = self.mock_chain.read();
    for block in chain.proven_blocks() {
        let block_number = block.header().block_num();
        let in_range = block_number > block_from && block_number <= block_to;
        if !in_range {
            continue;
        }
        let matching = block
            .body()
            .transactions()
            .as_slice()
            .iter()
            .filter(|tx| account_ids.contains(&tx.account_id()));
        for transaction_header in matching {
            transaction_records.push(TransactionRecord {
                block_num: block_number,
                transaction_header: transaction_header.clone(),
                output_notes: Vec::new(),
                erased_output_note_ids: Vec::new(),
            });
        }
    }
    TransactionsInfo {
        chain_tip,
        block_num: block_to,
        transaction_records,
    }
}
/// Builds one page of storage-map-sync results for `account_id` over the block
/// range `(block_from, page_end]`, where the page end is `block_from` plus
/// [`Self::PAGINATION_BLOCK_LIMIT`], capped at the requested end block and at
/// the chain tip.
fn get_sync_storage_maps_request(
&self,
block_from: BlockNumber,
block_to: Option<BlockNumber>,
account_id: AccountId,
) -> StorageMapInfo {
let chain_tip = self.get_chain_tip_block_num();
// Cap the requested end block at the actual chain tip.
let target_block = block_to.unwrap_or(chain_tip).min(chain_tip);
// End of this page: at most PAGINATION_BLOCK_LIMIT blocks past `block_from`.
let page_end_block: BlockNumber = (block_from.as_u32() + Self::PAGINATION_BLOCK_LIMIT)
.min(target_block.as_u32())
.into();
let mut updates = vec![];
for block in self.mock_chain.read().proven_blocks() {
let block_number = block.header().block_num();
// Only blocks strictly after `block_from` and within this page are scanned.
if block_number <= block_from || block_number > page_end_block {
continue;
}
for update in block
.body()
.updated_accounts()
.iter()
.filter(|block_acc_update| block_acc_update.account_id() == account_id)
{
// Only delta-style updates carry storage changes we can report.
let AccountUpdateDetails::Delta(account_delta) = update.details().clone() else {
continue;
};
let storage_delta = account_delta.storage();
// Flatten every (slot, key, value) change into one update record.
for (slot_name, map_delta) in storage_delta.maps() {
for (key, value) in map_delta.entries() {
let storage_map_info = StorageMapUpdate {
block_num: block_number,
slot_name: slot_name.clone(),
key: *key,
value: *value,
};
updates.push(storage_map_info);
}
}
}
}
StorageMapInfo {
chain_tip,
// Reported so the caller can resume pagination after this block.
block_number: page_end_block,
updates,
}
}
/// Returns a copy of every committed note currently known to the mock chain.
pub fn get_available_notes(&self) -> Vec<MockChainNote> {
    let chain = self.mock_chain.read();
    let mut notes = Vec::new();
    for note in chain.committed_notes().values() {
        notes.push(note.clone());
    }
    notes
}
/// Returns copies of all committed notes that are public.
pub fn get_public_available_notes(&self) -> Vec<MockChainNote> {
    let chain = self.mock_chain.read();
    let mut notes = Vec::new();
    for note in chain.committed_notes().values() {
        if let MockChainNote::Public(..) = note {
            notes.push(note.clone());
        }
    }
    notes
}
/// Returns copies of all committed notes that are private.
pub fn get_private_available_notes(&self) -> Vec<MockChainNote> {
    let chain = self.mock_chain.read();
    let mut notes = Vec::new();
    for note in chain.committed_notes().values() {
        if let MockChainNote::Private(..) = note {
            notes.push(note.clone());
        }
    }
    notes
}
/// Proves blocks until the chain tip has advanced by `num_blocks`.
pub fn advance_blocks(&self, num_blocks: u32) {
    let target = self.get_chain_tip_block_num() + num_blocks;
    self.mock_chain.write().prove_until_block(target).unwrap();
}
}
#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)]
#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))]
impl NodeRpcClient for MockRpcApi {
/// The mock caches no genesis commitment locally, so this always returns `None`.
fn has_genesis_commitment(&self) -> Option<Word> {
None
}
/// Accepts and discards the genesis commitment; the mock performs no validation.
async fn set_genesis_commitment(&self, _commitment: Word) -> Result<(), RpcError> {
Ok(())
}
/// Returns note-sync info for the given tags over the block range
/// `(block_num, block_to]` (upper bound defaults to the chain tip).
///
/// Matching notes are grouped per inclusion block; an (initially empty) entry
/// for the upper bound is always present so the response ends with the final
/// block header and its MMR path.
async fn sync_notes(
&self,
block_num: BlockNumber,
block_to: Option<BlockNumber>,
note_tags: &BTreeSet<NoteTag>,
) -> Result<NoteSyncInfo, RpcError> {
let chain_tip = self.get_chain_tip_block_num();
let upper_bound = block_to.unwrap_or(chain_tip);
// Group matching committed notes by the block in which they were included.
let mut blocks_with_notes: BTreeMap<BlockNumber, BTreeMap<NoteId, CommittedNote>> =
BTreeMap::new();
for note in self.mock_chain.read().committed_notes().values() {
let note_block = note.inclusion_proof().location().block_num();
if note_tags.contains(&note.metadata().tag())
&& note_block > block_num
&& note_block <= upper_bound
{
let committed = CommittedNote::new(
note.id(),
CommittedNoteMetadata::Full(note.metadata().clone()),
note.inclusion_proof().clone(),
);
blocks_with_notes.entry(note_block).or_default().insert(note.id(), committed);
}
}
// Guarantee the response covers the requested upper bound even with no notes.
blocks_with_notes.entry(upper_bound).or_default();
let blocks: Vec<NoteSyncBlock> = blocks_with_notes
.into_iter()
.map(|(bn, notes)| {
let block_header = self.get_block_by_num(bn);
// Authentication path for this block header within the chain MMR.
let mmr_path = self.get_mmr().open(bn.as_usize()).unwrap().merkle_path().clone();
NoteSyncBlock { block_header, mmr_path, notes }
})
.collect();
Ok(NoteSyncInfo { chain_tip, block_to: upper_bound, blocks })
}
/// Returns the MMR delta needed to move a client from `block_from` to
/// `block_to` (capped at the chain tip), plus the header at the target block.
async fn sync_chain_mmr(
&self,
block_from: BlockNumber,
block_to: Option<BlockNumber>,
) -> Result<ChainMmrInfo, RpcError> {
let chain_tip = self.get_chain_tip_block_num();
let target_block = block_to.unwrap_or(chain_tip).min(chain_tip);
// Forest size the client is assumed to already hold. A client at the tip
// gets a delta computed from the target itself; otherwise from the first
// block after `block_from`.
// NOTE(review): the `block_from == chain_tip` special case mirrors node
// forest bookkeeping — confirm against the real endpoint's semantics.
let from_forest = if block_from == chain_tip {
target_block.as_usize()
} else {
block_from.as_u32() as usize + 1
};
let mmr_delta = self
.get_mmr()
.get_delta(Forest::new(from_forest), Forest::new(target_block.as_usize()))
.unwrap();
let block_header = self.get_block_by_num(target_block);
Ok(ChainMmrInfo {
block_from,
block_to: target_block,
mmr_delta,
block_header,
})
}
/// Fetches a block header by number (or the latest header when `block_num` is
/// `None`), optionally together with its MMR inclusion proof.
///
/// Fix: the original called `block_num.unwrap()` when building the MMR proof,
/// which panicked for `(None, include_mmr_proof = true)`. The proof is now
/// opened at the number taken from the resolved header, so requesting a proof
/// for the latest block works as well.
async fn get_block_header_by_number(
    &self,
    block_num: Option<BlockNumber>,
    include_mmr_proof: bool,
) -> Result<(BlockHeader, Option<MmrProof>), RpcError> {
    let block = match block_num {
        Some(num) => self.mock_chain.read().block_header(num.as_usize()),
        None => self.mock_chain.read().latest_block_header(),
    };
    let mmr_proof = if include_mmr_proof {
        // Use the resolved header's number so `block_num = None` is supported.
        Some(self.get_mmr().open(block.block_num().as_usize()).unwrap())
    } else {
        None
    };
    Ok((block, mmr_proof))
}
/// Looks up the requested notes among the chain's committed notes; unknown IDs
/// are silently skipped.
async fn get_notes_by_id(&self, note_ids: &[NoteId]) -> Result<Vec<FetchedNote>, RpcError> {
    let committed = self.mock_chain.read().committed_notes().clone();
    let fetched: Vec<FetchedNote> = note_ids
        .iter()
        .filter_map(|id| committed.get(id))
        .map(|note| match note {
            // Private notes expose only header + inclusion proof.
            MockChainNote::Private(id, metadata, proof) => {
                FetchedNote::Private(NoteHeader::new(*id, metadata.clone()), proof.clone())
            },
            // Public notes carry the full note.
            MockChainNote::Public(note, proof) => {
                FetchedNote::Public(note.clone(), proof.clone())
            },
        })
        .collect();
    Ok(fetched)
}
/// Queues the proven transaction on the mock chain and reports the current
/// chain tip as the submission block.
///
/// Fix: the transaction was cloned before being handed to the mock chain even
/// though the original was never used again; it is now moved directly
/// (clippy `redundant_clone`). Also normalizes the mangled signature/semicolon
/// formatting of the original.
async fn submit_proven_transaction(
    &self,
    proven_transaction: ProvenTransaction,
    _tx_inputs: TransactionInputs,
) -> Result<BlockNumber, RpcError> {
    // Scope the write lock so it is released before reading the chain tip.
    {
        let mut mock_chain = self.mock_chain.write();
        mock_chain.add_pending_proven_transaction(proven_transaction);
    }
    Ok(self.get_chain_tip_block_num())
}
/// Returns the latest known state of `account_id`.
///
/// The most recent commitment is taken from the per-block update index
/// (scanning blocks newest-first). Public accounts additionally include the
/// full committed state; private accounts return only the commitment summary.
async fn get_account_details(&self, account_id: AccountId) -> Result<FetchedAccount, RpcError> {
let summary =
self.account_commitment_updates
.read()
.iter()
// Newest block first, so the first hit is the latest update.
.rev()
.find_map(|(block_num, updates)| {
updates.get(&account_id).map(|commitment| AccountUpdateSummary {
commitment: *commitment,
last_block_num: *block_num,
})
});
if let Ok(account) = self.mock_chain.read().committed_account(account_id) {
// Public account: fall back to the committed state when no update was recorded.
let summary = summary.unwrap_or_else(|| AccountUpdateSummary {
commitment: account.to_commitment(),
last_block_num: BlockNumber::GENESIS,
});
Ok(FetchedAccount::new_public(account.clone(), summary))
} else if let Some(summary) = summary {
// Private account: only the commitment summary is known.
Ok(FetchedAccount::new_private(account_id, summary))
} else {
Err(RpcError::ExpectedDataMissing(format!(
"account {account_id} not found in mock commitment updates or mock chain"
)))
}
}
/// Returns an account witness (and, for public accounts, full account details)
/// for `account_id`.
///
/// For each storage slot listed in `account_storage_requirements`, all map
/// entries are returned and flagged as oversized when their count exceeds
/// `oversize_threshold`; the vault's assets are handled the same way.
///
/// # Panics
/// Panics if a requested storage slot is not a map (mock-only behavior).
///
/// NOTE(review): `account_state` only selects the *reported* block number —
/// the witness and details always reflect the latest committed state. Confirm
/// callers do not rely on historical state here.
async fn get_account_proof(
&self,
account_id: AccountId,
account_storage_requirements: AccountStorageRequirements,
account_state: AccountStateAt,
_known_account_code: Option<AccountCode>,
_known_vault_commitment: Option<Word>,
) -> Result<(BlockNumber, AccountProof), RpcError> {
let mock_chain = self.mock_chain.read();
let block_number = match account_state {
AccountStateAt::Block(number) => number,
AccountStateAt::ChainTip => mock_chain.latest_block_header().block_num(),
};
// Full details (header, storage, code, vault) only exist for public accounts.
let headers = if account_id.has_public_state() {
let account = mock_chain.committed_account(account_id).unwrap();
let mut map_details = vec![];
for slot_name in account_storage_requirements.inner().keys() {
if let Some(StorageSlotContent::Map(storage_map)) =
account.storage().get(slot_name).map(StorageSlot::content)
{
let entries: Vec<StorageMapEntry> = storage_map
.entries()
.map(|(key, value)| StorageMapEntry { key: *key, value: *value })
.collect();
// Flag (but still return in full) maps exceeding the threshold.
let too_many_entries = entries.len() > self.oversize_threshold;
let account_storage_map_detail = AccountStorageMapDetails {
slot_name: slot_name.clone(),
too_many_entries,
entries: StorageMapEntries::AllEntries(entries),
};
map_details.push(account_storage_map_detail);
} else {
panic!("Storage slot {slot_name} is not a map");
}
}
let storage_details = AccountStorageDetails {
header: account.storage().to_header(),
map_details,
};
let mut assets = vec![];
for asset in account.vault().assets() {
assets.push(asset);
}
let vault_details = AccountVaultDetails {
too_many_assets: assets.len() > self.oversize_threshold,
assets,
};
Some(AccountDetails {
header: account.into(),
storage_details,
code: account.code().clone(),
vault_details,
})
} else {
None
};
// Membership witness for the account in the account tree.
let witness = mock_chain.account_tree().open(account_id);
let proof = AccountProof::new(witness, headers).unwrap();
Ok((block_number, proof))
}
/// Returns nullifiers matching the given 16-bit prefixes that were consumed in
/// `[from_block_num, block_to]` (unbounded above when `block_to` is `None`).
async fn sync_nullifiers(
    &self,
    prefixes: &[u16],
    from_block_num: BlockNumber,
    block_to: Option<BlockNumber>,
) -> Result<Vec<NullifierUpdate>, RpcError> {
    let mut updates = Vec::new();
    let chain = self.mock_chain.read();
    for (nullifier, block_num) in chain.nullifier_tree().entries() {
        // Skip entries outside the requested block range.
        if block_num < from_block_num {
            continue;
        }
        if block_to.is_some_and(|to_block| block_num > to_block) {
            continue;
        }
        if prefixes.contains(&nullifier.prefix()) {
            updates.push(NullifierUpdate { nullifier, block_num });
        }
    }
    Ok(updates)
}
/// Opens an SMT proof in the nullifier tree for each requested nullifier.
async fn check_nullifiers(&self, nullifiers: &[Nullifier]) -> Result<Vec<SmtProof>, RpcError> {
    let mut proofs = Vec::with_capacity(nullifiers.len());
    for nullifier in nullifiers {
        let proof = self.mock_chain.read().nullifier_tree().open(nullifier).into_proof();
        proofs.push(proof);
    }
    Ok(proofs)
}
async fn get_block_by_number(&self, block_num: BlockNumber) -> Result<ProvenBlock, RpcError> {
let block = self
.mock_chain
.read()
.proven_blocks()
.iter()
.find(|b| b.header().block_num() == block_num)
.unwrap()
.clone();
Ok(block)
}
async fn get_note_script_by_root(&self, root: Word) -> Result<NoteScript, RpcError> {
let note = self
.get_available_notes()
.iter()
.find(|note| note.note().is_some_and(|n| n.script().root() == root))
.unwrap()
.clone();
Ok(note.note().unwrap().script().clone())
}
/// Drives the paginated storage-map sync to completion, accumulating updates
/// from every page until the target block is reached.
async fn sync_storage_maps(
    &self,
    block_from: BlockNumber,
    block_to: Option<BlockNumber>,
    account_id: AccountId,
) -> Result<StorageMapInfo, RpcError> {
    let chain_tip = self.get_chain_tip_block_num();
    let target_block = block_to.unwrap_or(chain_tip).min(chain_tip);
    let mut collected = Vec::new();
    let mut cursor = block_from;
    let (final_tip, final_block) = loop {
        let page = self.get_sync_storage_maps_request(cursor, block_to, account_id);
        let (page_tip, page_end) = (page.chain_tip, page.block_number);
        collected.extend(page.updates);
        if page_end >= target_block {
            break (page_tip, page_end);
        }
        // Resume the next page right after the one we just consumed.
        cursor = (page_end.as_u32() + 1).into();
    };
    Ok(StorageMapInfo {
        chain_tip: final_tip,
        block_number: final_block,
        updates: collected,
    })
}
/// Drives the paginated vault sync to completion, accumulating updates from
/// every page until the target block is reached.
async fn sync_account_vault(
    &self,
    block_from: BlockNumber,
    block_to: Option<BlockNumber>,
    account_id: AccountId,
) -> Result<AccountVaultInfo, RpcError> {
    let chain_tip = self.get_chain_tip_block_num();
    let target_block = block_to.unwrap_or(chain_tip).min(chain_tip);
    let mut collected = Vec::new();
    let mut cursor = block_from;
    let (final_tip, final_block) = loop {
        let page = self.get_sync_account_vault_request(cursor, block_to, account_id);
        let (page_tip, page_end) = (page.chain_tip, page.block_number);
        collected.extend(page.updates);
        if page_end >= target_block {
            break (page_tip, page_end);
        }
        // Resume the next page right after the one we just consumed.
        cursor = (page_end.as_u32() + 1).into();
    };
    Ok(AccountVaultInfo {
        chain_tip: final_tip,
        block_number: final_block,
        updates: collected,
    })
}
/// Returns all transactions touching the given accounts in `(block_from, block_to]`.
async fn sync_transactions(
    &self,
    block_from: BlockNumber,
    block_to: Option<BlockNumber>,
    account_ids: Vec<AccountId>,
) -> Result<TransactionsInfo, RpcError> {
    Ok(self.get_sync_transactions_request(block_from, block_to, account_ids.as_slice()))
}
/// The mock always reports the testnet network ID.
async fn get_network_id(&self) -> Result<NetworkId, RpcError> {
Ok(NetworkId::Testnet)
}
/// The mock always reports the default RPC limits.
async fn get_rpc_limits(&self) -> Result<crate::rpc::RpcLimits, RpcError> {
Ok(crate::rpc::RpcLimits::default())
}
/// The mock caches no RPC limits locally, so this always returns `None`.
fn has_rpc_limits(&self) -> Option<crate::rpc::RpcLimits> {
None
}
/// Setting RPC limits is a no-op for the mock.
async fn set_rpc_limits(&self, _limits: crate::rpc::RpcLimits) {
}
/// Reports a minimal node status: only the crate version is populated; the
/// genesis commitment, store and block-producer fields are left empty.
async fn get_status_unversioned(&self) -> Result<RpcStatusInfo, RpcError> {
Ok(RpcStatusInfo {
version: env!("CARGO_PKG_VERSION").into(),
genesis_commitment: None,
store: None,
block_producer: None,
})
}
}
impl From<MockChain> for MockRpcApi {
fn from(mock_chain: MockChain) -> Self {
MockRpcApi::new(mock_chain)
}
}
/// Indexes the account state commitments of every proven block in `mock_chain`,
/// keyed by block number and then account ID. Blocks that updated no accounts
/// are omitted from the result.
fn build_account_updates(
    mock_chain: &MockChain,
) -> BTreeMap<BlockNumber, BTreeMap<AccountId, Word>> {
    mock_chain
        .proven_blocks()
        .iter()
        .filter_map(|block| {
            let updates: BTreeMap<AccountId, Word> = block
                .body()
                .updated_accounts()
                .iter()
                .map(|update| (update.account_id(), update.final_state_commitment()))
                .collect();
            if updates.is_empty() {
                None
            } else {
                Some((block.header().block_num(), updates))
            }
        })
        .collect()
}