use alloc::collections::VecDeque;
use alloc::sync::Arc;
use core::cell::RefCell;
use core::mem;
use bitcoin::blockdata::constants::{genesis_block, DIFFCHANGE_INTERVAL};
use bitcoin::consensus::{Decodable, Encodable};
use bitcoin::hashes::hex::ToHex;
use bitcoin::hashes::Hash;
use bitcoin::secp256k1::PublicKey;
use bitcoin::util::uint::Uint256;
use bitcoin::{
BlockHash, BlockHeader, FilterHeader, Network, OutPoint, PackedLockTime, Transaction, TxIn,
TxOut, Txid,
};
use crate::policy::validator::ValidatorFactory;
#[allow(unused_imports)]
use log::{debug, error, info, warn};
use push_decoder::{BlockDecoder, Listener as PushListener};
use serde_derive::{Deserialize, Serialize};
use serde_with::{serde_as, IfIsHumanReadable};
use txoo::filter::BlockSpendFilter;
use txoo::get_latest_checkpoint;
use txoo::proof::{ProofType, TxoProof};
use crate::prelude::*;
use crate::short_function;
use crate::util::ser_util::{OutPointDef, TxidDef};
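/// Errors returned by the chain tracker.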
#[derive(Debug, PartialEq)]
pub enum Error {
InvalidChain,
OrphanBlock(String),
InvalidBlock,
ReorgTooDeep,
InvalidProof,
}
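// Logging helpers that construct the corresponding `Error` variant.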
macro_rules! error_invalid_chain {
($($arg:tt)*) => {{
error!("InvalidChain: {}", format!($($arg)*));
Error::InvalidChain
}};
}
macro_rules! error_orphan_block {
($($arg:tt)*) => {{
let message = format!($($arg)*);
warn!("OrphanBlock: {}", message);
Error::OrphanBlock(message)
}};
}
macro_rules! error_invalid_block {
($($arg:tt)*) => {{
error!("InvalidBlock: {}", format!($($arg)*));
Error::InvalidBlock
}};
}
macro_rules! error_invalid_proof {
($($arg:tt)*) => {{
error!("InvalidProof: {}", format!($($arg)*));
Error::InvalidProof
}};
}
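/// Per-listener watch state maintained by the tracker.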
#[serde_as]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListenSlot {
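    /// Transaction ids to watch for confirmation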
#[serde_as(as = "IfIsHumanReadable<OrderedSet<TxidDef>>")]
pub txid_watches: OrderedSet<Txid>,
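    /// Outpoints being watched for spends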
#[serde_as(as = "IfIsHumanReadable<OrderedSet<OutPointDef>>")]
pub watches: OrderedSet<OutPoint>,
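    /// Watched outpoints whose spends have been seen, retained for reorg handling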
#[serde_as(as = "IfIsHumanReadable<OrderedSet<OutPointDef>>")]
pub seen: OrderedSet<OutPoint>,
}
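/// A block header together with the corresponding TXOO filter header.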
#[derive(Clone)]
pub struct Headers(pub BlockHeader, pub FilterHeader);
impl Encodable for Headers {
fn consensus_encode<S: crate::io::Write + ?Sized>(
&self,
s: &mut S,
) -> Result<usize, crate::io::Error> {
let mut len = 0;
len += self.0.consensus_encode(s)?;
len += self.1.consensus_encode(s)?;
Ok(len)
}
}
impl Decodable for Headers {
fn consensus_decode<D: crate::io::Read + ?Sized>(
d: &mut D,
) -> Result<Self, bitcoin::consensus::encode::Error> {
let header = BlockHeader::consensus_decode(d)?;
let filter_header = FilterHeader::consensus_decode(d)?;
Ok(Headers(header, filter_header))
}
}
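/// In-progress state for decoding a streamed block chunk by chunk.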
struct BlockDecodeState {
decoder: BlockDecoder,
offset: u32,
block_hash: BlockHash,
}
impl BlockDecodeState {
fn new(block_hash: BlockHash) -> Self {
BlockDecodeState { decoder: BlockDecoder::new(), offset: 0, block_hash }
}
}
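/// Tracks the chain tip and a bounded window of recent headers, validating
/// proof of work, difficulty rules and TXOO spend proofs, and fanning block
/// connect/disconnect events out to registered [`ChainListener`]s.
///
/// A minimal usage sketch (illustrative only; `network`, `node_id`,
/// `validator_factory`, `listener`, `header`, `proof` and the `MyListener`
/// type are assumed to come from the caller's context):
///
/// ```ignore
/// let mut tracker: ChainTracker<MyListener> =
///     ChainTracker::for_network(network, node_id, validator_factory);
/// tracker.add_listener(listener, OrderedSet::new());
/// // for each new block, supply its header and TXOO proof
/// tracker.add_block(header, proof)?;
/// ```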
pub struct ChainTracker<L: ChainListener> {
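    /// Headers of recent blocks below the tip, newest first, retained for reorg handling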
pub headers: VecDeque<Headers>,
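    /// The current tip header and filter header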
pub tip: Headers,
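    /// The height of the tip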
pub height: u32,
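    /// The network being tracked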
pub network: Network,
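    /// Registered listeners and their watch state, keyed by listener key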
pub listeners: OrderedMap<L::Key, (L, ListenSlot)>,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
decode_state: Option<RefCell<BlockDecodeState>>,
}
impl<L: ChainListener> ChainTracker<L> {
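    /// Maximum depth of reorg that can be handled (reduced when the `tracker_size_workaround` feature is enabled)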
#[cfg(feature = "tracker_size_workaround")]
pub const MAX_REORG_SIZE: usize = 16;
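    /// Maximum depth of reorg that can be handled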
#[cfg(not(feature = "tracker_size_workaround"))]
pub const MAX_REORG_SIZE: usize = 100;
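    /// Create a tracker with the given tip and height, after validating the tip header's proof of work.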
pub fn new(
network: Network,
height: u32,
tip: Headers,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
) -> Result<Self, Error> {
let header = tip.0;
header
.validate_pow(&header.target())
.map_err(|e| error_invalid_block!("validate pow {}: {}", header.target(), e))?;
let headers = VecDeque::new();
let listeners = OrderedMap::new();
Ok(ChainTracker {
headers,
tip,
height,
network,
listeners,
node_id,
validator_factory,
decode_state: None,
})
}
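    /// Restore a tracker from persisted state; no validation is performed.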
pub fn restore(
headers: VecDeque<Headers>,
tip: Headers,
height: u32,
network: Network,
listeners: OrderedMap<L::Key, (L, ListenSlot)>,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
) -> Self {
ChainTracker {
headers,
tip,
height,
network,
listeners,
node_id,
validator_factory,
decode_state: None,
}
}
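    /// Create a tracker starting at the network's genesis block, computing the genesis filter header.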
pub fn from_genesis(
network: Network,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
) -> Self {
let height = 0;
let genesis = genesis_block(network);
let filter = BlockSpendFilter::from_block(&genesis);
let filter_header = filter.filter_header(&FilterHeader::all_zeros());
Self::from_checkpoint(
network,
node_id,
validator_factory,
&genesis.header,
&filter_header,
height,
)
}
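    /// Create a tracker whose tip is the given checkpoint header, filter header and height.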
fn from_checkpoint(
network: Network,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
header: &BlockHeader,
filter_header: &FilterHeader,
height: u32,
) -> Self {
let tip = Headers(*header, *filter_header);
Self::new(network, height, tip, node_id, validator_factory)
.expect("genesis block / checkpoint is expected to be valid")
}
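    /// Create a tracker for the given network, starting from the latest TXOO checkpoint
    /// if one is available, otherwise from genesis.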
pub fn for_network(
network: Network,
node_id: PublicKey,
validator_factory: Arc<dyn ValidatorFactory>,
) -> Self {
if let Some((height, _hash, filter_header, header)) = get_latest_checkpoint(network) {
Self::from_checkpoint(
network,
node_id,
validator_factory,
&header,
&filter_header,
height,
)
} else {
Self::from_genesis(network, node_id, validator_factory)
}
}
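    /// The current tip header and filter header.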
pub fn tip(&self) -> &Headers {
&self.tip
}
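    /// The retained headers below the tip, newest first.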
pub fn headers(&self) -> &VecDeque<Headers> {
&self.headers
}
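    /// The height of the current tip.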
pub fn height(&self) -> u32 {
self.height
}
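    /// Remove the block at the tip during a reorg, given a TXOO proof for it.
    /// Listeners are notified in reverse and the previous header becomes the tip.
    /// Returns the removed header, or `ReorgTooDeep` if no retained headers remain.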
pub fn remove_block(&mut self, proof: TxoProof) -> Result<BlockHeader, Error> {
if self.headers.is_empty() {
return Err(Error::ReorgTooDeep);
}
let tip_block_hash = self.headers[0].0.block_hash();
let tip_block_time = self.headers[0].0.time;
self.maybe_finish_decoding_block(&proof, &tip_block_hash);
let prev_headers = &self.headers[0];
let expected_external_block_hash =
if proof.proof.is_external() { Some(&tip_block_hash) } else { None };
self.validate_block(
self.height - 1,
expected_external_block_hash,
prev_headers,
&self.tip,
&proof,
true,
)?;
match proof.proof {
ProofType::Filter(_, spv_proof) => self.notify_listeners_remove(
Some(spv_proof.txs.as_slice()),
tip_block_hash,
Some(tip_block_time),
),
ProofType::Block(b) => panic!("non-streamed block not supported {}", b.block_hash()),
ProofType::ExternalBlock() => self.notify_listeners_remove(None, tip_block_hash, None),
};
info!("removed block {}: {}", self.height, &self.tip.0.block_hash());
let mut headers = self.headers.pop_front().expect("already checked");
mem::swap(&mut self.tip, &mut headers);
self.height -= 1;
Ok(headers.0)
}
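    /// Re-attach a listener and its persisted watch state when restoring from persistence.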
pub fn restore_listener(&mut self, outpoint: L::Key, listener: L, slot: ListenSlot) {
self.listeners.insert(outpoint, (listener, slot));
}
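    // Roll back each listener's watch state for a disconnected block: outpoints whose
    // spends were seen become watched again, and watches added by the block are dropped.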
fn notify_listeners_remove(
&mut self,
txs: Option<&[Transaction]>,
block_hash: BlockHash,
maybe_block_time: Option<u32>,
) {
for (listener, slot) in self.listeners.values_mut() {
let (adds, removes) = if let Some(txs) = txs {
listener.on_remove_block(txs, &block_hash, maybe_block_time.expect("block_time"))
} else {
listener.on_remove_streamed_block_end(&block_hash)
};
debug!("{}: REVERT adding {:?}, removing {:?}", short_function!(), adds, removes);
for outpoint in removes.iter() {
slot.seen.remove(outpoint);
}
slot.watches.extend(removes);
for outpoint in adds.iter() {
slot.watches.remove(outpoint);
}
}
}
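    /// Process the next chunk of a streamed block. The first chunk of a block must start
    /// at offset 0 and subsequent chunks must be contiguous and in order. Decoded
    /// transactions are pushed to listeners via [`ChainListener::on_push`].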
pub fn block_chunk(&mut self, hash: BlockHash, offset: u32, chunk: &[u8]) -> Result<(), Error> {
if offset == 0 {
assert!(self.decode_state.is_none(), "already decoding, and got chunk at offset 0");
self.decode_state = Some(RefCell::new(BlockDecodeState::new(hash)));
}
if let Some(decode_state_cell) = self.decode_state.as_ref() {
let mut decode_state = decode_state_cell.borrow_mut();
assert_eq!(
decode_state.block_hash, hash,
"got chunk for wrong block {} != {}",
hash, decode_state.block_hash
);
assert_eq!(
decode_state.offset, offset,
"got chunk for wrong offset {} != {}",
offset, decode_state.offset
);
let decoder = &mut decode_state.decoder;
let mut listener = ChainTrackerPushListener(self, hash);
decoder.decode_next(chunk, &mut listener).expect("decode failure");
decode_state.offset += chunk.len() as u32;
} else {
panic!("got chunk at offset {} without decoder", offset);
}
Ok(())
}
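    /// Add a block on top of the current tip, given its header and TXOO proof.
    /// Validates the header chain, difficulty rules and proof, notifies listeners,
    /// then advances the tip.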
pub fn add_block(&mut self, header: BlockHeader, proof: TxoProof) -> Result<(), Error> {
let message_block_hash = header.block_hash();
self.maybe_finish_decoding_block(&proof, &message_block_hash);
let filter_header = proof.filter_header();
let headers = Headers(header, filter_header);
let expected_external_block_hash =
if proof.proof.is_external() { Some(&message_block_hash) } else { None };
self.validate_block(
self.height,
expected_external_block_hash,
&self.tip,
&headers,
&proof,
false,
)?;
match proof.proof {
ProofType::Filter(_, spv_proof) => self.notify_listeners_add(
Some(spv_proof.txs.as_slice()),
message_block_hash,
Some(header.time),
),
ProofType::Block(b) => panic!("non-streamed block not supported {}", b.block_hash()),
ProofType::ExternalBlock() => self.notify_listeners_add(None, message_block_hash, None),
};
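        // Keep at most MAX_REORG_SIZE previous headers for potential reorgs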
self.headers.truncate(Self::MAX_REORG_SIZE - 1);
self.headers.push_front(self.tip.clone());
self.tip = Headers(header, filter_header);
self.height += 1;
info!("added block {}: {}", self.height, &self.tip.0.block_hash());
Ok(())
}
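    // If a streamed block decode is in progress, finish it and check that the decoded
    // block hash matches the expected one.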
fn maybe_finish_decoding_block(&mut self, proof: &TxoProof, expected_block_hash: &BlockHash) {
assert_eq!(
proof.proof.is_external(),
self.decode_state.is_some(),
"is_external != decode_state"
);
if let Some(decode_state_cell) = self.decode_state.take() {
let decode_state = decode_state_cell.into_inner();
decode_state.decoder.finish().expect("decode finish failure");
assert_eq!(
decode_state.block_hash, *expected_block_hash,
"wrong block was sent {} != {}",
decode_state.block_hash, expected_block_hash
);
}
}
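    // Apply each listener's watch delta for a connected block: start watching the
    // returned adds and move consumed watches to `seen`.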
fn notify_listeners_add(
&mut self,
txs: Option<&[Transaction]>,
block_hash: BlockHash,
maybe_block_time: Option<u32>,
) {
for (listener, slot) in self.listeners.values_mut() {
let (adds, removes) = if let Some(txs) = txs {
listener.on_add_block(txs, &block_hash, maybe_block_time.expect("block_time"))
} else {
listener.on_add_streamed_block_end(&block_hash)
};
debug!("{}: adding {:?}, removing {:?}", short_function!(), adds, removes);
slot.watches.extend(adds);
for outpoint in removes.iter() {
slot.watches.remove(outpoint);
}
slot.seen.extend(removes);
}
}
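    /// Register a listener with an initial set of txid watches and an empty outpoint watch set.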
pub fn add_listener(&mut self, listener: L, initial_txid_watches: OrderedSet<Txid>) {
let slot = ListenSlot {
txid_watches: initial_txid_watches,
watches: OrderedSet::new(),
seen: OrderedSet::new(),
};
debug!("{}: adding listener with txid watches {:?}", short_function!(), slot.txid_watches);
self.listeners.insert(listener.key().clone(), (listener, slot));
}
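    /// Remove a listener and its watch state.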
pub fn remove_listener(&mut self, key: &L::Key) {
debug!("{}: removing listener", short_function!());
        self.listeners.remove(key);
}
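    /// Add outpoint watches to an existing listener. Panics if the listener does not exist.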
pub fn add_listener_watches(&mut self, key: &L::Key, watches: OrderedSet<OutPoint>) {
let (_, slot) =
self.listeners.get_mut(key).expect("trying to add watches to non-existent listener");
debug!("{}: adding watches {:?}", short_function!(), watches);
slot.watches.extend(watches);
}
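    /// All txid and outpoint watches across listeners, used when connecting a block.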
pub fn get_all_forward_watches(&self) -> (Vec<Txid>, Vec<OutPoint>) {
self.get_all_watches(false)
}
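    /// All txid and outpoint watches, including already-seen outpoints, used when
    /// disconnecting a block.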
pub fn get_all_reverse_watches(&self) -> (Vec<Txid>, Vec<OutPoint>) {
self.get_all_watches(true)
}
fn get_all_watches(&self, include_reverse: bool) -> (Vec<Txid>, Vec<OutPoint>) {
let mut txid_watches = OrderedSet::new();
let mut outpoint_watches = OrderedSet::new();
for (_, slot) in self.listeners.values() {
txid_watches.extend(&slot.txid_watches);
outpoint_watches.extend(&slot.watches);
if include_reverse {
outpoint_watches.extend(&slot.seen);
}
}
(txid_watches.into_iter().collect(), outpoint_watches.into_iter().collect())
}
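    // Check linkage to the previous header, proof of work, difficulty rules, and the
    // TXOO proof against the current outpoint watches.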
fn validate_block(
&self,
height: u32,
external_block_hash: Option<&BlockHash>,
prev_headers: &Headers,
headers: &Headers,
proof: &TxoProof,
is_remove: bool,
) -> Result<(), Error> {
let header = &headers.0;
let prev_header = &prev_headers.0;
if header.prev_blockhash != prev_header.block_hash() {
return Err(error_orphan_block!(
"header.prev_blockhash {} != self.tip.block_hash {}",
header.prev_blockhash.to_hex(),
prev_header.block_hash().to_hex()
));
}
header.validate_pow(&header.target()).map_err(|_| Error::InvalidBlock)?;
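        // Difficulty rules: allow testnet's 20-minute minimum-difficulty exception,
        // validate retargets at the difficulty-change boundary, and otherwise require
        // unchanged bits.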
if self.network == Network::Testnet
&& header.target() == max_target(self.network)
&& header.time > prev_header.time + 60 * 20
{
} else if (height + 1) % DIFFCHANGE_INTERVAL == 0 {
let prev_target = prev_header.target();
let target = header.target();
let network = self.network;
validate_retarget(prev_target, target, network)?;
} else {
if header.bits != prev_header.bits && self.network != Network::Testnet {
return Err(error_invalid_chain!(
"header.bits {} != self.tip.bits {}",
header.bits,
prev_header.bits
));
}
}
let (_, outpoint_watches) =
if is_remove { self.get_all_reverse_watches() } else { self.get_all_forward_watches() };
let validator = self.validator_factory.make_validator(self.network, self.node_id, None);
let prev_filter_header = &prev_headers.1;
        // An all-zero previous filter header means there is no filter data to chain from.
        if prev_filter_header[..].iter().all(|x| *x == 0) {
log::warn!("bypassing filter validation because prev_filter_header is all zeroes");
} else {
validator
.validate_block(
proof,
height + 1,
header,
external_block_hash,
prev_filter_header,
&outpoint_watches,
)
.map_err(|e| error_invalid_proof!("{:?}", e))?;
}
Ok(())
}
}
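/// Check that a difficulty retarget stays within the consensus bounds: at most a factor
/// of four in either direction, and not above the network's maximum target.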
fn validate_retarget(prev_target: Uint256, target: Uint256, network: Network) -> Result<(), Error> {
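    // Round-trip targets through the compact "bits" encoding so the bounds use the same
    // rounding as consensus.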
fn round_trip_target(prev_target: &Uint256) -> Uint256 {
BlockHeader::u256_from_compact_target(BlockHeader::compact_target_from_u256(prev_target))
}
let min = round_trip_target(&(prev_target >> 2));
let max = round_trip_target(&(prev_target << 2));
let chain_max = max_target(network);
if target.gt(&chain_max) {
return Err(error_invalid_block!("target {} > chain_max {}", target, chain_max));
}
if target.lt(&min) {
return Err(error_invalid_chain!("target {} < min {}", target, min));
}
if target.gt(&max) {
return Err(error_invalid_chain!("target {} > max {}", target, max));
}
Ok(())
}
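/// Fans push-decoder events out to all listeners, verifying on block start that the
/// streamed block matches the hash announced to `block_chunk`.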
struct ChainTrackerPushListener<'a, L: ChainListener>(&'a ChainTracker<L>, BlockHash);
impl<'a, L: ChainListener> ChainTrackerPushListener<'a, L> {
fn do_push<F: FnMut(&mut dyn PushListener)>(&mut self, mut f: F) {
for (listener, _) in self.0.listeners.values() {
listener.on_push(&mut f)
}
}
}
impl<'a, L: ChainListener> PushListener for ChainTrackerPushListener<'a, L> {
fn on_block_start(&mut self, header: &BlockHeader) {
assert_eq!(
header.block_hash(),
self.1,
"streamed block hash does not match header {} != {}",
header.block_hash(),
self.1
);
self.do_push(|pl| pl.on_block_start(header));
}
fn on_block_end(&mut self) {
self.do_push(|pl| pl.on_block_end());
}
fn on_transaction_start(&mut self, version: i32) {
self.do_push(|pl| pl.on_transaction_start(version));
}
fn on_transaction_end(&mut self, locktime: PackedLockTime, txid: Txid) {
self.do_push(|pl| pl.on_transaction_end(locktime, txid));
}
fn on_transaction_input(&mut self, txin: &TxIn) {
self.do_push(|pl| pl.on_transaction_input(txin));
}
fn on_transaction_output(&mut self, txout: &TxOut) {
self.do_push(|pl| pl.on_transaction_output(txout));
}
}
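/// Listen to block connect/disconnect events from a [`ChainTracker`].
///
/// The block callbacks return a pair of outpoint sets: outpoints to start watching, and
/// watched outpoints that were consumed by the block. The tracker uses these deltas to
/// maintain each listener's [`ListenSlot`].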
pub trait ChainListener: SendSync {
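    /// The key that identifies a listener, e.g. a channel's funding outpoint.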
type Key: Ord + Clone;
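    /// The key for this listener.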
fn key(&self) -> &Self::Key;
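    /// A block was connected. Returns (outpoints to start watching, watched outpoints
    /// spent by this block).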
fn on_add_block(
&self,
txs: &[Transaction],
block_hash: &BlockHash,
block_time: u32,
) -> (Vec<OutPoint>, Vec<OutPoint>);
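    /// A streamed block finished decoding; returns the same watch delta as `on_add_block`.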
fn on_add_streamed_block_end(&self, block_hash: &BlockHash) -> (Vec<OutPoint>, Vec<OutPoint>);
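    /// A block was disconnected during a reorg. Returns the watch delta of the removed
    /// block (same shape as `on_add_block`); the tracker reverses it.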
fn on_remove_block(
&self,
txs: &[Transaction],
block_hash: &BlockHash,
block_time: u32,
) -> (Vec<OutPoint>, Vec<OutPoint>);
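    /// A streamed block was disconnected; returns the watch delta of the removed block.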
fn on_remove_streamed_block_end(
&self,
block_hash: &BlockHash,
) -> (Vec<OutPoint>, Vec<OutPoint>);
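    /// Invoke `f` on this listener's streamed-block [`PushListener`] sink.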
fn on_push<F>(&self, f: F)
where
F: FnOnce(&mut dyn PushListener);
}
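/// The maximum (easiest) proof-of-work target for the given network.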
pub fn max_target(network: Network) -> Uint256 {
match network {
Network::Regtest => Uint256::from_u64(0x7fffff).unwrap() << (256 - 24),
_ => Uint256::from_u64(0xFFFF).unwrap() << 208,
}
}
#[cfg(test)]
mod tests {
use crate::util::test_utils::*;
use bitcoin::blockdata::constants::genesis_block;
use bitcoin::consensus::serialize;
use bitcoin::hashes::Hash;
use bitcoin::network::constants::Network;
use bitcoin::util::hash::bitcoin_merkle_root;
use bitcoin::{Block, TxIn};
use bitcoin::{PackedLockTime, Sequence, TxMerkleNode, Witness};
use bitcoind_client::dummy::DummyTxooSource;
use core::iter::FromIterator;
use super::*;
use crate::util::mocks::MockValidatorFactory;
use test_log::test;
use txoo::source::Source;
#[tokio::test]
async fn test_add_remove() -> Result<(), Error> {
let genesis = genesis_block(Network::Regtest);
let source = make_source().await;
let (mut tracker, _) = make_tracker()?;
let header0 = tracker.tip().0.clone();
assert_eq!(tracker.height(), 0);
let header1 = add_block(&mut tracker, &source, &[]).await?;
assert_eq!(tracker.height(), 1);
let bad_bits = header1.bits - 1;
let header_bad_bits =
mine_header_with_bits(tracker.tip.0.block_hash(), TxMerkleNode::all_zeros(), bad_bits);
let dummy_proof =
TxoProof::prove_unchecked(&genesis, &FilterHeader::all_zeros(), tracker.height() + 1);
assert_eq!(
tracker.add_block(header_bad_bits, dummy_proof).err(),
Some(Error::InvalidChain)
);
let header_removed = remove_block(&mut tracker, &source, &[], &header0).await?;
assert_eq!(header1, header_removed);
let (_, filter_header) = source.get(0, &genesis).await.unwrap();
let proof = TxoProof::prove_unchecked(&genesis, &filter_header, 0);
assert_eq!(tracker.remove_block(proof).err(), Some(Error::ReorgTooDeep));
Ok(())
}
async fn make_source() -> DummyTxooSource {
let source = DummyTxooSource::new();
source.on_new_block(0, &genesis_block(Network::Regtest)).await;
source
}
#[tokio::test]
async fn test_listeners() -> Result<(), Error> {
let source = make_source().await;
let (mut tracker, validator_factory) = make_tracker()?;
let header1 = add_block(&mut tracker, &source, &[]).await?;
let tx = make_tx(vec![make_txin(1)]);
let initial_watch = make_outpoint(1);
let second_watch = OutPoint::new(tx.txid(), 0);
let listener = MockListener::new(initial_watch);
tracker.add_listener(listener.clone(), OrderedSet::new());
tracker.add_listener_watches(&initial_watch, OrderedSet::from_iter(vec![initial_watch]));
assert_eq!(tracker.listeners.len(), 1);
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![initial_watch])
);
let header2 = add_block(&mut tracker, &source, &[tx.clone()]).await?;
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![second_watch])
);
let tx2 = make_tx(vec![TxIn {
previous_output: second_watch,
script_sig: Default::default(),
sequence: Sequence::ZERO,
witness: Witness::default(),
}]);
let _header3 = add_block(&mut tracker, &source, &[tx2.clone()]).await?;
assert_eq!(tracker.listeners.get(listener.key()).unwrap().1.watches, OrderedSet::new());
assert_eq!(
*validator_factory.validator().last_validated_watches.lock().unwrap(),
vec![second_watch]
);
remove_block(&mut tracker, &source, &[tx2], &header2).await?;
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![second_watch])
);
assert_eq!(
*validator_factory.validator().last_validated_watches.lock().unwrap(),
vec![initial_watch, second_watch]
);
remove_block(&mut tracker, &source, &[tx], &header1).await?;
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![initial_watch])
);
assert_eq!(
*validator_factory.validator().last_validated_watches.lock().unwrap(),
vec![initial_watch, second_watch]
);
Ok(())
}
#[tokio::test]
async fn test_streamed() -> Result<(), Error> {
let source = make_source().await;
let (mut tracker, _validator_factory) = make_tracker()?;
let _header1 = add_block(&mut tracker, &source, &[]).await?;
let tx = make_tx(vec![make_txin(1)]);
let initial_watch = make_outpoint(1);
let second_watch = OutPoint::new(tx.txid(), 0);
let listener = MockListener::new(initial_watch);
tracker.add_listener(listener.clone(), OrderedSet::new());
tracker.add_listener_watches(&initial_watch, OrderedSet::from_iter(vec![initial_watch]));
assert_eq!(tracker.listeners.len(), 1);
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![initial_watch])
);
let _header2 = add_streamed_block(&mut tracker, &source, &[tx.clone()]).await?;
assert_eq!(
tracker.listeners.get(listener.key()).unwrap().1.watches,
OrderedSet::from_iter(vec![second_watch])
);
Ok(())
}
async fn add_block(
tracker: &mut ChainTracker<MockListener>,
source: &DummyTxooSource,
txs: &[Transaction],
) -> Result<BlockHeader, Error> {
let txs = txs_with_coinbase(txs);
let block = make_block(tracker.tip().0, txs);
let height = tracker.height() + 1;
source.on_new_block(height, &block).await;
let (_attestation, filter_header) = source.get(height, &block).await.unwrap();
let proof = TxoProof::prove_unchecked(&block, &filter_header, height);
tracker.add_block(block.header.clone(), proof)?;
Ok(block.header)
}
async fn add_streamed_block(
tracker: &mut ChainTracker<MockListener>,
source: &DummyTxooSource,
txs: &[Transaction],
) -> Result<BlockHeader, Error> {
let txs = txs_with_coinbase(txs);
let block = make_block(tracker.tip().0, txs);
let height = tracker.height() + 1;
source.on_new_block(height, &block).await;
let (_attestation, filter_header) = source.get(height, &block).await.unwrap();
let proof = TxoProof::prove_unchecked(&block, &filter_header, height);
let proof =
TxoProof { attestations: proof.attestations, proof: ProofType::ExternalBlock() };
let bytes = serialize(&block);
tracker.block_chunk(block.block_hash(), 0, &bytes)?;
tracker.add_block(block.header.clone(), proof)?;
Ok(block.header)
}
async fn add_block_with_bits(
tracker: &mut ChainTracker<MockListener>,
source: &DummyTxooSource,
bits: u32,
do_add: bool,
) -> Result<BlockHeader, Error> {
let txs = txs_with_coinbase(&[]);
let txids: Vec<Txid> = txs.iter().map(|tx| tx.txid()).collect();
let merkle_root = bitcoin_merkle_root(txids.iter().map(Txid::as_hash)).unwrap().into();
let header = mine_header_with_bits(tracker.tip().0.block_hash(), merkle_root, bits);
let block = Block { header, txdata: txs };
let height = tracker.height() + 1;
let filter_header = if do_add {
source.on_new_block(height, &block).await;
let (_attestation, filter_header) = source.get(height, &block).await.unwrap();
filter_header
} else {
FilterHeader::all_zeros()
};
let proof = TxoProof::prove_unchecked(&block, &filter_header, height);
tracker.add_block(block.header.clone(), proof)?;
Ok(block.header)
}
async fn remove_block(
tracker: &mut ChainTracker<MockListener>,
source: &DummyTxooSource,
txs: &[Transaction],
prev_header: &BlockHeader,
) -> Result<BlockHeader, Error> {
let txs = txs_with_coinbase(txs);
let block = make_block(*prev_header, txs);
let height = tracker.height();
let (_attestation, filter_header) = source.get(height, &block).await.unwrap();
let proof = TxoProof::prove_unchecked(&block, &filter_header, height);
let header = tracker.remove_block(proof)?;
Ok(header)
}
fn txs_with_coinbase(txs: &[Transaction]) -> Vec<Transaction> {
let mut txs = txs.to_vec();
txs.insert(
0,
Transaction {
version: 0,
lock_time: PackedLockTime(0),
input: vec![],
output: vec![Default::default()],
},
);
txs
}
#[tokio::test]
async fn test_retarget() -> Result<(), Error> {
let source = make_source().await;
let (mut tracker, _) = make_tracker()?;
for _ in 1..DIFFCHANGE_INTERVAL {
add_block(&mut tracker, &source, &[]).await?;
}
assert_eq!(tracker.height, DIFFCHANGE_INTERVAL - 1);
let target = tracker.tip().0.target();
let bits = BlockHeader::compact_target_from_u256(&(target << 1));
assert_eq!(
add_block_with_bits(&mut tracker, &source, bits, false).await.err(),
Some(Error::InvalidBlock)
);
let bits = BlockHeader::compact_target_from_u256(&(target >> 3));
assert_eq!(
add_block_with_bits(&mut tracker, &source, bits, false).await.err(),
Some(Error::InvalidChain)
);
let bits = BlockHeader::compact_target_from_u256(&(target >> 1));
add_block_with_bits(&mut tracker, &source, bits, true).await?;
Ok(())
}
#[test]
fn test_retarget_rounding() -> Result<(), Error> {
validate_retarget(
BlockHeader::u256_from_compact_target(0x1c063051),
BlockHeader::u256_from_compact_target(0x1c018c14),
Network::Testnet,
)?;
Ok(())
}
fn make_tracker() -> Result<(ChainTracker<MockListener>, Arc<MockValidatorFactory>), Error> {
let genesis = genesis_block(Network::Regtest);
let validator_factory = Arc::new(MockValidatorFactory::new());
let (node_id, _, _) = make_node();
let tip = Headers(genesis.header, FilterHeader::all_zeros());
let tracker =
ChainTracker::new(Network::Regtest, 0, tip, node_id, validator_factory.clone())?;
Ok((tracker, validator_factory))
}
}