use crate::chain::OrphanBlockPool;
use crate::core::consensus;
use crate::core::core::hash::Hashed;
use crate::core::core::verifier_cache::VerifierCache;
use crate::core::core::Committed;
use crate::core::core::{Block, BlockHeader, BlockSums};
use crate::core::global;
use crate::core::pow;
use crate::error::{Error, ErrorKind};
use crate::store;
use crate::txhashset;
use crate::types::{Options, Tip};
use crate::util::RwLock;
use chrono::prelude::Utc;
use chrono::Duration;
use grin_store;
use std::sync::Arc;
/// Contextual information required to process a new block and either
/// reject or accept it. Shared by all the pipe functions below.
pub struct BlockContext<'a> {
/// Validation options (e.g. SKIP_POW to bypass proof-of-work checks).
pub opts: Options,
/// Function used to verify the proof of work of a block header.
pub pow_verifier: fn(&BlockHeader) -> Result<(), pow::Error>,
/// Mutable access to the txhashset extension machinery.
pub txhashset: &'a mut txhashset::TxHashSet,
/// Database batch used for reads and staged writes during processing.
pub batch: store::Batch<'a>,
/// Cache passed into full block validation (see `validate_block`).
pub verifier_cache: Arc<RwLock<dyn VerifierCache>>,
/// Pool of blocks whose parent is not yet known (consulted by `check_known_orphans`).
pub orphans: Arc<OrphanBlockPool>,
}
/// Process the header for a block being accepted: check the header root
/// inside a (rolled-back) header extension, then validate the header,
/// persist it and potentially advance header_head.
fn process_header_for_block(
header: &BlockHeader,
is_fork: bool,
ctx: &mut BlockContext<'_>,
) -> Result<(), Error> {
txhashset::header_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
// The extension is only used to check the root here, so it is
// always rolled back rather than committed.
extension.force_rollback();
if is_fork {
// Rewind the header MMR to the fork point and re-apply fork headers.
rewind_and_apply_header_fork(header, extension)?;
}
extension.validate_root(header)?;
extension.apply_header(header)?;
Ok(())
})?;
// Consensus checks on the header itself (version, timestamp, pow, difficulty).
validate_header(header, ctx)?;
add_block_header(header, &ctx.batch)?;
// Advances header_head only if this header has more total work.
update_header_head(header, ctx)?;
Ok(())
}
/// Quick duplicate checks for a new block: reject it if it is already the
/// chain head (or its parent), already in the orphan pool, or already in
/// the store. Returns Ok(()) only for blocks we have not seen.
fn check_known(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	let header = &block.header;
	check_known_head(header, ctx)?;
	check_known_orphans(header, ctx)?;
	check_known_store(header, ctx)
}
/// Runs a block through the full acceptance pipeline: duplicate checks,
/// orphan detection, header processing, full validation and application
/// to the txhashset. Returns the new chain head tip if total work
/// increased, None otherwise.
pub fn process_block(b: &Block, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {
debug!(
"pipe: process_block {} at {} [in/out/kern: {}/{}/{}]",
b.hash(),
b.header.height,
b.inputs().len(),
b.outputs().len(),
b.kernels().len(),
);
// Bail out early if the block is already known (head, orphans or store).
check_known(b, ctx)?;
let head = ctx.batch.head()?;
let is_next = b.header.prev_hash == head.last_block_h;
// Look up the previous header; a missing parent maps to Orphan.
let prev = prev_header_store(&b.header, &mut ctx.batch)?;
// If the parent full block is not in the store either, treat as orphan.
if !is_next && !ctx.batch.block_exists(&prev.hash())? {
return Err(ErrorKind::Orphan.into());
}
// Anything that does not directly extend the current head is a fork.
let is_fork = !is_next;
process_header_for_block(&b.header, is_fork, ctx)?;
validate_block(b, ctx)?;
txhashset::extending(&mut ctx.txhashset, &mut ctx.batch, |mut extension| {
if is_fork {
// Rewind to the fork point and re-apply the fork blocks first.
rewind_and_apply_fork(b, extension)?;
}
verify_coinbase_maturity(b, &mut extension)?;
validate_utxo(b, &mut extension)?;
verify_block_sums(b, &mut extension)?;
apply_block_to_txhashset(b, &mut extension)?;
// Only keep the txhashset changes if this block wins on total work.
let head = extension.batch.head()?;
if !has_more_work(&b.header, &head) {
extension.force_rollback();
}
Ok(())
})?;
// The block itself is saved regardless of whether it becomes the head.
add_block(b, &ctx.batch)?;
// Initialize the body tail if it has never been set.
if ctx.batch.tail().is_err() {
update_body_tail(&b.header, &ctx.batch)?;
}
let res = update_head(b, ctx)?;
Ok(res)
}
/// Process a batch of sequential block headers during sync: validate and
/// apply them to the header MMR, persist them, then advance sync_head and
/// (on more total work) header_head. Returns the new header_head tip if it
/// was updated, None otherwise (including for an empty slice).
pub fn sync_block_headers(
	headers: &[BlockHeader],
	ctx: &mut BlockContext<'_>,
) -> Result<Option<Tip>, Error> {
	// Destructure first/last once; an empty slice is a no-op. This replaces
	// three separate Option checks (and an unwrap) on the same slice.
	let (first_header, last_header) = match (headers.first(), headers.last()) {
		(Some(first), Some(last)) => (first, last),
		_ => return Ok(None),
	};

	debug!(
		"pipe: sync_block_headers: {} headers from {} at {}",
		headers.len(),
		first_header.hash(),
		first_header.height,
	);

	// Headers are sequential, so if the last one is already in the store
	// all of them are and we can skip validation/application entirely.
	let all_known = ctx.batch.get_block_header(&last_header.hash()).is_ok();

	if !all_known {
		let prev_header = ctx.batch.get_previous_header(first_header)?;
		txhashset::sync_extending(&mut ctx.txhashset, &mut ctx.batch, |extension| {
			// Rewind the header MMR back before the first new header.
			extension.rewind(&prev_header)?;
			for header in headers {
				extension.validate_root(header)?;
				extension.apply_header(header)?;
				add_block_header(header, &extension.batch)?;
			}
			Ok(())
		})?;
		// Consensus-validate each header now that its predecessors are stored.
		for header in headers {
			validate_header(header, ctx)?;
		}
	}

	// sync_head always advances; header_head only advances on more work.
	update_sync_head(last_header, &mut ctx.batch)?;
	update_header_head(last_header, ctx)
}
/// Process a single block header received in isolation: reject it if it is
/// already known, otherwise run the full header consensus validation.
pub fn process_block_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	debug!(
		"pipe: process_block_header: {} at {}",
		header.hash(),
		header.height,
	);
	check_header_known(header, ctx)?;
	validate_header(header, ctx)
}
/// Reject a header that is already at (or directly behind) the current
/// header chain head.
fn check_header_known(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	let header_head = ctx.batch.header_head()?;
	let hash = header.hash();
	if hash != header_head.last_block_h && hash != header_head.prev_block_h {
		Ok(())
	} else {
		Err(ErrorKind::Unfit("header already known".to_string()).into())
	}
}
/// Reject a block whose header matches the current chain head or its parent.
fn check_known_head(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	let head = ctx.batch.head()?;
	let hash = header.hash();
	if hash == head.last_block_h || hash == head.prev_block_h {
		return Err(ErrorKind::Unfit("already known in head".to_string()).into());
	}
	Ok(())
}
/// Reject a block that is already sitting in the orphan pool.
fn check_known_orphans(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	let hash = header.hash();
	if !ctx.orphans.contains(&hash) {
		return Ok(());
	}
	Err(ErrorKind::Unfit("already known in orphans".to_string()).into())
}
/// Reject a block whose full data is already in the store. Blocks more
/// than 50 below the current head are reported as OldBlock so callers can
/// treat stale rebroadcasts differently from recent duplicates.
fn check_known_store(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	match ctx.batch.block_exists(&header.hash()) {
		Ok(true) => {
			let head = ctx.batch.head()?;
			// NOTE(review): 50 looks like a staleness horizon — confirm
			// against the caller's expectations before changing it.
			if header.height < head.height.saturating_sub(50) {
				Err(ErrorKind::OldBlock.into())
			} else {
				Err(ErrorKind::Unfit("already known in store".to_string()).into())
			}
		}
		// Not in the store; fine to process.
		Ok(false) => Ok(()),
		// No needless `return` in a match arm; the arm is the expression.
		Err(e) => Err(ErrorKind::StoreErr(e, "pipe get this block".to_owned()).into()),
	}
}
/// Fetch the previous header from the store, mapping a missing parent to
/// Orphan and any other store failure to StoreErr.
fn prev_header_store(
	header: &BlockHeader,
	batch: &mut store::Batch<'_>,
) -> Result<BlockHeader, Error> {
	batch.get_previous_header(header).map_err(|e| {
		let kind = match e {
			grin_store::Error::NotFoundErr(_) => ErrorKind::Orphan,
			_ => ErrorKind::StoreErr(e, "check prev header".into()),
		};
		kind.into()
	})
}
fn validate_header(header: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
if !consensus::valid_header_version(header.height, header.version) {
error!(
"Invalid block header version received ({}), maybe update Grin?",
header.version
);
return Err(ErrorKind::InvalidBlockVersion(header.version).into());
}
if header.timestamp > Utc::now() + Duration::seconds(12 * (consensus::BLOCK_TIME_SEC as i64))
&& !global::is_automated_testing_mode()
{
return Err(ErrorKind::InvalidBlockTime.into());
}
if !ctx.opts.contains(Options::SKIP_POW) {
if !header.pow.is_primary() && !header.pow.is_secondary() {
return Err(ErrorKind::LowEdgebits.into());
}
let edge_bits = header.pow.edge_bits();
if !(ctx.pow_verifier)(header).is_ok() {
error!(
"pipe: error validating header with cuckoo edge_bits {}",
edge_bits
);
return Err(ErrorKind::InvalidPow.into());
}
}
let prev = prev_header_store(header, &mut ctx.batch)?;
if header.height != prev.height + 1 {
return Err(ErrorKind::InvalidBlockHeight.into());
}
if header.timestamp <= prev.timestamp && !global::is_automated_testing_mode() {
return Err(ErrorKind::InvalidBlockTime.into());
}
if !ctx.opts.contains(Options::SKIP_POW) {
if header.total_difficulty() <= prev.total_difficulty() {
return Err(ErrorKind::DifficultyTooLow.into());
}
let target_difficulty = header.total_difficulty() - prev.total_difficulty();
if header.pow.to_difficulty(header.height) < target_difficulty {
return Err(ErrorKind::DifficultyTooLow.into());
}
let child_batch = ctx.batch.child()?;
let diff_iter = store::DifficultyIter::from_batch(prev.hash(), child_batch);
let next_header_info = consensus::next_difficulty(header.height, diff_iter);
if target_difficulty != next_header_info.difficulty {
info!(
"validate_header: header target difficulty {} != {}",
target_difficulty.to_num(),
next_header_info.difficulty.to_num()
);
return Err(ErrorKind::WrongTotalDifficulty.into());
}
if header.pow.secondary_scaling != next_header_info.secondary_scaling {
info!(
"validate_header: header secondary scaling {} != {}",
header.pow.secondary_scaling, next_header_info.secondary_scaling
);
return Err(ErrorKind::InvalidScaling.into());
}
}
Ok(())
}
/// Fully validate the block content (kernel sums, signatures, rangeproofs)
/// against the total kernel offset of the previous block.
fn validate_block(block: &Block, ctx: &mut BlockContext<'_>) -> Result<(), Error> {
	let prev = ctx.batch.get_previous_header(&block.header)?;
	block
		.validate(&prev.total_kernel_offset, ctx.verifier_cache.clone())
		// tuple-variant constructor used directly instead of the
		// redundant closure `|e| ErrorKind::InvalidBlockProof(e)`.
		.map_err(ErrorKind::InvalidBlockProof)?;
	Ok(())
}
/// Check that all coinbase outputs spent by this block are mature, using
/// the utxo view of the current extension.
fn verify_coinbase_maturity(block: &Block, ext: &txhashset::Extension<'_>) -> Result<(), Error> {
	let utxo = ext.utxo_view();
	utxo.verify_coinbase_maturity(&block.inputs(), block.header.height)
}
/// Verify the block's kernel sums against the sums of the previous block,
/// then persist the updated sums keyed by this block's hash.
fn verify_block_sums(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {
	// The parent block's sums are the starting point for verification.
	let prev = ext.batch.get_previous_header(&b.header)?;
	let prev_sums = ext.batch.get_block_sums(&prev.hash())?;

	// Verify the sums hold once this block's overage and offset apply.
	let (utxo_sum, kernel_sum) = (prev_sums, b as &dyn Committed)
		.verify_kernel_sums(b.header.overage(), b.header.total_kernel_offset())?;

	// Save the new running sums for this block.
	let sums = BlockSums {
		utxo_sum,
		kernel_sum,
	};
	ext.batch.save_block_sums(&b.header.hash(), &sums)?;
	Ok(())
}
/// Apply the block to the txhashset extension: first check the header root
/// against the extension, then apply the block, then validate the
/// resulting MMR roots and sizes against the block header.
fn apply_block_to_txhashset(
block: &Block,
ext: &mut txhashset::Extension<'_>,
) -> Result<(), Error> {
ext.validate_header_root(&block.header)?;
ext.apply_block(block)?;
ext.validate_roots()?;
ext.validate_sizes()?;
Ok(())
}
/// Persist the full block to the store via the batch.
fn add_block(b: &Block, batch: &store::Batch<'_>) -> Result<(), Error> {
	if let Err(e) = batch.save_block(b) {
		return Err(ErrorKind::StoreErr(e, "pipe save block".to_owned()).into());
	}
	Ok(())
}
/// Persist the body tail tip derived from the given header.
fn update_body_tail(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {
	let tip = Tip::from_header(bh);
	if let Err(e) = batch.save_body_tail(&tip) {
		return Err(ErrorKind::StoreErr(e, "pipe save body tail".to_owned()).into());
	}
	debug!("body tail {} @ {}", bh.hash(), bh.height);
	Ok(())
}
/// Persist the block header to the store via the batch.
fn add_block_header(bh: &BlockHeader, batch: &store::Batch<'_>) -> Result<(), Error> {
	match batch.save_block_header(bh) {
		Ok(_) => Ok(()),
		Err(e) => Err(ErrorKind::StoreErr(e, "pipe save header".to_owned()).into()),
	}
}
/// Update the body head in the batch if the block represents more total
/// work than the current head. Returns the new tip if updated, None if not.
fn update_head(b: &Block, ctx: &BlockContext<'_>) -> Result<Option<Tip>, Error> {
	let head = ctx.batch.head()?;
	// Guard clause: nothing to do unless this block wins on total work.
	if !has_more_work(&b.header, &head) {
		return Ok(None);
	}
	let tip = Tip::from_header(&b.header);
	ctx.batch
		.save_body_head(&tip)
		.map_err(|e| ErrorKind::StoreErr(e, "pipe save body".to_owned()))?;
	debug!(
		"pipe: head updated to {} at {}",
		tip.last_block_h, tip.height
	);
	Ok(Some(tip))
}
/// True if the header carries strictly more total difficulty than the tip.
fn has_more_work(header: &BlockHeader, head: &Tip) -> bool {
	head.total_difficulty < header.total_difficulty()
}
/// Persist the sync head tip derived from the given header.
fn update_sync_head(bh: &BlockHeader, batch: &mut store::Batch<'_>) -> Result<(), Error> {
	let tip = Tip::from_header(bh);
	if let Err(e) = batch.save_sync_head(&tip) {
		return Err(ErrorKind::StoreErr(e, "pipe save sync head".to_owned()).into());
	}
	debug!("sync head {} @ {}", bh.hash(), bh.height);
	Ok(())
}
/// Update header_head in the batch if the header represents more total
/// work than the current header_head. Returns the new tip if updated.
fn update_header_head(bh: &BlockHeader, ctx: &mut BlockContext<'_>) -> Result<Option<Tip>, Error> {
	let header_head = ctx.batch.header_head()?;
	// `bh` is already a `&BlockHeader` — the extra `&bh` was a needless borrow.
	if has_more_work(bh, &header_head) {
		let tip = Tip::from_header(bh);
		ctx.batch
			.save_header_head(&tip)
			.map_err(|e| ErrorKind::StoreErr(e, "pipe save header head".to_owned()))?;
		debug!(
			"pipe: header_head updated to {} at {}",
			tip.last_block_h, tip.height
		);
		Ok(Some(tip))
	} else {
		Ok(None)
	}
}
/// Rewind the header extension to the fork point shared with the current
/// chain, then re-apply all headers on the fork up to (but excluding) the
/// given header.
pub fn rewind_and_apply_header_fork(
	header: &BlockHeader,
	ext: &mut txhashset::HeaderExtension<'_>,
) -> Result<(), Error> {
	// Walk back from the header's parent until we reach a header on the
	// current chain (the fork point), collecting fork hashes as we go.
	// was `!(...).is_ok()` — `is_err()` states the intent directly.
	let mut fork_hashes = vec![];
	let mut current = ext.batch.get_previous_header(header)?;
	while current.height > 0 && ext.is_on_current_chain(&current).is_err() {
		fork_hashes.push(current.hash());
		current = ext.batch.get_previous_header(&current)?;
	}
	fork_hashes.reverse();

	let forked_header = current;

	// Rewind the header MMR state back to the fork point.
	ext.rewind(&forked_header)?;

	// Re-apply each forked header in order.
	for h in fork_hashes {
		let header = ext
			.batch
			.get_block_header(&h)
			// `format!` with a bare literal replaced by `to_string()`.
			.map_err(|e| ErrorKind::StoreErr(e, "getting forked headers".to_string()))?;
		ext.apply_header(&header)?;
	}
	Ok(())
}
/// Rewind the txhashset extension to the fork point shared with the
/// current chain, then re-validate and re-apply every full block on the
/// fork up to (but excluding) the given block.
pub fn rewind_and_apply_fork(b: &Block, ext: &mut txhashset::Extension<'_>) -> Result<(), Error> {
	// Walk back from the block's parent to the fork point, collecting the
	// hashes of the fork blocks as we go.
	// was `!(...).is_ok()` — `is_err()` states the intent directly.
	let mut fork_hashes = vec![];
	let mut current = ext.batch.get_previous_header(&b.header)?;
	while current.height > 0 && ext.is_on_current_chain(&current).is_err() {
		fork_hashes.push(current.hash());
		current = ext.batch.get_previous_header(&current)?;
	}
	fork_hashes.reverse();

	let forked_header = current;

	// Rewind the txhashset state back to the fork point.
	ext.rewind(&forked_header)?;

	// Re-validate and re-apply each forked block in order.
	for h in fork_hashes {
		let fb = ext
			.batch
			.get_block(&h)
			// `format!` with a bare literal replaced by `to_string()`.
			.map_err(|e| ErrorKind::StoreErr(e, "getting forked blocks".to_string()))?;
		verify_coinbase_maturity(&fb, ext)?;
		validate_utxo(&fb, ext)?;
		verify_block_sums(&fb, ext)?;
		apply_block_to_txhashset(&fb, ext)?;
	}
	Ok(())
}
/// Validate the block's inputs and outputs against the current utxo view.
fn validate_utxo(block: &Block, ext: &txhashset::Extension<'_>) -> Result<(), Error> {
	let view = ext.utxo_view();
	view.validate_block(block)
}