use std::{
fmt, str, io,
sync::Arc,
collections::HashMap,
};
use vapory_types::H256;
use tetsy_hash_db::HashDB;
use tetsy_keccak_hasher::KeccakHasher;
use tetsy_kvdb::{self, DBTransaction, DBValue};
use tetsy_bytes::Bytes;
mod archivedb;
mod earlymergedb;
mod overlayrecentdb;
mod refcounteddb;
mod util;
mod as_hash_db_impls;
mod overlaydb;
/// A `HashDB` which can manage a short-term journal of recent state changes,
/// allowing historical state to be reverted or pruned once it falls out of
/// the journal window.
pub trait JournalDB: HashDB<KeccakHasher, DBValue> {
/// Return a copy of ourself, in a box.
fn boxed_clone(&self) -> Box<dyn JournalDB>;
/// Returns heap memory size used.
fn mem_used(&self) -> usize;
/// Returns the size of journalled state in memory.
/// This function has a considerable speed requirement --
/// it is called automatically when the journal is not otherwise being used.
fn journal_size(&self) -> usize { 0 }
/// Check if this database has any commits.
fn is_empty(&self) -> bool;
/// Get the earliest era in the DB. None if there isn't yet any data in there.
/// Default implementation returns `None`; backends that track eras override it.
fn earliest_era(&self) -> Option<u64> { None }
/// Get the latest era in the DB. None if there isn't yet any data in there.
fn latest_era(&self) -> Option<u64>;
/// Journal recent database operations as being associated with a given era and id.
// TODO: give the overlay to this function so journaldbs don't manage the overlays themselves.
fn journal_under(&mut self, batch: &mut DBTransaction, now: u64, id: &H256) -> io::Result<u32>;
/// Mark a given block as canonical, indicating that competing blocks' states may be pruned out.
fn mark_canonical(&mut self, batch: &mut DBTransaction, era: u64, id: &H256) -> io::Result<u32>;
/// Commit all queued insertions and deletions without affecting the journal.
fn inject(&mut self, batch: &mut DBTransaction) -> io::Result<u32>;
/// State data query
fn state(&self, _id: &H256) -> Option<Bytes>;
/// Whether this database is pruned (i.e. old state is discarded).
/// Defaults to `true`; archive-style backends override to `false`.
fn is_prunable(&self) -> bool { true }
/// Get backing database.
fn backing(&self) -> &Arc<dyn tetsy_kvdb::KeyValueDB>;
/// Clear internal state buffers. Default is a no-op; backends with
/// in-memory overlays override this.
fn flush(&self) {}
/// Consolidate all the insertions and deletions in the given memory overlay.
fn consolidate(&mut self, overlay: MemoryDB);
/// Primarily use for tests, highly inefficient.
fn keys(&self) -> HashMap<H256, i32>;
}
/// In-memory `HashDB` keyed by plain Keccak-256 hashes, used as the overlay
/// type consumed by `JournalDB::consolidate`.
type MemoryDB = tetsy_memory_db::MemoryDB<
tetsy_keccak_hasher::KeccakHasher,
tetsy_memory_db::HashKey<tetsy_keccak_hasher::KeccakHasher>,
tetsy_kvdb::DBValue,
>;
/// Journal database operating strategy.
///
/// A fieldless enum, so `Eq` and `Hash` are derived alongside `PartialEq`
/// (clippy: `derive_partial_eq_without_eq`); this also lets `Algorithm` be
/// used as a map/set key.
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
pub enum Algorithm {
/// Keep all keys forever.
Archive,
/// Ancient and recent history maintained separately; recent history lasts for particular
/// number of blocks.
EarlyMerge,
/// Ancient and recent history maintained separately; recent history lasts for particular
/// number of blocks.
OverlayRecent,
/// Ancient and recent history maintained separately; recent history lasts for particular
/// number of blocks.
RefCounted,
}
impl str::FromStr for Algorithm {
type Err = String;

/// Parse a user-facing strategy name (`archive`, `light`, `fast`, `basic`).
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parsed = match s {
"archive" => Algorithm::Archive,
"light" => Algorithm::EarlyMerge,
"fast" => Algorithm::OverlayRecent,
"basic" => Algorithm::RefCounted,
other => return Err(format!("Invalid algorithm: {}", other)),
};
Ok(parsed)
}
}
impl Algorithm {
/// User-facing name of the strategy (the same spelling `FromStr` accepts).
pub fn as_str(&self) -> &'static str {
match self {
Algorithm::Archive => "archive",
Algorithm::EarlyMerge => "light",
Algorithm::OverlayRecent => "fast",
Algorithm::RefCounted => "basic",
}
}

/// Internal (module-level) name of the strategy.
pub fn as_internal_name_str(&self) -> &'static str {
match self {
Algorithm::Archive => "archive",
Algorithm::EarlyMerge => "earlymerge",
Algorithm::OverlayRecent => "overlayrecent",
Algorithm::RefCounted => "refcounted",
}
}

/// True for the strategies considered stable (`Archive` and `OverlayRecent`).
pub fn is_stable(&self) -> bool {
matches!(self, Algorithm::Archive | Algorithm::OverlayRecent)
}

/// Every supported strategy, in declaration order.
pub fn all_types() -> Vec<Algorithm> {
vec![
Algorithm::Archive,
Algorithm::EarlyMerge,
Algorithm::OverlayRecent,
Algorithm::RefCounted,
]
}
}
impl fmt::Display for Algorithm {
/// Displays the user-facing name, identical to `as_str()`.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(self.as_str())
}
}
pub fn new(backing: Arc<dyn (::tetsy_kvdb::KeyValueDB)>, algorithm: Algorithm, col: u32) -> Box<dyn JournalDB> {
match algorithm {
Algorithm::Archive => Box::new(archivedb::ArchiveDB::new(backing, col)),
Algorithm::EarlyMerge => Box::new(earlymergedb::EarlyMergeDB::new(backing, col)),
Algorithm::OverlayRecent => Box::new(overlayrecentdb::OverlayRecentDB::new(backing, col)),
Algorithm::RefCounted => Box::new(refcounteddb::RefCountedDB::new(backing, col)),
}
}
/// Length (in bytes) of the key prefix used by the backing key-value store.
const DB_PREFIX_LEN : usize = ::tetsy_kvdb::PREFIX_LEN;
/// Key under which the latest journalled era number is stored:
/// the ASCII bytes `"last"` zero-padded to the full prefix length.
const LATEST_ERA_KEY : [u8; ::tetsy_kvdb::PREFIX_LEN] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
/// Build the `io::Error` reported when inserting a key that is already present.
fn error_key_already_exists(hash: &vapory_types::H256) -> io::Error {
let message = hash.to_string();
io::Error::new(io::ErrorKind::AlreadyExists, message)
}
/// Build the `io::Error` reported when a hash's reference count would go negative.
fn error_negatively_reference_hash(hash: &vapory_types::H256) -> io::Error {
let message = format!("Entry {} removed from database more times than it was added.", hash);
io::Error::new(io::ErrorKind::Other, message)
}
/// Create a fresh in-memory overlay `MemoryDB`, pre-seeded with the RLP
/// null-node value so the empty trie root resolves.
pub fn new_tetsy_memory_db() -> MemoryDB {
MemoryDB::from_null_node(&tetsy_rlp::NULL_RLP, tetsy_rlp::NULL_RLP.as_ref().into())
}
#[cfg(test)]
/// Test helper: run `inject` inside a fresh transaction and write it to the
/// backing store, returning the operation count.
pub fn inject_batch(jdb: &mut dyn JournalDB) -> io::Result<u32> {
let mut batch = jdb.backing().transaction();
let count = jdb.inject(&mut batch)?;
jdb.backing().write(batch)?;
Ok(count)
}
#[cfg(test)]
/// Test helper: journal operations under `(now, id)` and, if `end` is given,
/// mark that era/id canonical — all in a single transaction. The database is
/// flushed after the write attempt regardless of its outcome.
fn commit_batch(jdb: &mut dyn JournalDB, now: u64, id: &H256, end: Option<(u64, H256)>) -> io::Result<u32> {
let mut batch = jdb.backing().transaction();
let mut ops = jdb.journal_under(&mut batch, now, id)?;
if let Some((end_era, canon_id)) = end {
ops += jdb.mark_canonical(&mut batch, end_era, &canon_id)?;
}
// Capture the write result first: flush must run even when the write fails.
let write_result = jdb.backing().write(batch);
jdb.flush();
write_result.map(|_| ops).map_err(Into::into)
}
#[cfg(test)]
mod tests {
use super::Algorithm;

/// The four strategies paired with their user-facing names.
const NAMED: [(Algorithm, &str); 4] = [
(Algorithm::Archive, "archive"),
(Algorithm::EarlyMerge, "light"),
(Algorithm::OverlayRecent, "fast"),
(Algorithm::RefCounted, "basic"),
];

#[test]
fn test_journal_algorithm_parsing() {
for &(algorithm, name) in &NAMED {
assert_eq!(algorithm, name.parse().unwrap());
}
}

#[test]
fn test_journal_algorithm_printing() {
for &(algorithm, name) in &NAMED {
assert_eq!(algorithm.to_string(), name.to_owned());
}
}

#[test]
fn test_journal_algorithm_is_stable() {
assert!(Algorithm::Archive.is_stable());
assert!(Algorithm::OverlayRecent.is_stable());
assert!(!Algorithm::EarlyMerge.is_stable());
assert!(!Algorithm::RefCounted.is_stable());
}

#[test]
fn test_journal_algorithm_all_types() {
// Each variant must appear exactly once in `all_types`.
let all = Algorithm::all_types();
for &(algorithm, _) in &NAMED {
let occurrences = all.iter().filter(|&&a| a == algorithm).count();
assert_eq!(occurrences, 1);
}
}
}