pub mod backend;
mod cache;
mod error;
pub mod migration;
#[cfg(feature = "postgres")]
mod postgres;
#[cfg(feature = "sqlite")]
mod sqlite;
pub mod store;
use std::cmp::Reverse;
use std::collections::{HashMap, HashSet};
use sha2::{Digest, Sha512};
use crate::error::InternalError;
use crate::state::error::StateWriteError;
#[cfg(feature = "state-in-transaction")]
use crate::state::State;
use crate::state::StateChange;
use super::node::Node;
use backend::Backend;
pub use error::SqlMerkleStateBuildError;
use store::{MerkleRadixStore, TreeUpdate};
// Number of hex characters in a single radix-tree branch token (one byte of
// an address); addresses are walked TOKEN_SIZE characters at a time.
const TOKEN_SIZE: usize = 2;
/// Builder for constructing a SQL-backed merkle state instance.
#[derive(Default)]
pub struct SqlMerkleStateBuilder<B: Backend> {
    // Database backend the resulting state will operate against.
    backend: Option<B>,
    // Name of the merkle tree to operate on.
    tree_name: Option<String>,
    // When true, the tree is created if it does not already exist.
    create_tree: bool,
    // Minimum data size threshold for caching values.
    // NOTE(review): units presumed to be bytes — confirm against cache::DataCache.
    min_cached_data_size: Option<usize>,
    // Capacity of the data cache.
    // NOTE(review): presumed to be an entry count — confirm against cache::DataCache.
    cache_size: Option<u16>,
}
impl<B: Backend> SqlMerkleStateBuilder<B> {
    /// Constructs a builder with nothing configured: no backend, no tree
    /// name, tree creation disabled, and no cache settings.
    pub fn new() -> Self {
        Self {
            backend: None,
            tree_name: None,
            create_tree: false,
            min_cached_data_size: None,
            cache_size: None,
        }
    }

    /// Sets the database backend to use.
    pub fn with_backend(self, backend: B) -> Self {
        Self {
            backend: Some(backend),
            ..self
        }
    }

    /// Sets the name of the merkle tree to operate on.
    pub fn with_tree<S: Into<String>>(self, tree_name: S) -> Self {
        Self {
            tree_name: Some(tree_name.into()),
            ..self
        }
    }

    /// Requests that the tree be created if it does not already exist.
    pub fn create_tree_if_necessary(self) -> Self {
        Self {
            create_tree: true,
            ..self
        }
    }

    /// Sets the minimum data size for cached values.
    pub fn with_min_cached_data_size(self, size: usize) -> Self {
        Self {
            min_cached_data_size: Some(size),
            ..self
        }
    }

    /// Sets the cache size.
    pub fn with_cache_size(self, size: u16) -> Self {
        Self {
            cache_size: Some(size),
            ..self
        }
    }
}
/// Merkle-radix state stored in a SQL database via a pluggable backend.
pub struct SqlMerkleState<B: Backend> {
    // Backend providing database connections/execution.
    backend: B,
    // Row id of the tree this state instance operates on.
    tree_id: i64,
    // Cache of data values shared across clones of this state.
    cache: cache::DataCache,
}
impl<B: Backend> SqlMerkleState<B> {
    /// Returns the hex-encoded hash of an empty tree's root node — the state
    /// root before any values have been written.
    pub fn initial_state_root_hash(&self) -> Result<String, InternalError> {
        encode_and_hash(Node::default()).map(|(hash, _)| hex::encode(hash))
    }
}
// Field-wise clone; available whenever the backend itself is cloneable. The
// cache is cloned via its own `Clone` impl, so clones share or copy cached
// data per `cache::DataCache`'s semantics.
impl<B> Clone for SqlMerkleState<B>
where
    B: Backend + Clone,
{
    fn clone(&self) -> Self {
        Self {
            backend: self.backend.clone(),
            tree_id: self.tree_id,
            cache: self.cache.clone(),
        }
    }
}
#[cfg(feature = "state-in-transaction")]
impl<B: Backend> State for SqlMerkleState<B> {
    /// States are identified by their (hex-encoded) merkle state root hash.
    type StateId = String;
    /// Entries are addressed by string keys.
    type Key = String;
    /// Stored values are raw byte vectors.
    type Value = Vec<u8>;
}
/// A view over a merkle-radix store anchored at a specific state root of a
/// specific tree; reads resolve against that root and writes produce a new
/// root without modifying it.
struct MerkleRadixOverlay<'s, S> {
    // Row id of the tree being read/written.
    tree_id: i64,
    // The state root hash all operations are anchored to.
    state_root_hash: &'s str,
    // The underlying store implementation.
    inner: S,
}
impl<'s, S> MerkleRadixOverlay<'s, S>
where
    S: MerkleRadixStore,
{
    /// Creates an overlay over `inner`, anchored at `state_root_hash` in the
    /// tree identified by `tree_id`.
    fn new(tree_id: i64, state_root_hash: &'s str, inner: S) -> Self {
        Self {
            tree_id,
            state_root_hash,
            inner,
        }
    }

    /// Persists `tree_update` to the underlying store, recording
    /// `new_state_root` as a successor of the anchored root.
    ///
    /// An update with no node changes is a no-op and writes nothing.
    fn write_updates(
        &self,
        new_state_root: &str,
        tree_update: TreeUpdate,
    ) -> Result<(), InternalError> {
        if tree_update.node_changes.is_empty() {
            return Ok(());
        }
        self.inner.write_changes(
            self.tree_id,
            new_state_root,
            self.state_root_hash,
            tree_update,
        )?;
        Ok(())
    }

    /// Returns whether the anchored state root exists in the store.
    fn has_root(&self) -> Result<bool, InternalError> {
        self.inner.has_root(self.tree_id, self.state_root_hash)
    }

    /// Reads the values stored at `keys`, relative to the anchored state
    /// root. Keys with no stored value are simply absent from the result.
    fn get_entries(&self, keys: &[String]) -> Result<HashMap<String, Vec<u8>>, InternalError> {
        // Borrow the owned keys as &str for the store API.
        let keys = keys.iter().map(|k| &**k).collect::<Vec<_>>();
        self.inner
            .get_entries(self.tree_id, self.state_root_hash, keys)
            .map(|result| result.into_iter().collect::<HashMap<_, _>>())
    }

    /// Computes the new state root and the node insertions/deletions that
    /// result from applying `state_changes` on top of the anchored root.
    ///
    /// Nothing is written; the caller persists the returned `TreeUpdate` via
    /// `write_updates`. An empty change set yields the current root and a
    /// default (empty) update.
    fn generate_updates(
        &self,
        state_changes: &[StateChange],
    ) -> Result<(String, TreeUpdate), StateWriteError> {
        if state_changes.is_empty() {
            return Ok((self.state_root_hash.to_string(), TreeUpdate::default()));
        }
        // All nodes along every touched path, keyed by address prefix.
        let mut path_map = HashMap::new();
        // Hashes of nodes made unreachable by this update.
        let mut deletions = HashSet::new();
        // Address prefixes touched by Set operations; these must survive the
        // delete-collapse pass below even if they lose all children.
        let mut additions = HashSet::new();
        // Leaf addresses to delete, processed after all paths are loaded.
        let mut delete_items = vec![];
        for state_change in state_changes {
            match state_change {
                StateChange::Set { key, value } => {
                    let mut set_path_map = self
                        .get_path(key)
                        .map_err(|e| StateWriteError::StorageError(Box::new(e)))?;
                    {
                        let node = set_path_map
                            .get_mut(key)
                            .expect("Path map not correctly generated");
                        node.value = Some(value.to_vec());
                    }
                    for pkey in set_path_map.keys() {
                        additions.insert(pkey.clone());
                    }
                    path_map.extend(set_path_map);
                }
                StateChange::Delete { key } => {
                    let del_path_map = self
                        .get_path(key)
                        .map_err(|e| StateWriteError::StorageError(Box::new(e)))?;
                    path_map.extend(del_path_map);
                    delete_items.push(key);
                }
            }
        }
        // Remove deleted leaves and collapse any ancestors left childless,
        // walking from each leaf toward the root.
        for del_address in delete_items.iter() {
            path_map.remove(*del_address);
            let (mut parent_address, mut path_branch) = parent_and_branch(del_address);
            while !parent_address.is_empty() {
                let remove_parent = {
                    let parent_node = path_map
                        .get_mut(parent_address)
                        .expect("Path map not correctly generated or entry is deleted");
                    if let Some(old_hash_key) = parent_node.children.remove(path_branch) {
                        deletions.insert(old_hash_key);
                    }
                    parent_node.children.is_empty()
                };
                // A now-empty parent is pruned unless a Set in this same batch
                // also touches it (it will get a new child there).
                if remove_parent && !additions.contains(parent_address) {
                    path_map.remove(parent_address);
                } else {
                    break;
                }
                let (next_parent, next_branch) = parent_and_branch(parent_address);
                parent_address = next_parent;
                path_branch = next_branch;
                // The loop guard skips the root, so detach the pruned child
                // from the root node explicitly.
                if parent_address.is_empty() {
                    let parent_node = path_map
                        .get_mut(parent_address)
                        .expect("Path map not correctly generated");
                    if let Some(old_hash_key) = parent_node.children.remove(path_branch) {
                        deletions.insert(old_hash_key);
                    }
                }
            }
        }
        // Rehash bottom-up: longest addresses first so every child's new hash
        // is known before its parent is hashed.
        let mut sorted_paths: Vec<_> = path_map.keys().cloned().collect();
        sorted_paths.sort_by_key(|a| Reverse(a.len()));
        // After the loop this holds the hash of the last node processed — the
        // root (the only empty-address path) — i.e. the new state root.
        let mut key_hash_hex = String::new();
        let mut batch = Vec::with_capacity(sorted_paths.len());
        for path in sorted_paths {
            let node = path_map
                .remove(&path)
                .expect("Path map keys are out of sync");
            let (hash_key, _) = encode_and_hash(node.clone())
                .map_err(|e| StateWriteError::StorageError(Box::new(e)))?;
            key_hash_hex = hex::encode(&hash_key);
            if !path.is_empty() {
                // Point the parent at this node's new hash; the hash it
                // replaces is no longer reachable from the new root.
                let (parent_address, path_branch) = parent_and_branch(&path);
                let parent = path_map
                    .get_mut(parent_address)
                    .expect("Path map not correctly generated");
                if let Some(old_hash_key) = parent
                    .children
                    .insert(path_branch.to_string(), key_hash_hex.clone())
                {
                    deletions.insert(old_hash_key);
                }
            }
            batch.push((key_hash_hex.clone(), node, path));
        }
        // The previous root node itself is superseded by the new root.
        deletions.insert(self.state_root_hash.to_string());
        Ok((
            key_hash_hex,
            TreeUpdate {
                node_changes: batch,
                deletions,
            },
        ))
    }

    /// Loads the chain of nodes from the root down to `address`, returning a
    /// map of address prefix -> node. Prefixes beyond where the stored path
    /// ends are filled with default (empty) nodes, so the full path to
    /// `address` is always present in the result.
    fn get_path(&self, address: &str) -> Result<HashMap<String, Node>, InternalError> {
        // Every TOKEN_SIZE-aligned prefix of the address, root ("") first,
        // ending with the full address.
        let addresses_along_path = (0..address.len())
            .step_by(2)
            .map(|i| address[0..i].to_string())
            .chain(std::iter::once(address.to_string()));
        let node_path_iter = self
            .inner
            .get_path(self.tree_id, self.state_root_hash, address)?
            .into_iter()
            .map(|(_, node)| node)
            // Pad with empty nodes for prefixes not yet in the tree.
            .chain(std::iter::repeat(Node::default()));
        Ok(addresses_along_path
            .zip(node_path_iter)
            .collect::<HashMap<_, _>>())
    }
}
/// Removes no-longer-needed state roots (and their unreachable nodes) from a
/// single tree in a merkle-radix store.
struct MerkleRadixPruner<S> {
    // Row id of the tree to prune.
    tree_id: i64,
    // The underlying store implementation.
    inner: S,
}
impl<S> MerkleRadixPruner<S>
where
    S: MerkleRadixStore,
{
    /// Creates a pruner for the tree identified by `tree_id`.
    fn new(tree_id: i64, inner: S) -> Self {
        Self { tree_id, inner }
    }

    /// Prunes each of the given state roots from the tree, returning the
    /// hashes of all nodes removed, in pruning order. Stops at the first
    /// store error.
    fn prune(&self, state_ids: &[String]) -> Result<Vec<String>, InternalError> {
        state_ids
            .iter()
            .try_fold(Vec::new(), |mut removed, state_id| {
                removed.extend(self.inner.prune(self.tree_id, state_id)?);
                Ok(removed)
            })
    }
}
fn parent_and_branch(path: &str) -> (&str, &str) {
let parent_address = if !path.is_empty() {
&path[..path.len() - TOKEN_SIZE]
} else {
""
};
let path_branch = if !path.is_empty() {
&path[(path.len() - TOKEN_SIZE)..]
} else {
""
};
(parent_address, path_branch)
}
/// Serializes `node` and hashes the serialized bytes.
///
/// Returns `(hash, serialized_bytes)` so callers can reuse the packed form
/// without re-encoding.
fn encode_and_hash(node: Node) -> Result<(Vec<u8>, Vec<u8>), InternalError> {
    let packed = node.into_bytes()?;
    Ok((hash(&packed), packed))
}
/// Hashes `input` with SHA-512 and keeps only the first half of the digest
/// (256 bits), matching the node-hash format used throughout this module.
fn hash(input: &[u8]) -> Vec<u8> {
    let mut hasher = Sha512::new();
    hasher.update(input);
    let digest = hasher.finalize();
    digest[..digest.len() / 2].to_vec()
}