use crate as storage;
use crate::arena::{ArenaHash, ArenaKey, Sp};
use crate::db::{DB, InMemoryDB};
use crate::merkle_patricia_trie::Annotation;
use crate::merkle_patricia_trie::MerklePatriciaTrie;
use crate::merkle_patricia_trie::Semigroup;
use crate::storable::{Loader, SizeAnn};
use crate::{DefaultDB, Storable};
use base_crypto::time::Timestamp;
use crypto::digest::Digest;
use derive_where::derive_where;
#[cfg(feature = "proptest")]
use proptest::arbitrary::Arbitrary;
use rand::distributions::{Distribution, Standard};
#[cfg(feature = "proptest")]
use serialize::NoStrategy;
use serialize::{Deserializable, Serializable, Tagged, tag_enforcement_test};
use sha2::Sha256;
use std::borrow::Borrow;
use std::fmt::{Debug, Formatter};
use std::hash::Hash;
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::Arc;
pub use storage_core::storage::*;
/// Storage backed by an in-memory database hashed with SHA-256; useful for tests.
pub type InMemoryStorage = Storage<InMemoryDB<Sha256>>;
/// An immutable, persistent hash map from `K` to `V`.
///
/// Internally a [`Map`] keyed by the `D::Hasher` digest of the serialized key
/// (see `HashMap::gen_key`); the original key is stored next to the value so
/// iteration can recover it. The `invariant` hook re-verifies that every
/// stored key hashes to the slot it occupies.
#[derive(Storable)]
#[derive_where(Clone, Eq, PartialEq; V, A)]
#[storable(db = D, invariant = HashMap::invariant)]
pub struct HashMap<
K: Serializable + Storable<D>,
V: Storable<D>,
D: DB = DefaultDB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)> = SizeAnn,
>(
// The inner map is `pub` only with the `public-internal-structure` feature.
#[cfg(feature = "public-internal-structure")]
#[allow(clippy::type_complexity)]
pub Map<ArenaHash<D::Hasher>, (Sp<K, D>, Sp<V, D>), D, A>,
#[cfg(not(feature = "public-internal-structure"))]
#[allow(clippy::type_complexity)]
Map<ArenaHash<D::Hasher>, (Sp<K, D>, Sp<V, D>), D, A>,
);
impl<
    K: Serializable + Storable<D> + Tagged,
    V: Storable<D> + Tagged,
    D: DB,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)> + Tagged,
> Tagged for HashMap<K, V, D, A>
{
    /// Tag parameterized over the key, value and annotation tags.
    fn tag() -> std::borrow::Cow<'static, str> {
        let rendered = format!("hash-map({},{},{})", K::tag(), V::tag(), A::tag());
        std::borrow::Cow::Owned(rendered)
    }
    /// Structurally this is a `Map` keyed by hashes, so its factor applies.
    fn tag_unique_factor() -> String {
        <Map<ArenaHash<D::Hasher>, (Sp<K, D>, Sp<V, D>), D, A>>::tag_unique_factor()
    }
}
tag_enforcement_test!(HashMap<(), ()>);
impl<
    K: Serializable + Storable<D>,
    V: Storable<D> + PartialEq,
    D: DB,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Hash for HashMap<K, V, D, A>
{
    /// Hashes the underlying hash-keyed map.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        let Self(inner) = self;
        inner.hash(state);
    }
}
impl<
    K: Serializable + Storable<D> + PartialOrd,
    V: Storable<D> + PartialOrd,
    D: DB,
    A: Storable<D> + PartialOrd + Annotation<(Sp<K, D>, Sp<V, D>)>,
> PartialOrd for HashMap<K, V, D, A>
{
    /// Ordering is delegated to the underlying map.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        let Self(lhs) = self;
        let Self(rhs) = other;
        lhs.partial_cmp(rhs)
    }
}
impl<
    K: Serializable + Storable<D> + Ord,
    V: Storable<D> + Ord,
    D: DB,
    A: Storable<D> + Ord + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Ord for HashMap<K, V, D, A>
{
    /// Total ordering is delegated to the underlying map.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        let Self(lhs) = self;
        let Self(rhs) = other;
        lhs.cmp(rhs)
    }
}
impl<
    K: Debug + Serializable + Storable<D>,
    V: Debug + Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Debug for HashMap<K, V, D, A>
{
    /// Renders the map as `{key: value, ...}` from its entries.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut builder = f.debug_map();
        for kv in self.iter() {
            builder.entry(&kv.0.clone(), &kv.1.clone());
        }
        builder.finish()
    }
}
#[cfg(feature = "proptest")]
// `Arbitrary` is stubbed with `NoStrategy`: proptest cannot derive a strategy
// here, so random maps come from the `Distribution<HashMap<..>>` impl instead.
impl<
K: Storable<D> + Debug + Serializable,
V: Storable<D> + Debug,
D: DB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Arbitrary for HashMap<K, V, D, A>
where
Standard: Distribution<V> + Distribution<K>,
{
type Strategy = NoStrategy<HashMap<K, V, D, A>>;
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
NoStrategy(PhantomData)
}
}
impl<
    D: DB,
    K: Serializable + Storable<D>,
    V: Storable<D>,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Distribution<HashMap<K, V, D, A>> for Standard
where
    Standard: Distribution<V> + Distribution<K>,
{
    /// Samples a small random map of 0..=7 random key/value pairs.
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> HashMap<K, V, D, A> {
        let size: usize = rng.gen_range(0..8);
        (0..size).fold(HashMap::new(), |acc, _| acc.insert(rng.r#gen(), rng.r#gen()))
    }
}
impl<
K: serde::Serialize + Serializable + Storable<D>,
V: serde::Serialize + Storable<D>,
D: DB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> serde::Serialize for HashMap<K, V, D, A>
{
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.collect_map(
self.iter()
.map(|kv| (kv.0.deref().clone(), kv.1.deref().clone())),
)
}
}
struct HashMapVisitor<K, V, D, A>(PhantomData<(K, V, D, A)>);
impl<
'de,
K: serde::Deserialize<'de> + Serializable + Storable<D>,
V: serde::Deserialize<'de> + Storable<D>,
D: DB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> serde::de::Visitor<'de> for HashMapVisitor<K, V, D, A>
{
type Value = HashMap<K, V, D, A>;
fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
write!(formatter, "a hashmap")
}
fn visit_map<ACC: serde::de::MapAccess<'de>>(
self,
mut seq: ACC,
) -> Result<HashMap<K, V, D, A>, ACC::Error> {
std::iter::from_fn(|| seq.next_entry::<K, V>().transpose()).collect()
}
}
impl<
'de,
K: serde::Deserialize<'de> + Serializable + Storable<D1>,
V: serde::Deserialize<'de> + Storable<D1>,
D1: DB,
A: Storable<D1> + Annotation<(Sp<K, D1>, Sp<V, D1>)>,
> serde::Deserialize<'de> for HashMap<K, V, D1, A>
{
fn deserialize<D: serde::Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
de.deserialize_map(HashMapVisitor(PhantomData))
}
}
impl<
K: Serializable + Storable<D>,
V: Storable<D>,
D: DB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> Default for HashMap<K, V, D, A>
{
fn default() -> Self {
Self::new()
}
}
impl<
K: Serializable + Storable<D>,
V: Storable<D>,
D: DB,
A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
> HashMap<K, V, D, A>
{
/// Creates an empty map.
pub fn new() -> Self {
Self(Map::new())
}
/// Computes the storage slot for `key`: the `D::Hasher` digest of its
/// canonical serialized bytes.
fn gen_key(key: &K) -> ArenaHash<D::Hasher> {
let mut hasher = D::Hasher::default();
let mut bytes: std::vec::Vec<u8> = std::vec::Vec::new();
// Serialization of a key is an invariant of the type; failure is a bug.
K::serialize(key, &mut bytes).expect("HashMap key should be serializable");
hasher.update(bytes);
ArenaHash(hasher.finalize())
}
/// Returns a new map with `key` bound to `value` (replacing any prior
/// binding). Key and value are allocated in the trie's arena.
#[must_use]
pub fn insert(&self, key: K, value: V) -> Self {
HashMap(self.0.insert(
Self::gen_key(&key),
(
self.0.mpt.0.arena.alloc(key),
self.0.mpt.0.arena.alloc(value),
),
))
}
/// Storable invariant: every entry's stored key must hash to the slot it
/// is filed under; guards against corrupted or forged serialized data.
fn invariant(&self) -> Result<(), std::io::Error> {
for (hash, v) in self.0.iter() {
let key = &*v.0;
let hash2 = Self::gen_key(key);
if hash != hash2 {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"hashmap key doesn't match serialized hash",
));
}
}
Ok(())
}
/// Looks up `key`, returning a shared pointer to the value if present.
pub fn get(&self, key: &K) -> Option<Sp<V, D>> {
self.0.get(&Self::gen_key(key)).map(|(_, v)| v.clone())
}
/// Returns a new map with `key` absent.
#[must_use]
pub fn remove(&self, key: &K) -> Self {
HashMap(self.0.remove(&Self::gen_key(key)))
}
/// Whether `key` has a binding in this map.
pub fn contains_key(&self, key: &K) -> bool {
self.0.contains_key(&Self::gen_key(key))
}
/// Consumes the map for teardown, yielding uniquely-owned keys/values.
/// `None` components are entries still shared elsewhere; fully shared
/// pairs are skipped entirely.
pub fn into_inner_for_drop(self) -> impl Iterator<Item = (Option<K>, Option<V>)> {
self.0.into_inner_for_drop().filter_map(|(k, v)| {
let (k, v) = (Sp::into_inner(k), Sp::into_inner(v));
if k.is_none() && v.is_none() {
None
} else {
Some((k, v))
}
})
}
/// Iterates over the stored (key, value) pointer pairs.
#[allow(clippy::type_complexity)]
pub fn iter(
&self,
) -> impl Iterator<Item = Sp<(Sp<K, D>, Sp<V, D>), D>> + use<'_, K, V, D, A> + '_ {
self.0.iter().map(|(_, v)| v)
}
/// Number of entries.
pub fn size(&self) -> usize {
self.0.size()
}
/// Whether the map has no entries.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
/// Iterates over cloned keys.
pub fn keys(&self) -> impl Iterator<Item = K> + use<'_, K, V, D, A> + '_ {
self.iter().map(|x| x.0.deref().clone())
}
/// Iterates over cloned values.
pub fn values(&self) -> impl Iterator<Item = V> + use<'_, K, V, D, A> + '_ {
self.iter().map(|x| x.1.deref().clone())
}
/// The root annotation accumulated over all entries.
pub fn ann(&self) -> A {
self.0.ann()
}
}
impl<K, V, D, A> FromIterator<(K, V)> for HashMap<K, V, D, A>
where
    K: Serializable + Storable<D>,
    V: Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)>,
{
    /// Builds a map by inserting every pair in iteration order
    /// (later duplicates win).
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut map = HashMap::new();
        for (k, v) in iter {
            map = map.insert(k, v);
        }
        map
    }
}
impl<
    K: Serializable + Deserializable + Storable<D>,
    V: Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<(Sp<K, D>, Sp<V, D>)> + Annotation<V>,
> From<Map<K, V, D, A>> for HashMap<K, V, D, A>
{
    /// Re-keys a `Map` by key hash, cloning each value out of its pointer.
    fn from(value: Map<K, V, D, A>) -> Self {
        value
            .iter()
            .fold(HashMap::new(), |acc, (k, v)| acc.insert(k, v.deref().clone()))
    }
}
/// Owning iterator over a `HashMap`'s entries; see `HashMap::into_iter`.
pub struct HashMapIntoIter<K, V, D: DB>
where
    K: 'static,
    V: 'static,
{
    #[allow(clippy::type_complexity)]
    inner: std::vec::IntoIter<(ArenaHash<D::Hasher>, (Sp<K, D>, Sp<V, D>))>,
}
impl<K, V, D> Iterator for HashMapIntoIter<K, V, D>
where
    K: Serializable + Storable<D> + Clone + 'static,
    V: Storable<D> + Clone + 'static,
    D: DB,
{
    type Item = (K, V);
    /// Yields cloned key/value pairs, discarding the internal hash slot.
    fn next(&mut self) -> Option<Self::Item> {
        let (_hash, (sp_key, sp_val)) = self.inner.next()?;
        Some(((*sp_key).clone(), (*sp_val).clone()))
    }
}
impl<K, V, D> IntoIterator for HashMap<K, V, D>
where
    K: Serializable + Storable<D> + Clone + 'static,
    V: Storable<D> + Clone + 'static,
    D: DB,
{
    type Item = (K, V);
    type IntoIter = HashMapIntoIter<K, V, D>;
    fn into_iter(self) -> Self::IntoIter {
        let inner = self.0.into_iter();
        HashMapIntoIter { inner }
    }
}
/// An immutable, persistent hash set, represented as a [`HashMap`] from the
/// element to the unit value.
#[derive(Storable)]
#[derive_where(Clone, Eq, PartialEq, PartialOrd, Ord, Hash; V, A)]
#[storable(db = D)]
#[tag = "hash-set"]
pub struct HashSet<
V: Storable<D> + Serializable,
D: DB = DefaultDB,
A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)> = SizeAnn,
>(pub HashMap<V, (), D, A>);
tag_enforcement_test!(HashSet<()>);
impl<
    V: serde::Serialize + Serializable + Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>,
> serde::Serialize for HashSet<V, D, A>
{
    /// Serializes as a plain sequence of cloned elements.
    fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let values = self.iter().map(|v| (**v).clone());
        ser.collect_seq(values)
    }
}
impl<V: Storable<D> + Serializable, D: DB, A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>>
    HashSet<V, D, A>
{
    /// Creates an empty set.
    pub fn new() -> Self {
        HashSet(HashMap::new())
    }
    /// Returns a new set additionally containing `value`.
    #[must_use]
    pub fn insert(&self, value: V) -> Self {
        HashSet(self.0.insert(value, ()))
    }
    /// Returns a new set with `value` absent.
    #[must_use]
    pub fn remove(&self, value: &V) -> Self {
        HashSet(self.0.remove(value))
    }
    /// Whether `value` is in the set.
    pub fn member(&self, value: &V) -> bool {
        self.0.contains_key(value)
    }
    /// Whether every element of `self` is also in `other`.
    pub fn is_subset(&self, other: &HashSet<V, D, A>) -> bool {
        for x in self.iter() {
            if !other.member(&x) {
                return false;
            }
        }
        true
    }
    /// The set containing elements of either `self` or `other`.
    pub fn union(&self, other: &HashSet<V, D, A>) -> HashSet<V, D, A>
    where
        V: Clone,
    {
        let mut acc = self.clone();
        for x in other.iter() {
            acc = acc.insert(x.deref().deref().clone());
        }
        acc
    }
    /// Iterates over shared pointers to the elements.
    pub fn iter(&self) -> impl Iterator<Item = Arc<Sp<V, D>>> + '_
    where
        V: Clone,
    {
        self.0.iter().map(|entry| Arc::new(entry.0.clone()))
    }
    /// Number of elements.
    pub fn size(&self) -> usize {
        self.0.size()
    }
    /// Whether the set is empty.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
    /// The root annotation accumulated over all elements.
    pub fn ann(&self) -> A {
        self.0.ann()
    }
}
impl<V: Storable<D> + Serializable, D: DB, A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>>
Default for HashSet<V, D, A>
{
fn default() -> Self {
Self::new()
}
}
impl<
V: Debug + Storable<D> + Serializable,
D: DB,
A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>,
> Debug for HashSet<V, D, A>
{
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
#[cfg(feature = "proptest")]
// `Arbitrary` is stubbed with `NoStrategy`; random sets come from the
// `Distribution<HashSet<..>>` impl below instead.
impl<
V: Storable<D> + Serializable + Debug,
D: DB,
A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>,
> Arbitrary for HashSet<V, D, A>
where
Standard: Distribution<V>,
{
type Strategy = NoStrategy<HashSet<V, D, A>>;
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
NoStrategy(PhantomData)
}
}
impl<V: Storable<D> + Serializable, D: DB, A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>>
    Distribution<HashSet<V, D, A>> for Standard
where
    Standard: Distribution<V>,
{
    /// Samples a small random set of 0..=7 random elements.
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> HashSet<V, D, A> {
        let size: usize = rng.gen_range(0..8);
        (0..size).fold(HashSet::new(), |acc, _| acc.insert(rng.r#gen()))
    }
}
impl<V, D, A> FromIterator<V> for HashSet<V, D, A>
where
    V: Storable<D> + Serializable,
    D: DB,
    A: Storable<D> + Annotation<(Sp<V, D>, Sp<(), D>)>,
{
    /// Builds a set by inserting each item in turn; duplicates collapse.
    fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
        let mut set = HashSet::new();
        for item in iter {
            set = set.insert(item);
        }
        set
    }
}
/// An immutable, persistent array backed by a Merkle-Patricia trie whose keys
/// are the big-endian nibble encoding of each index (leading zero nibbles
/// stripped). The `invariant` hook checks all stored indices are in range.
#[derive_where(Clone; V)]
#[derive(Storable)]
#[storable(db = D, invariant = Array::invariant)]
#[tag = "mpt-array[v1]"]
pub struct Array<V: Storable<D>, D: DB = DefaultDB>(
// The inner trie is `pub` only with the `public-internal-structure` feature.
#[cfg(feature = "public-internal-structure")]
#[storable(child)]
pub Sp<MerklePatriciaTrie<V, D>, D>,
#[cfg(not(feature = "public-internal-structure"))]
#[storable(child)]
Sp<MerklePatriciaTrie<V, D>, D>,
);
tag_enforcement_test!(Array<()>);
impl<V: Storable<D> + Debug, D: DB> Debug for Array<V, D> {
    /// Renders as a `[..]` list of the elements.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut list = f.debug_list();
        for item in self.iter() {
            list.entry(&item);
        }
        list.finish()
    }
}
impl<V: Storable<D>, D: DB> From<&std::vec::Vec<V>> for Array<V, D> {
    /// Clones each element into a fresh array.
    fn from(value: &std::vec::Vec<V>) -> Self {
        Array::from_iter(value.iter().cloned())
    }
}
impl<const N: usize, V: Storable<D>, D: DB> From<[V; N]> for Array<V, D> {
    fn from(value: [V; N]) -> Self {
        value.into_iter().collect()
    }
}
impl<V: Storable<D>, D: DB> From<std::vec::Vec<V>> for Array<V, D> {
    fn from(value: std::vec::Vec<V>) -> Self {
        value.into_iter().collect()
    }
}
impl<V: Storable<D>, D: DB> From<&Array<V, D>> for std::vec::Vec<V> {
    /// Clones each element out of its shared pointer.
    fn from(value: &Array<V, D>) -> Self {
        value.iter().map(|x| (*x).clone()).collect()
    }
}
impl<V: Storable<D>, D: DB> From<Array<V, D>> for std::vec::Vec<V> {
    fn from(value: Array<V, D>) -> Self {
        std::vec::Vec::from(&value)
    }
}
impl<V: Storable<D>, D: DB> std::iter::FromIterator<V> for Array<V, D> {
    /// Builds an array by pushing each item in iteration order.
    fn from_iter<I: IntoIterator<Item = V>>(iter: I) -> Self {
        iter.into_iter().fold(Array::new(), |acc, item| acc.push(item))
    }
}
impl<V: Storable<D>, D: DB> Distribution<Array<V, D>> for Standard
where
    Standard: Distribution<V>,
{
    /// Samples an array of random length (0..=255) with random elements.
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> Array<V, D> {
        let len = rng.r#gen::<u8>();
        (0..len).fold(Array::new(), |acc, _| acc.push(rng.r#gen()))
    }
}
#[cfg(feature = "proptest")]
// `Arbitrary` is stubbed with `NoStrategy`; random arrays come from the
// `Distribution<Array<..>>` impl above instead.
impl<V: Debug + Storable<D>, D: DB> Arbitrary for Array<V, D>
where
Standard: Distribution<V>,
{
type Strategy = NoStrategy<Array<V, D>>;
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
NoStrategy(PhantomData)
}
}
impl<V: Storable<D> + PartialEq, D: DB> PartialEq for Array<V, D> {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
}
}
impl<V: Storable<D> + Eq, D: DB> Eq for Array<V, D> {}
impl<V: Storable<D> + PartialOrd, D: DB> PartialOrd for Array<V, D> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.0.partial_cmp(&other.0)
}
}
impl<V: Storable<D> + Ord, D: DB> Ord for Array<V, D> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.0.cmp(&other.0)
}
}
impl<V: Storable<D>, D: DB> Hash for Array<V, D> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.deref().hash(state);
}
}
impl<V: Storable<D>, D: DB> Default for Array<V, D> {
fn default() -> Self {
Self::new()
}
}
impl<V: Storable<D>, D: DB> Array<V, D> {
/// Encodes an index as its big-endian nibble path, with leading zero
/// nibbles stripped (so index 0 is the empty path).
fn index_to_nibbles(i: usize) -> Vec<u8> {
let nibbles = to_nibbles(&BigEndianU64(i as u64));
nibbles.into_iter().skip_while(|x| *x == 0).collect()
}
/// Decodes a trie key back to an index; rejects keys with a leading zero
/// nibble (non-canonical) or more than 16 nibbles (would overflow u64).
fn nibbles_to_index(raw_nibbles: &[u8]) -> Result<u64, std::io::Error> {
if raw_nibbles.first() == Some(&0) {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"nibbles in array should not have leading zeroes",
));
}
// Left-pad to the full 16-nibble (u64) width before decoding.
let mut nibbles = [0u8; 16];
if raw_nibbles.len() > 16 {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"too long key for index in array",
));
}
nibbles[(16 - raw_nibbles.len())..].copy_from_slice(raw_nibbles);
let val: BigEndianU64 = from_nibbles(&nibbles)?;
Ok(val.0)
}
/// Storable invariant: every key decodes to an index below the length,
/// i.e. indices are dense 0..len.
fn invariant(&self) -> Result<(), std::io::Error> {
let len = self.len() as u64;
self.0
.iter()
.map(|(k, _)| Self::nibbles_to_index(&k))
.try_for_each(|n| {
if n? >= len {
Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"index out of range for array on deserialization",
))
} else {
Ok(())
}
})
}
/// Creates an empty array.
pub fn new() -> Self {
Array(Sp::new(MerklePatriciaTrie::new()))
}
/// Builds an array by cloning each element of `values` in order.
pub fn new_from_slice(values: &[V]) -> Self {
let mut array = Array::<V, D>::new();
for v in values.iter() {
array = array.push(v.clone());
}
array
}
/// Number of elements.
// NOTE(review): this clones the trie to call `size()`; presumably `size`
// consumes self — confirm against `MerklePatriciaTrie::size`.
pub fn len(&self) -> usize {
self.0.deref().clone().size()
}
/// The element at `index`, or `None` if out of range.
pub fn get(&self, index: usize) -> Option<&V> {
self.0.lookup(&Self::index_to_nibbles(index))
}
/// Replaces the element at an existing index; `None` if out of range.
/// (Cannot extend the array — use `push` for that.)
#[must_use]
pub fn insert(&self, index: usize, value: V) -> Option<Self> {
if index >= self.len() {
return None; }
Some(Array(Sp::new(
self.0.insert(&Self::index_to_nibbles(index), value),
)))
}
/// Returns a new array with `value` appended at the end.
#[must_use]
pub fn push(&self, value: V) -> Self {
let index = self.len();
Self(Sp::new(
self.0.insert(&Self::index_to_nibbles(index), value),
))
}
/// Consumes the array for teardown, yielding uniquely-owned elements.
pub fn into_inner_for_drop(self) -> impl Iterator<Item = V> {
Sp::into_inner(self.0)
.into_iter()
.flat_map(MerklePatriciaTrie::into_inner_for_drop)
}
/// Iterates over shared pointers to the elements, in index order.
pub fn iter(&self) -> ArrayIter<'_, V, D> {
ArrayIter::new(self)
}
/// Iterates over element references, in index order.
pub fn iter_deref(&self) -> impl Iterator<Item = &V> {
(0..self.len()).filter_map(|i| self.get(i))
}
/// Whether the array has no elements.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl<V: Storable<D> + serde::Serialize, D: DB> serde::Serialize for Array<V, D> {
    /// Serializes as a plain sequence of cloned elements.
    fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let items = self.iter().map(|v| v.deref().clone());
        ser.collect_seq(items)
    }
}
/// Visitor driving `Array` deserialization from a serde sequence.
struct ArrayVisitor<V, D>(PhantomData<(V, D)>);
impl<'de, V: Storable<D> + serde::Deserialize<'de>, D: DB> serde::de::Visitor<'de>
    for ArrayVisitor<V, D>
{
    type Value = Array<V, D>;
    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
        write!(formatter, "an array")
    }
    /// Collects all decoded elements, then converts them into an array.
    fn visit_seq<A: serde::de::SeqAccess<'de>>(self, mut seq: A) -> Result<Array<V, D>, A::Error> {
        let mut items = std::vec::Vec::new();
        while let Some(item) = seq.next_element::<V>()? {
            items.push(item);
        }
        Ok(Array::<V, D>::from(items))
    }
}
impl<'de, V: Storable<D1> + serde::Deserialize<'de>, D1: DB> serde::Deserialize<'de>
    for Array<V, D1>
{
    fn deserialize<D: serde::Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
        let visitor = ArrayVisitor(PhantomData);
        de.deserialize_seq(visitor)
    }
}
/// Borrowing iterator over an [`Array`], walking indices in ascending order.
pub struct ArrayIter<'a, V: Storable<D>, D: DB> {
    array: &'a Array<V, D>,
    next_index: usize,
}
impl<'a, V: Storable<D>, D: DB> ArrayIter<'a, V, D> {
    fn new(array: &'a Array<V, D>) -> Self {
        let next_index = 0;
        ArrayIter { array, next_index }
    }
}
impl<V: Storable<D>, D: DB> Iterator for ArrayIter<'_, V, D> {
    type Item = Sp<V, D>;
    /// Looks up the current index in the trie; ends once an index is absent.
    fn next(&mut self) -> Option<Self::Item> {
        let nibbles = Array::<V, D>::index_to_nibbles(self.next_index);
        self.next_index += 1;
        self.array.0.lookup_sp(&nibbles)
    }
}
/// An immutable, persistent multiset: a [`HashMap`] from element to its
/// (strictly positive) occurrence count. Entries that reach count 0 are
/// removed from the map entirely.
#[derive(Storable)]
#[derive_where(Clone, Eq, PartialEq, PartialOrd, Ord, Hash; V)]
#[storable(db = D)]
#[tag = "multi-set[v1]"]
pub struct MultiSet<V: Serializable + Storable<D>, D: DB> {
#[cfg(feature = "public-internal-structure")]
pub elements: HashMap<V, u32, D>,
#[cfg(not(feature = "public-internal-structure"))]
elements: HashMap<V, u32, D>,
}
tag_enforcement_test!(MultiSet<(), DefaultDB>);
impl<V: Serializable + Storable<D>, D: DB> Default for MultiSet<V, D> {
/// The default multiset is empty.
fn default() -> Self {
Self::new()
}
}
impl<V: Serializable + Storable<D>, D: DB> MultiSet<V, D> {
    /// Creates an empty multiset.
    pub fn new() -> Self {
        MultiSet {
            elements: HashMap::new(),
        }
    }
    /// Returns a new multiset with `element`'s count increased by one.
    #[must_use]
    pub fn insert(&self, element: V) -> Self {
        let count = self.count(&element);
        MultiSet {
            elements: self.elements.insert(element, count + 1),
        }
    }
    /// Returns a new multiset with `element`'s count decreased by one.
    #[must_use]
    pub fn remove(&self, element: &V) -> Self {
        self.remove_n(element, 1)
    }
    /// Returns a new multiset with `element`'s count decreased by `n`,
    /// saturating at zero; a zero count drops the entry entirely.
    #[must_use]
    pub fn remove_n(&self, element: &V, n: u32) -> Self {
        let remaining = self.count(element).saturating_sub(n);
        let elements = if remaining == 0 {
            self.elements.remove(element)
        } else {
            self.elements.insert(element.clone(), remaining)
        };
        MultiSet { elements }
    }
    /// Number of occurrences of `element` (0 if absent).
    pub fn count(&self, element: &V) -> u32 {
        self.elements.get(element).map_or(0, |i| *i.deref())
    }
    /// Whether `other`'s occurrences are all covered by `self`'s counts.
    pub fn has_subset(&self, other: &MultiSet<V, D>) -> bool {
        other
            .elements
            .iter()
            .all(|entry| self.count(entry.0.deref()) >= *entry.1.deref())
    }
    /// Whether `element` occurs at least once.
    pub fn member(&self, element: &V) -> bool {
        self.elements.contains_key(element)
    }
}
/// Transparent newtype wrapper around a single value; serves as the
/// single-element [`Container`] and forwards `Storable`/`Tagged` to `V`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serializable)]
pub struct Identity<V>(pub V);
impl<V: Storable<D>, D: DB> Storable<D> for Identity<V> {
fn children(&self) -> std::vec::Vec<ArenaKey<<D as DB>::Hasher>> {
self.0.children()
}
fn from_binary_repr<R: std::io::Read>(
reader: &mut R,
child_hashes: &mut impl Iterator<Item = ArenaKey<<D as DB>::Hasher>>,
loader: &impl Loader<D>,
) -> Result<Self, std::io::Error>
where
Self: Sized,
{
V::from_binary_repr(reader, child_hashes, loader).map(Identity)
}
fn to_binary_repr<W: std::io::Write>(&self, writer: &mut W) -> Result<(), std::io::Error>
where
Self: Sized,
{
self.0.to_binary_repr(writer)
}
}
impl<V: Tagged> Tagged for Identity<V> {
    /// `Identity` is tag-transparent: it reuses the inner type's tag.
    fn tag() -> std::borrow::Cow<'static, str> {
        V::tag()
    }
    fn tag_unique_factor() -> String {
        V::tag_unique_factor()
    }
}
impl<V> Distribution<Identity<V>> for Standard
where
    Standard: Distribution<V>,
{
    /// Samples the inner value and wraps it.
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> Identity<V> {
        let inner = <Standard as Distribution<V>>::sample(self, rng);
        Identity(inner)
    }
}
impl<V> From<V> for Identity<V> {
    fn from(v: V) -> Self {
        Identity(v)
    }
}
impl<T: Storable<D> + Serializable, D: DB, A: Storable<D> + Annotation<(Sp<T, D>, Sp<(), D>)>>
Semigroup for HashSet<T, D, A>
{
fn append(&self, other: &Self) -> Self {
self.union(other)
}
}
/// Abstraction over "one or many" item holders — [`Identity`] holds exactly
/// one item, [`HashSet`] holds many; used as the value type of
/// [`TimeFilterMap`].
pub trait Container<D: DB> {
type Item: Storable<D> + Clone + PartialEq + Eq + PartialOrd + Ord + Hash;
// Consumes the container, yielding every contained item.
fn iter_items(self) -> impl Iterator<Item = Self::Item>;
// Builds a container holding exactly the given item.
fn once(_: Self::Item) -> Self;
}
impl<T: Storable<D> + Clone + PartialEq + Eq + PartialOrd + Ord + Hash, D: DB> Container<D>
for Identity<T>
{
type Item = T;
fn iter_items(self) -> impl Iterator<Item = Self::Item> {
std::iter::once(self.0)
}
fn once(item: Self::Item) -> Self {
Self(item)
}
}
impl<
T: Serializable + Storable<D> + Clone + PartialEq + Eq + PartialOrd + Ord + Hash,
D: DB,
A: Storable<D> + Annotation<(Sp<T, D>, Sp<(), D>)>,
> Container<D> for HashSet<T, D, A>
{
type Item = T;
fn iter_items(self) -> impl Iterator<Item = Self::Item> {
self.0.keys().collect::<Vec<_>>().into_iter()
}
fn once(item: Self::Item) -> Self {
Self::new().insert(item)
}
}
// Newtype over u64 whose `Serializable` impl emits big-endian bytes, so the
// nibble encoding of a key sorts in the same order as the numeric value —
// required for predecessor queries and pruning on trie keys.
#[cfg(feature = "public-internal-structure")]
#[derive(Debug)]
pub struct BigEndianU64(u64);
#[cfg(not(feature = "public-internal-structure"))]
#[derive(Debug)]
struct BigEndianU64(u64);
impl Serializable for BigEndianU64 {
    /// Writes the value as exactly 8 big-endian bytes.
    fn serialize(&self, writer: &mut impl std::io::Write) -> std::io::Result<()> {
        let bytes = self.0.to_be_bytes();
        writer.write_all(&bytes)
    }
    /// Always 8 bytes (one u64).
    fn serialized_size(&self) -> usize {
        std::mem::size_of::<u64>()
    }
}
impl Deserializable for BigEndianU64 {
    /// Reads exactly 8 big-endian bytes back into a u64.
    fn deserialize(
        reader: &mut impl std::io::Read,
        _recursion_depth: u32,
    ) -> std::io::Result<Self> {
        let mut buf = [0u8; 8];
        reader.read_exact(&mut buf)?;
        Ok(Self(u64::from_be_bytes(buf)))
    }
}
/// A persistent map from timestamps (stored as big-endian seconds) to item
/// containers, paired with a [`MultiSet`] of all contained items for O(1)
/// membership queries. Supports predecessor lookup and pruning by cutoff
/// time (see the inherent impl below).
#[derive(Storable)]
#[derive_where(Clone, Eq, PartialEq, PartialOrd, Ord, Hash; C)]
#[storable(db = D)]
pub struct TimeFilterMap<C: Serializable + Storable<D>, D: DB>
where
C: Container<D> + Serializable + Storable<D>,
<C as Container<D>>::Item: Serializable + Storable<D>,
{
#[cfg(feature = "public-internal-structure")]
pub time_map: Map<BigEndianU64, C, D>,
#[cfg(feature = "public-internal-structure")]
pub set: MultiSet<<C as Container<D>>::Item, D>,
#[cfg(not(feature = "public-internal-structure"))]
time_map: Map<BigEndianU64, C, D>,
#[cfg(not(feature = "public-internal-structure"))]
set: MultiSet<<C as Container<D>>::Item, D>,
}
impl<C: Serializable + Storable<D>, D: DB> Tagged for TimeFilterMap<C, D>
where
    C: Container<D> + Serializable + Storable<D> + Tagged,
    <C as Container<D>>::Item: Serializable + Storable<D> + Tagged,
{
    /// Tag parameterized over the container's tag.
    fn tag() -> std::borrow::Cow<'static, str> {
        let rendered = format!("time-filter-map[v1]({})", C::tag());
        std::borrow::Cow::Owned(rendered)
    }
    /// Uniqueness covers both fields: the container and the item multiset.
    fn tag_unique_factor() -> String {
        format!(
            "({},{})",
            C::tag(),
            <MultiSet<<C as Container<D>>::Item, D>>::tag()
        )
    }
}
tag_enforcement_test!(TimeFilterMap<Identity<()>, DefaultDB>);
impl<V: Container<D> + Debug + Storable<D> + Serializable, D: DB> Debug for TimeFilterMap<V, D>
where
<V as Container<D>>::Item: Serializable,
{
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.time_map.fmt(f)
}
}
impl<C: Container<D> + Debug + Storable<D> + Serializable, D: DB> Default for TimeFilterMap<C, D>
where
<C as Container<D>>::Item: Serializable,
{
fn default() -> Self {
Self::new()
}
}
impl<C: Container<D> + Debug + Storable<D> + Serializable, D: DB> TimeFilterMap<C, D>
where
<C as Container<D>>::Item: Serializable,
{
/// Creates an empty map.
pub fn new() -> Self {
TimeFilterMap {
time_map: Map::new(),
set: MultiSet::new(),
}
}
/// The container at `ts`, or failing that at the greatest earlier
/// timestamp; `None` if nothing precedes `ts`.
pub fn get(&self, ts: Timestamp) -> Option<&C> {
let ts = BigEndianU64(ts.to_secs());
match self.time_map.get(&ts) {
Some(res) => Some(res),
None => self.time_map.find_predecessor(&ts).map(|(_, v)| v),
}
}
/// Replaces whatever is stored at `ts` with a singleton container of `v`,
/// keeping the item multiset in sync (old items removed, `v` added).
#[must_use]
pub fn insert(&self, ts: Timestamp, v: <C as Container<D>>::Item) -> Self {
let mut res = self.clone();
// Old occupants of this slot must leave the membership multiset.
if let Some(x) = self.time_map.get(&BigEndianU64(ts.to_secs())) {
for val in x.clone().iter_items() {
res.set = res.set.remove(&val);
}
}
res.time_map = res
.time_map
.insert(BigEndianU64(ts.to_secs()), C::once(v.clone()));
res.set = res.set.insert(v);
res
}
/// `upsert` with a singleton container of `v`.
#[must_use]
pub fn upsert_one(&self, ts: Timestamp, v: <C as Container<D>>::Item) -> Self
where
C: Semigroup + Default,
{
self.upsert(ts, &C::once(v))
}
/// Merges `v` (via `Semigroup::append`) into the container at `ts`,
/// starting from an empty container if the slot was vacant; all of `v`'s
/// items are added to the membership multiset.
#[must_use]
pub fn upsert(&self, ts: Timestamp, v: &C) -> Self
where
C: Semigroup + Default,
{
let xs = self
.time_map
.get(&BigEndianU64(ts.to_secs()))
.cloned()
.unwrap_or_default();
let mut res = self.clone();
for new_val in v.clone().iter_items() {
res.set = res.set.insert(new_val.clone());
}
res.time_map = res
.time_map
.insert(BigEndianU64(ts.to_secs()), xs.append(v));
res
}
/// Whether `v` occurs anywhere in the map (any timestamp).
pub fn contains(&self, v: &<C as Container<D>>::Item) -> bool {
self.set.member(v)
}
/// Whether every item of `v` occurs somewhere in the map.
pub fn contains_all(&self, v: C) -> bool {
v.iter_items().all(|val| self.set.member(&val))
}
/// Drops all entries whose key precedes `cutoff_timestamp` by pruning the
/// underlying trie, removing each pruned item from the multiset.
#[must_use]
pub fn filter(&self, cutoff_timestamp: Timestamp) -> Self
where
MerklePatriciaTrie<C, D>: 'static + Clone,
{
let cutoff_key = to_nibbles(&BigEndianU64(cutoff_timestamp.to_secs()));
let mut res = self.clone();
let (new_mpt, removed_items_for_set) = self.time_map.mpt.prune(&cutoff_key);
res.time_map.mpt = Sp::new(new_mpt);
for items in removed_items_for_set {
for item in items.deref().clone().iter_items() {
res.set = res.set.remove(&item);
}
}
res
}
}
/// An immutable, persistent map backed by a Merkle-Patricia trie; keys are
/// the nibble encoding of their canonical serialized bytes (`to_nibbles`).
/// `K` is phantom — only its serialized form is stored in the trie.
#[derive_where(PartialEq, Eq, PartialOrd, Ord; V, A)]
#[derive_where(Hash, Clone)]
pub struct Map<K, V: Storable<D>, D: DB = DefaultDB, A: Storable<D> + Annotation<V> = SizeAnn> {
#[cfg(feature = "public-internal-structure")]
pub mpt: Sp<MerklePatriciaTrie<V, D, A>, D>,
#[cfg(feature = "public-internal-structure")]
pub key_type: PhantomData<K>,
#[cfg(not(feature = "public-internal-structure"))]
pub(crate) mpt: Sp<MerklePatriciaTrie<V, D, A>, D>,
#[cfg(not(feature = "public-internal-structure"))]
key_type: PhantomData<K>,
}
impl<K: Tagged, V: Storable<D> + Tagged, D: DB, A: Storable<D> + Annotation<V> + Tagged> Tagged
for Map<K, V, D, A>
{
    /// Tag parameterized over the key, value and annotation tags.
    fn tag() -> std::borrow::Cow<'static, str> {
        let rendered = format!("mpt-map({},{},{})", K::tag(), V::tag(), A::tag());
        std::borrow::Cow::Owned(rendered)
    }
    /// Uniqueness derives from the underlying trie's factor.
    fn tag_unique_factor() -> String {
        <MerklePatriciaTrie<V, D, A>>::tag_unique_factor()
    }
}
tag_enforcement_test!(Map<(), ()>);
impl<
K: Sync + Send + 'static + Deserializable,
V: Storable<D>,
D: DB,
A: Storable<D> + Annotation<V>,
> Storable<D> for Map<K, V, D, A>
{
/// The map's only child is the trie pointer; `K` is phantom.
fn children(&self) -> std::vec::Vec<ArenaKey<D::Hasher>> {
vec![Sp::as_child(&self.mpt)]
}
/// The map has no inline payload — all data lives in the child trie.
fn to_binary_repr<W: std::io::Write>(&self, _writer: &mut W) -> Result<(), std::io::Error>
where
Self: Sized,
{
Ok(())
}
/// Rebuilds the map by loading the trie from the next child hash, then
/// runs the invariant check via the loader.
fn from_binary_repr<R: std::io::Read>(
_reader: &mut R,
child_hashes: &mut impl Iterator<Item = ArenaKey<D::Hasher>>,
loader: &impl Loader<D>,
) -> Result<Self, std::io::Error>
where
Self: Sized,
{
let res = Self {
mpt: loader.get_next(child_hashes)?,
key_type: PhantomData,
};
loader.do_check(res)
}
/// Invariant: every stored trie key must deserialize as a `K`.
fn check_invariant(&self) -> Result<(), std::io::Error> {
self.mpt
.iter()
.try_for_each(|(k, _)| from_nibbles::<K>(&k).and(Ok(())))
}
}
impl<
    K: Sync + Send + 'static + Deserializable,
    V: Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<V>,
> Serializable for Map<K, V, D, A>
{
    /// Serializes by wrapping in a storage pointer first.
    fn serialize(&self, writer: &mut impl std::io::Write) -> std::io::Result<()> {
        let sp = Sp::new(self.clone());
        sp.serialize(writer)
    }
    fn serialized_size(&self) -> usize {
        let sp = Sp::new(self.clone());
        sp.serialized_size()
    }
}
impl<
    K: Sync + Send + 'static + Deserializable,
    V: Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<V>,
> Deserializable for Map<K, V, D, A>
{
    /// Deserializes the storage pointer, then clones the map out of it.
    fn deserialize(reader: &mut impl std::io::Read, recursion_depth: u32) -> std::io::Result<Self> {
        let sp = <Sp<Map<K, V, D, A>, D> as Deserializable>::deserialize(reader, recursion_depth)?;
        Ok((*sp).clone())
    }
}
impl<K, V, D, A> FromIterator<(K, V)> for Map<K, V, D, A>
where
    K: Serializable + Deserializable,
    V: Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<V>,
{
    /// Builds a map by inserting each pair in order (later duplicates win).
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut map = Map::new();
        for (k, v) in iter {
            map = map.insert(k, v);
        }
        map
    }
}
#[cfg(feature = "proptest")]
// `Arbitrary` is stubbed with `NoStrategy`; random maps come from the
// `Distribution<Map<..>>` impl below instead.
impl<
K: Serializable + Deserializable + Debug,
V: Storable<D> + Debug,
D: DB,
A: Storable<D> + Annotation<V>,
> Arbitrary for Map<K, V, D, A>
where
Standard: Distribution<V> + Distribution<K>,
{
type Strategy = NoStrategy<Map<K, V, D, A>>;
type Parameters = ();
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
NoStrategy(PhantomData)
}
}
impl<K: Serializable + Deserializable, V: Storable<D>, D: DB, A: Storable<D> + Annotation<V>>
    Distribution<Map<K, V, D, A>> for Standard
where
    Standard: Distribution<V> + Distribution<K>,
{
    /// Samples a small random map of 0..=7 random key/value pairs.
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> Map<K, V, D, A> {
        let size: usize = rng.gen_range(0..8);
        (0..size).fold(Map::new(), |acc, _| acc.insert(rng.r#gen(), rng.r#gen()))
    }
}
impl<
K: serde::Serialize + Serializable + Deserializable,
V: Storable<D> + serde::Serialize,
D: DB,
A: Storable<D> + Annotation<V>,
> serde::Serialize for Map<K, V, D, A>
{
fn serialize<S: serde::Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
ser.collect_map(self.iter().map(|kv| (kv.0, kv.1.deref().clone())))
}
}
struct MapVisitor<K, V, D, A>(PhantomData<(K, V, D, A)>);
impl<
'de,
K: serde::Deserialize<'de> + Serializable + Deserializable,
V: Storable<D> + serde::Deserialize<'de>,
D: DB,
A: Storable<D> + Annotation<V>,
> serde::de::Visitor<'de> for MapVisitor<K, V, D, A>
{
type Value = Map<K, V, D, A>;
fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
write!(formatter, "a map")
}
fn visit_map<ACC: serde::de::MapAccess<'de>>(
self,
mut seq: ACC,
) -> Result<Map<K, V, D, A>, ACC::Error> {
std::iter::from_fn(|| seq.next_entry::<K, V>().transpose()).collect()
}
}
impl<
'de,
K: serde::Deserialize<'de> + Serializable + Deserializable,
V: serde::Deserialize<'de> + Storable<D1>,
D1: DB,
A: Storable<D1> + Annotation<V>,
> serde::Deserialize<'de> for Map<K, V, D1, A>
{
fn deserialize<D: serde::Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
de.deserialize_map(MapVisitor(PhantomData))
}
}
/// Serializes `value` and splits every byte into (high, low) 4-bit nibbles.
fn to_nibbles<T: Serializable>(value: &T) -> std::vec::Vec<u8> {
    let mut bytes = std::vec::Vec::new();
    T::serialize(value, &mut bytes).unwrap();
    bytes
        .into_iter()
        .flat_map(|b| [b >> 4, b & 0x0f])
        .collect()
}
/// Reassembles nibble pairs into bytes and deserializes a `T` from them.
/// Errors if any nibble is >= 16 or the slice has odd length.
fn from_nibbles<T: Deserializable>(value: &[u8]) -> std::io::Result<T> {
    if value.iter().any(|v| *v >= 16) {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "nibble out of range",
        ));
    }
    if value.len() % 2 != 0 {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "nibble array must have even length",
        ));
    }
    let bytes: std::vec::Vec<u8> = value
        .chunks_exact(2)
        .map(|pair| (pair[0] << 4) | pair[1])
        .collect();
    T::deserialize(&mut &bytes[..], 0)
}
impl<K: Serializable + Deserializable, V: Storable<D>, D: DB, A: Storable<D> + Annotation<V>>
Default for Map<K, V, D, A>
{
// An empty map, equivalent to `Map::new()`.
fn default() -> Self {
Self::new()
}
}
impl<K: Serializable + Deserializable, V: Storable<D>, D: DB, A: Storable<D> + Annotation<V>>
    Map<K, V, D, A>
{
    /// Creates an empty map backed by a fresh [`MerklePatriciaTrie`].
    pub fn new() -> Self {
        Self {
            mpt: Sp::new(MerklePatriciaTrie::new()),
            key_type: PhantomData,
        }
    }

    /// Returns a copy of this map with `key` bound to `value`.
    ///
    /// The map is persistent: `self` is left untouched and unchanged
    /// structure is shared between the old and new versions.
    #[must_use]
    pub fn insert(&self, key: K, value: V) -> Self {
        Map {
            mpt: Sp::new(self.mpt.insert(&to_nibbles(&key), value)),
            key_type: self.key_type,
        }
    }

    /// Returns a copy of this map without `key` (a no-op if absent);
    /// `self` is left untouched.
    #[must_use]
    pub fn remove(&self, key: &K) -> Self {
        Map {
            // `key` is already a reference — pass it straight through, as
            // `contains_key` and `lookup_sp` do (previously `&key` serialized
            // via `&K`'s impl, which must agree with `K`'s for lookups to
            // work at all).
            mpt: Sp::new(self.mpt.remove(&to_nibbles(key))),
            key_type: self.key_type,
        }
    }

    /// Consumes the map, yielding values for dropping via the trie's
    /// `into_inner_for_drop`; only applies if this was the sole reference.
    pub fn into_inner_for_drop(self) -> impl Iterator<Item = V> {
        Sp::into_inner(self.mpt)
            .into_iter()
            .flat_map(MerklePatriciaTrie::into_inner_for_drop)
    }

    /// Iterates over all `(key, value)` entries.
    ///
    /// Entries whose stored nibble path fails to decode back into a `K`
    /// are silently skipped.
    pub fn iter(&self) -> impl Iterator<Item = (K, Sp<V, D>)> + use<'_, K, V, D, A> + '_ {
        self.mpt.iter().filter_map(|(p, v)| {
            let key = from_nibbles::<K>(&p).ok()?;
            Some((key, v))
        })
    }

    /// Iterates over all decodable keys.
    pub fn keys(&self) -> impl Iterator<Item = K> + use<'_, K, V, D, A> + '_ {
        self.iter().map(|(k, _)| k)
    }

    /// Tests whether `key` is present in the map.
    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Ord + Serializable,
    {
        self.mpt.lookup(&to_nibbles(key)).is_some()
    }

    /// Looks up the value stored under `key`, if any.
    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Serializable,
    {
        // `key` is already a reference — pass it straight through, matching
        // `contains_key`.
        self.mpt.lookup(&to_nibbles(key))
    }

    /// Looks up the value stored under `key` as a shared [`Sp`] pointer.
    pub fn lookup_sp<Q>(&self, key: &Q) -> Option<Sp<V, D>>
    where
        Q: Serializable,
    {
        self.mpt.lookup_sp(&to_nibbles(key))
    }

    /// Tests whether the map contains no entries.
    pub fn is_empty(&self) -> bool {
        self.mpt.is_empty()
    }

    /// Returns the number of entries.
    pub fn size(&self) -> usize {
        // `size` consumes its receiver; the clone is of the trie wrapper,
        // which holds shared pointers, so this is presumably cheap — TODO
        // confirm against `MerklePatriciaTrie::size`.
        self.mpt.deref().clone().size()
    }

    /// Wraps a trie in a new map, reusing this map's key phantom type.
    fn build_from_mpt(&self, mpt: MerklePatriciaTrie<V, D, A>) -> Self {
        Self {
            mpt: Sp::new(mpt),
            key_type: self.key_type,
        }
    }

    /// Returns the trie's root annotation.
    pub fn ann(&self) -> A {
        self.mpt.ann()
    }
}
impl<V: Storable<D>, D: DB, A: Storable<D> + Annotation<V>> Map<BigEndianU64, V, D, A> {
    /// Finds the predecessor entry of `target_path` — based on the
    /// `TimeFilterMap` tests, the entry with the greatest key `<=` the
    /// target — returning its raw nibble path and a reference to its value.
    pub fn find_predecessor<'a>(
        &'a self,
        target_path: &BigEndianU64,
    ) -> Option<(std::vec::Vec<u8>, &'a V)> {
        // `target_path` is already a reference — pass it straight through
        // rather than serializing a `&&BigEndianU64`.
        let target_nibbles = to_nibbles(target_path);
        self.mpt.find_predecessor(target_nibbles.as_slice())
    }

    /// Prunes entries relative to the raw nibble path `target_path`,
    /// returning the pruned map alongside the removed value pointers.
    /// NOTE(review): whether the cutoff itself is retained is decided by
    /// `MerklePatriciaTrie::prune` — confirm there.
    pub fn prune(&self, target_path: &[u8]) -> (Self, std::vec::Vec<Sp<V, D>>) {
        let (mpt, removed) = self.mpt.prune(target_path);
        (self.build_from_mpt(mpt), removed)
    }
}
// Outcome of trying to decode a stored nibble path back into a key; used by
// the `Debug` impl for `Map` so undecodable keys render as a marker instead
// of aborting formatting.
enum Decodable<T> {
Yes(T),
No,
}
impl<T, E> From<Result<T, E>> for Decodable<T> {
    /// Converts `Ok` into [`Decodable::Yes`] and any `Err` into
    /// [`Decodable::No`], discarding the error value itself.
    fn from(value: Result<T, E>) -> Self {
        value.map_or(Decodable::No, Decodable::Yes)
    }
}
impl<T: Debug> Debug for Decodable<T> {
    /// Formats a decoded value transparently; decode failures print a
    /// fixed `!decode error!` marker.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if let Decodable::Yes(inner) = self {
            inner.fmt(f)
        } else {
            write!(f, "!decode error!")
        }
    }
}
impl<K: Deserializable + Debug, V: Storable<D> + Debug, D: DB, A: Storable<D> + Annotation<V>> Debug
    for Map<K, V, D, A>
{
    /// Renders the map as `{key: value, …}`; keys whose stored nibble path
    /// does not decode back to a `K` appear as `!decode error!`.
    fn fmt(&self, formatter: &mut Formatter) -> std::fmt::Result {
        let mut builder = formatter.debug_map();
        for (path, value) in self.mpt.iter() {
            builder.entry(&Decodable::from(from_nibbles::<K>(&path)), &value);
        }
        builder.finish()
    }
}
impl<
    K: Clone + Serializable + Deserializable,
    V: Clone + Storable<D>,
    D: DB,
    A: Storable<D> + Annotation<V>,
> IntoIterator for Map<K, V, D, A>
{
    type Item = (K, V);
    type IntoIter = std::vec::IntoIter<(K, V)>;

    /// Consumes the map, yielding owned `(key, value)` pairs; each value is
    /// cloned out of its shared pointer.
    fn into_iter(self) -> Self::IntoIter {
        let mut pairs = std::vec::Vec::new();
        for (key, value) in self.iter() {
            pairs.push((key, value.deref().clone()));
        }
        pairs.into_iter()
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::storable::SMALL_OBJECT_LIMIT;
#[test]
fn iter_map() {
    // Iteration over a small map must yield exactly the inserted pairs.
    let map = Map::<_, _>::new().insert(1, 4).insert(2, 5).insert(3, 6);
    for (key, value) in map.iter() {
        let pair = (key, *Sp::deref(&value));
        assert!(pair == (1, 4) || pair == (2, 5) || pair == (3, 6));
    }
}
#[test]
fn array_get() {
    let array: super::Array<_> = vec![0, 1, 2, 3].into();
    // In-bounds indices return their element...
    for i in 0..4 {
        assert_eq!(array.get(i).cloned(), Some(i));
    }
    // ...and every index past the end is None.
    for i in 4..8 {
        assert!(array.get(i).is_none());
    }
}
#[test]
fn array_push() {
// Each push produces a new array exactly one element longer, and the
// result equals the same data converted from a Vec.
let mut array = super::Array::<u32>::new();
assert_eq!(array.len(), 0);
array = array.push(0);
assert_eq!(array.len(), 1);
array = array.push(1);
assert_eq!(array.len(), 2);
assert_eq!(array, vec![0, 1].into());
}
#[test]
fn array_with_more_than_16_elements() {
// Smoke test: collecting 1024 elements must not panic — presumably
// exercises trie nodes beyond a single 16-way branch; TODO confirm.
let _: super::Array<_> = (0..1024).collect();
}
#[test]
fn array_index_to_nibbles_is_big_endian() {
// Indices are encoded as big-endian nibble sequences with no leading
// zero nibbles; index 0 encodes as the empty path.
assert_eq!(Array::<u8>::index_to_nibbles(0), Vec::<u8>::new());
assert_eq!(Array::<u8>::index_to_nibbles(1), vec![1]);
assert_eq!(Array::<u8>::index_to_nibbles(15), vec![15]);
// Crossing a power of 16 adds one nibble.
assert_eq!(Array::<u8>::index_to_nibbles(16), vec![1, 0]);
assert_eq!(Array::<u8>::index_to_nibbles(255), vec![15, 15]);
assert_eq!(Array::<u8>::index_to_nibbles(256), vec![1, 0, 0]);
assert_eq!(
Array::<u8>::index_to_nibbles((1 << 12) - 1),
vec![15, 15, 15]
);
assert_eq!(Array::<u8>::index_to_nibbles(1 << 12), vec![1, 0, 0, 0]);
assert_eq!(
Array::<u8>::index_to_nibbles((1 << 32) - 1),
vec![15; 32 / 4]
);
// 1 << 32 needs nine nibbles: a leading 1 followed by eight zeros.
let mut expected = vec![0; 32 / 4 + 1];
expected[0] = 1;
assert_eq!(Array::<u8>::index_to_nibbles(1 << 32), expected);
}
#[test]
fn test_map_iterators() {
// `keys()` and `iter()` must surface every inserted entry; order is not
// guaranteed, so results are sorted before comparison.
let map = Map::<_, _>::new()
.insert(40026u64, 12u64)
.insert(12u64, 40026u64);
let mut keys = map.keys().collect::<std::vec::Vec<_>>();
keys.sort();
assert_eq!(keys, vec![12u64, 40026u64]);
let mut entries = map
.iter()
.map(|(k, v)| (k, *(v.deref())))
.collect::<std::vec::Vec<_>>();
entries.sort();
assert_eq!(entries, vec![(12u64, 40026u64), (40026u64, 12u64)]);
}
#[test]
fn test_hashmap() {
// Basic HashMap round-trip: insert two entries, read both back, then
// remove one and confirm it is gone.
let mut hashmap = HashMap::<_, _>::new()
.insert(40026u64, 12u64)
.insert(12u64, 40026u64);
assert_eq!(hashmap.get(&40026u64).map(|sp| *(sp.deref())), Some(12u64));
assert_eq!(hashmap.get(&12u64).map(|sp| *(sp.deref())), Some(40026u64));
hashmap = hashmap.remove(&12u64);
assert_eq!(hashmap.get(&12u64), None);
}
#[test]
fn test_predecessor_when_target_is_branch_prefix_time_map() {
// Regression case: the lookup target (257) shares a trie-path prefix
// with a branch holding 256 and 512; the predecessor search must still
// resolve to the entry at 256.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(1), 1);
time_map = time_map.insert(Timestamp::from_secs(256), 256);
time_map = time_map.insert(Timestamp::from_secs(512), 512);
assert_eq!(
time_map.get(Timestamp::from_secs(257)).copied(),
Some(Identity(256))
);
}
#[test]
fn test_smoke_time_map() {
// End-to-end exercise of TimeFilterMap: `get(t)` returns the value at
// the greatest timestamp <= t; `filter(c)` drops entries strictly
// before c (the cutoff key itself survives); `contains` tracks values.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(1), 1);
time_map = time_map.insert(Timestamp::from_secs(2), 2);
// Initial lookups around the two inserted timestamps.
assert_eq!(time_map.get(Timestamp::from_secs(0)).copied(), None);
assert_eq!(
time_map.get(Timestamp::from_secs(1)).copied(),
Some(Identity(1))
);
assert_eq!(
time_map.get(Timestamp::from_secs(2)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(3)).copied(),
Some(Identity(2))
);
assert!(!time_map.contains(&0));
assert!(time_map.contains(&1));
assert!(time_map.contains(&2));
assert!(!time_map.contains(&3));
// Filtering at t=2 drops the entry at t=1 but keeps t=2 itself.
time_map = time_map.filter(Timestamp::from_secs(2));
assert_eq!(time_map.get(Timestamp::from_secs(1)).copied(), None);
assert_eq!(
time_map.get(Timestamp::from_secs(2)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(3)).copied(),
Some(Identity(2))
);
assert!(!time_map.contains(&0));
assert!(!time_map.contains(&1));
assert!(time_map.contains(&2));
assert!(!time_map.contains(&3));
// Inserting after a filter still works.
time_map = time_map.insert(Timestamp::from_secs(16), 16);
assert_eq!(
time_map.get(Timestamp::from_secs(16)).copied(),
Some(Identity(16))
);
assert_eq!(
time_map.get(Timestamp::from_secs(17)).copied(),
Some(Identity(16))
);
// Re-applying the same cutoff is idempotent.
time_map = time_map.filter(Timestamp::from_secs(2));
assert_eq!(time_map.get(Timestamp::from_secs(1)).copied(), None);
assert_eq!(
time_map.get(Timestamp::from_secs(2)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(3)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(17)).copied(),
Some(Identity(16))
);
assert!(!time_map.contains(&0));
assert!(!time_map.contains(&1));
assert!(time_map.contains(&2));
assert!(!time_map.contains(&3));
assert!(time_map.contains(&16));
// A key crossing the single-byte boundary (256) behaves the same.
time_map = time_map.insert(Timestamp::from_secs(256), 256);
assert_eq!(
time_map.get(Timestamp::from_secs(256)).copied(),
Some(Identity(256))
);
assert_eq!(
time_map.get(Timestamp::from_secs(257)).copied(),
Some(Identity(256))
);
time_map = time_map.filter(Timestamp::from_secs(2));
assert_eq!(time_map.get(Timestamp::from_secs(1)).copied(), None);
assert_eq!(
time_map.get(Timestamp::from_secs(2)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(3)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(257)).copied(),
Some(Identity(256))
);
assert!(!time_map.contains(&0));
assert!(!time_map.contains(&1));
assert!(time_map.contains(&2));
assert!(!time_map.contains(&3));
assert!(time_map.contains(&256));
}
#[test]
fn test_get_empty_time_map() {
    // Lookups on a freshly created map never succeed.
    let empty = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
    assert!(empty.get(Timestamp::from_secs(100)).is_none());
}
#[test]
fn test_insert_duplicate_value_allowed_time_map() {
// The same value may be stored under multiple timestamps; the internal
// set keeps a per-value count rather than a plain membership flag.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
assert_eq!(0, time_map.set.count(&100));
time_map = time_map.insert(Timestamp::from_secs(10), 100);
assert_eq!(1, time_map.set.count(&100));
time_map = time_map.insert(Timestamp::from_secs(20), 100);
assert_eq!(2, time_map.set.count(&100));
}
#[test]
fn test_insert_filter_clears_set_via_duplicate_logic_time_map() {
// Insert a value, filter it out, then re-insert the same value: the
// internal count bookkeeping must accept this without panicking.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 100);
time_map = time_map.filter(Timestamp::from_secs(11));
// Result intentionally discarded — this test only checks for panics.
let _ = time_map.insert(Timestamp::from_secs(20), 100);
}
#[test]
fn test_replace_existing_timestamp() {
// Re-inserting at an existing timestamp replaces the value and updates
// the containment set: the old value is no longer `contains`-ed.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(100), 1);
assert_eq!(
time_map.get(Timestamp::from_secs(100)).copied(),
Some(Identity(1))
);
assert!(time_map.contains(&1));
time_map = time_map.insert(Timestamp::from_secs(100), 2);
assert!(!time_map.contains(&1));
assert!(time_map.contains(&2));
assert_eq!(
time_map.get(Timestamp::from_secs(100)).copied(),
Some(Identity(2))
);
assert_eq!(
time_map.get(Timestamp::from_secs(101)).copied(),
Some(Identity(2))
);
}
#[test]
fn test_filter_below_minimum_key_time_map() {
// A cutoff below the smallest key must leave the map unchanged.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 10);
time_map = time_map.insert(Timestamp::from_secs(20), 20);
time_map = time_map.filter(Timestamp::from_secs(9));
assert!(time_map.contains(&10));
assert!(time_map.contains(&20));
assert_eq!(
time_map.get(Timestamp::from_secs(10)).copied(),
Some(Identity(10))
);
assert_eq!(
time_map.get(Timestamp::from_secs(20)).copied(),
Some(Identity(20))
);
}
#[test]
fn test_filter_cutoff_above_maximum_key_time_map() {
// A cutoff above the largest key must empty the map entirely.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 10);
time_map = time_map.insert(Timestamp::from_secs(20), 20);
time_map = time_map.filter(Timestamp::from_secs(21));
assert!(!time_map.contains(&10));
assert!(!time_map.contains(&20));
assert_eq!(time_map.get(Timestamp::from_secs(10)), None);
assert_eq!(time_map.get(Timestamp::from_secs(20)), None);
}
#[test]
fn test_filter_exact_match_time_map() {
// A cutoff equal to an existing key keeps that key (the cutoff is
// inclusive) while dropping everything strictly before it.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 10);
time_map = time_map.insert(Timestamp::from_secs(20), 20);
time_map = time_map.insert(Timestamp::from_secs(30), 30);
time_map = time_map.filter(Timestamp::from_secs(20));
assert!(!time_map.contains(&10));
assert!(time_map.contains(&20));
assert!(time_map.contains(&30));
assert_eq!(time_map.get(Timestamp::from_secs(10)), None);
assert_eq!(
time_map.get(Timestamp::from_secs(20)).copied(),
Some(Identity(20))
);
assert_eq!(
time_map.get(Timestamp::from_secs(21)).copied(),
Some(Identity(20))
);
assert_eq!(
time_map.get(Timestamp::from_secs(30)).copied(),
Some(Identity(30))
);
}
#[test]
fn test_multiple_filters_time_map() {
// Successively higher cutoffs progressively tighten the map.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 10);
time_map = time_map.insert(Timestamp::from_secs(20), 20);
time_map = time_map.insert(Timestamp::from_secs(30), 30);
time_map = time_map.insert(Timestamp::from_secs(40), 40);
// First cutoff at an existing key: 10 goes, 20 (inclusive) survives.
time_map = time_map.filter(Timestamp::from_secs(20));
assert!(!time_map.contains(&10));
assert!(time_map.contains(&20));
assert_eq!(
time_map.get(Timestamp::from_secs(30)).copied(),
Some(Identity(30))
);
assert_eq!(
time_map.get(Timestamp::from_secs(31)).copied(),
Some(Identity(30))
);
// Cutoff between keys: 20 and 30 go, 40 survives; lookups below 40
// now find nothing.
time_map = time_map.filter(Timestamp::from_secs(35));
assert!(!time_map.contains(&20));
assert!(!time_map.contains(&30));
assert!(time_map.contains(&40));
assert_eq!(time_map.get(Timestamp::from_secs(39)), None);
assert_eq!(
time_map.get(Timestamp::from_secs(40)).copied(),
Some(Identity(40))
);
assert_eq!(
time_map.get(Timestamp::from_secs(41)).copied(),
Some(Identity(40))
);
// Final cutoff above every key empties the map.
time_map = time_map.filter(Timestamp::from_secs(41));
assert!(!time_map.contains(&40));
assert_eq!(time_map.get(Timestamp::from_secs(40)), None);
}
#[test]
fn test_zero_timestamp_time_map() {
// Timestamp zero is a valid key (empty/minimal nibble encoding) and
// must survive a filter at zero but not a filter at one.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(0), 0);
time_map = time_map.insert(Timestamp::from_secs(10), 10);
assert_eq!(
time_map.get(Timestamp::from_secs(0)).copied(),
Some(Identity(0))
);
assert_eq!(
time_map.get(Timestamp::from_secs(5)).copied(),
Some(Identity(0))
);
assert!(time_map.contains(&0));
// Filtering at the zero cutoff keeps the zero entry (inclusive).
time_map = time_map.filter(Timestamp::from_secs(0));
assert_eq!(
time_map.get(Timestamp::from_secs(0)).copied(),
Some(Identity(0))
);
assert!(time_map.contains(&0));
// Filtering at one removes it.
time_map = time_map.filter(Timestamp::from_secs(1));
assert_eq!(time_map.get(Timestamp::from_secs(0)), None);
assert!(!time_map.contains(&0));
assert_eq!(time_map.get(Timestamp::from_secs(5)), None);
assert_eq!(
time_map.get(Timestamp::from_secs(10)).copied(),
Some(Identity(10))
);
}
#[test]
fn test_large_key_differences_time_map() {
// Keys far apart (different encoded lengths in the trie) must behave
// identically to adjacent keys for lookup and filtering.
let mut time_map = TimeFilterMap::<Identity<i32>, InMemoryDB>::new();
time_map = time_map.insert(Timestamp::from_secs(10), 10);
time_map = time_map.insert(Timestamp::from_secs(1000000), 1000000);
assert_eq!(time_map.get(Timestamp::from_secs(9)).copied(), None);
assert_eq!(
time_map.get(Timestamp::from_secs(10)).copied(),
Some(Identity(10))
);
assert_eq!(
time_map.get(Timestamp::from_secs(11)).copied(),
Some(Identity(10))
);
// Everything up to (but not including) the big key resolves to 10.
assert_eq!(
time_map.get(Timestamp::from_secs(999999)).copied(),
Some(Identity(10))
);
assert_eq!(
time_map.get(Timestamp::from_secs(1000000)).copied(),
Some(Identity(1000000))
);
assert_eq!(
time_map.get(Timestamp::from_secs(1000001)).copied(),
Some(Identity(1000000))
);
assert!(time_map.contains(&10));
assert!(time_map.contains(&1000000));
// Cutoff at the small key: both entries survive (inclusive cutoff).
time_map = time_map.filter(Timestamp::from_secs(10));
assert_eq!(
time_map.get(Timestamp::from_secs(10)).copied(),
Some(Identity(10))
);
assert_eq!(
time_map.get(Timestamp::from_secs(1000000)).copied(),
Some(Identity(1000000))
);
assert!(time_map.contains(&10));
assert!(time_map.contains(&1000000));
// Cutoff at the big key: only the big entry survives.
time_map = time_map.filter(Timestamp::from_secs(1000000));
assert_eq!(time_map.get(Timestamp::from_secs(10)), None);
assert_eq!(
time_map.get(Timestamp::from_secs(1000000)).copied(),
Some(Identity(1000000))
);
assert!(!time_map.contains(&10));
assert!(time_map.contains(&1000000));
// Cutoff above everything empties the map.
time_map = time_map.filter(Timestamp::from_secs(99999999));
assert_eq!(time_map.get(Timestamp::from_secs(1000000)), None);
assert!(!time_map.contains(&1000000));
}
#[test]
fn test_default_storage() {
// Exercises the per-DB-type global default-storage registry: accessing
// an unset default panics, set/get round-trips, storages for different
// wrapper tags are isolated, and dropping/re-registering behaves.
struct Tag1;
type D1 = WrappedDB<DefaultDB, Tag1>;
struct Tag2;
type D2 = WrappedDB<DefaultDB, Tag2>;
{
// InMemoryDB's default is created on first access.
default_storage::<InMemoryDB>();
assert!(try_get_default_storage::<InMemoryDB>().is_some());
}
// No default registered for D1 yet: `try_get` is None and the
// panicking accessor panics.
assert!(try_get_default_storage::<D1>().is_none());
let result = std::panic::catch_unwind(|| {
default_storage::<D1>();
});
assert!(result.is_err());
// Register D1 and store a small object in its arena.
let b1 = set_default_storage::<D1>(Storage::<D1>::default).unwrap();
let s1 = b1.arena.alloc([42u8; SMALL_OBJECT_LIMIT]);
assert!(
default_storage::<D1>()
.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_ok()
);
// D2 has its own, separate storage: the key is unknown there.
set_default_storage::<D2>(Storage::<D2>::default).unwrap();
assert!(
default_storage::<D2>()
.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_err()
);
// Dropping D1's default forgets the registration...
unsafe_drop_default_storage::<D1>();
assert!(try_get_default_storage::<D1>().is_none());
// ...and a fresh default does not contain the old object,
set_default_storage::<D1>(Storage::<D1>::default).unwrap();
assert!(
default_storage::<D1>()
.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_err()
);
// while the old handle we kept still does.
assert!(
b1.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_ok()
);
assert!(
default_storage::<D1>()
.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_err()
);
// Re-registering the original storage instance makes the object
// reachable through the default accessor again.
let s = Arc::into_inner(b1).expect("we should have the only reference");
unsafe_drop_default_storage::<D1>();
set_default_storage::<D1>(|| s).unwrap();
assert!(
default_storage::<D1>()
.get::<[u8; SMALL_OBJECT_LIMIT]>(&s1.as_typed_key())
.is_ok()
);
}
#[cfg(feature = "sqlite")]
#[test]
fn persist_to_disk_sqldb() {
// Runs the generic persistence round-trip against a temp-file SqlDB.
use crate::{DefaultHasher, db::SqlDB};
let path = tempfile::NamedTempFile::new().unwrap().into_temp_path();
test_persist_to_disk::<SqlDB<DefaultHasher>>(|| SqlDB::exclusive_file(&path));
}
#[cfg(feature = "parity-db")]
#[test]
fn persist_to_disk_paritydb() {
// Runs the generic persistence round-trip against a temp-dir ParityDb.
// `keep()` stops the TempDir guard from deleting the directory while
// the databases are using it.
use crate::{DefaultHasher, db::ParityDb};
let path = tempfile::TempDir::new().unwrap().keep();
test_persist_to_disk::<ParityDb<DefaultHasher>>(|| ParityDb::open(&path));
}
#[cfg(any(feature = "sqlite", feature = "parity-db"))]
// Generic round-trip: persist an array through one DB handle, drop it, then
// re-open the same backing store with a second handle and read it back.
fn test_persist_to_disk<D: DB>(mk_db: impl Fn() -> D) {
// Local wrapper tag so this test gets its own default-storage slot.
struct Tag;
type W<D> = WrappedDB<D, Tag>;
let key1 = {
let db1: W<D> = WrappedDB::wrap(mk_db());
let storage1 = Storage::new(DEFAULT_CACHE_SIZE, db1);
let storage1 = set_default_storage(|| storage1).unwrap();
let arena = &storage1.arena;
let vals1 = vec![1u8, 1, 2, 3, 5];
let array1: super::Array<_, W<D>> = vals1.into();
let mut sp1 = arena.alloc(array1.clone());
// Mark the pointer persistent and force pending writes to disk.
sp1.persist();
storage1.with_backend(|backend| backend.flush_all_changes_to_db());
sp1.as_typed_key()
};
unsafe_drop_default_storage::<W<D>>();
// NOTE(review): presumably gives the first DB handle time to release
// its file locks before re-opening — confirm whether this is needed.
std::thread::sleep(std::time::Duration::from_secs(1));
let db2: W<D> = WrappedDB::wrap(mk_db());
let storage2 = Storage::new(DEFAULT_CACHE_SIZE, db2);
let storage2 = set_default_storage(|| storage2).unwrap();
// The persisted key must resolve through the fresh handle and match
// the originally stored contents.
let array1 = storage2.arena.get::<super::Array<_, _>>(&key1).unwrap();
let vals2 = vec![1u8, 1, 2, 3, 5];
let array2: super::Array<_, W<D>> = vals2.into();
assert_eq!(*array1, array2);
}
#[test]
fn deserialization_malicious_map() {
// Hand-builds a trie that no honest writer would produce (an Extension
// node with fabricated annotations leading straight to a Leaf) and
// checks that deserializing its serialized form is rejected —
// presumably by the `Storable` invariant check; TODO confirm which
// invariant fires.
use crate::arena::Sp;
use crate::merkle_patricia_trie::{MerklePatriciaTrie, Node};
use serialize::{Deserializable, Serializable};
let leaf: Node<u32> = Node::Leaf {
ann: SizeAnn(0),
value: Sp::new(42u32),
};
let extension_node = Node::Extension {
ann: SizeAnn(1),
compressed_path: vec![1, 2, 3],
child: Sp::new(leaf),
};
let mpt = MerklePatriciaTrie(Sp::new(extension_node));
let malformed_map = Map {
mpt: Sp::new(mpt),
key_type: std::marker::PhantomData::<u32>,
};
// Serialization of the in-memory value succeeds; only the decode path
// enforces well-formedness.
let mut serialized = std::vec::Vec::new();
malformed_map.serialize(&mut serialized).unwrap();
let mut cursor = std::io::Cursor::new(&serialized);
assert!(Map::<u32, u32>::deserialize(&mut cursor, 0).is_err());
}
#[test]
fn init_many() {
    // Smoke test: constructing many empty arrays back to back must not
    // panic.
    (0..10_000).for_each(|_| {
        Array::<()>::new();
    });
}
}