use crate::types::Value;
/// Fraction (in percent) of the configured memory limit that is actually
/// usable; the remaining margin absorbs estimation error.
pub const MEMORY_SAFETY_MARGIN_PERCENT: usize = 90;

/// Applies the safety margin to `max_bytes`, rounding down.
///
/// The multiplication is done in `u128` so that values near `usize::MAX`
/// cannot overflow before the division.
pub fn effective_limit(max_bytes: usize) -> usize {
    let scaled = (max_bytes as u128) * (MEMORY_SAFETY_MARGIN_PERCENT as u128);
    (scaled / 100) as usize
}
// Fixed bookkeeping cost charged per keyspace entry on top of key + value
// bytes (see `entry_size`). The `entry_overhead_not_too_small` test checks it
// against the measured sizes of `Entry`, `CompactString`, and a hash-map slot.
pub(crate) const ENTRY_OVERHEAD: usize = 104;
/// Running estimate of keyspace memory usage: total accounted bytes plus the
/// number of live keys. All mutators use saturating arithmetic so accounting
/// drift can never panic or wrap.
#[derive(Debug)]
pub struct MemoryTracker {
    // Estimated bytes currently charged to tracked entries.
    used_bytes: usize,
    // Number of keys currently accounted for.
    key_count: usize,
}

impl MemoryTracker {
    /// Creates a tracker with both counters at zero.
    pub fn new() -> Self {
        Self {
            used_bytes: 0,
            key_count: 0,
        }
    }

    /// Resets both counters to zero.
    pub fn reset(&mut self) {
        self.used_bytes = 0;
        self.key_count = 0;
    }

    /// Estimated bytes currently in use.
    pub fn used_bytes(&self) -> usize {
        self.used_bytes
    }

    /// Number of keys currently tracked.
    pub fn key_count(&self) -> usize {
        self.key_count
    }

    /// Accounts for a newly inserted entry.
    pub fn add(&mut self, key: &str, value: &Value) {
        // Saturating adds keep this method consistent with every other
        // mutator on this type and rule out a debug-build overflow panic
        // caused by accounting drift.
        self.used_bytes = self.used_bytes.saturating_add(entry_size(key, value));
        self.key_count = self.key_count.saturating_add(1);
    }

    /// Removes the accounting for an entry; saturates at zero so a
    /// double-remove cannot underflow.
    pub fn remove(&mut self, key: &str, value: &Value) {
        let size = entry_size(key, value);
        self.used_bytes = self.used_bytes.saturating_sub(size);
        self.key_count = self.key_count.saturating_sub(1);
    }

    /// Re-accounts a key whose value changed. The key count is unchanged
    /// because the key itself remains present.
    pub fn replace(&mut self, key: &str, old_value: &Value, new_value: &Value) {
        let old_size = entry_size(key, old_value);
        let new_size = entry_size(key, new_value);
        self.used_bytes = self
            .used_bytes
            .saturating_sub(old_size)
            .saturating_add(new_size);
    }

    /// Like [`Self::replace`], but with entry sizes precomputed by the caller.
    pub fn adjust(&mut self, old_entry_size: usize, new_entry_size: usize) {
        self.used_bytes = self
            .used_bytes
            .saturating_sub(old_entry_size)
            .saturating_add(new_entry_size);
    }

    /// Adds `delta` bytes of usage without changing the key count.
    pub fn grow_by(&mut self, delta: usize) {
        self.used_bytes = self.used_bytes.saturating_add(delta);
    }

    /// Subtracts `delta` bytes of usage without changing the key count.
    pub fn shrink_by(&mut self, delta: usize) {
        self.used_bytes = self.used_bytes.saturating_sub(delta);
    }

    /// Removes one key whose entry size was computed earlier by the caller.
    pub fn remove_with_size(&mut self, size: usize) {
        self.used_bytes = self.used_bytes.saturating_sub(size);
        self.key_count = self.key_count.saturating_sub(1);
    }
}
impl Default for MemoryTracker {
    /// Equivalent to [`MemoryTracker::new`]: an empty tracker.
    fn default() -> Self {
        Self::new()
    }
}
/// Element-count threshold above which a collection value is handed to
/// deferred (lazy) freeing instead of being dropped inline.
pub const LAZY_FREE_THRESHOLD: usize = 64;

/// Returns `true` when `value` is a collection with strictly more than
/// [`LAZY_FREE_THRESHOLD`] elements. Strings (and protobuf payloads, when
/// enabled) are never considered large.
pub fn is_large_value(value: &Value) -> bool {
    let element_count = match value {
        Value::String(_) => return false,
        Value::List(d) => d.len(),
        Value::SortedSet(ss) => ss.len(),
        Value::Hash(m) => m.len(),
        Value::Set(s) => s.len(),
        #[cfg(feature = "vector")]
        Value::Vector(vs) => vs.len(),
        #[cfg(feature = "protobuf")]
        Value::Proto { .. } => return false,
    };
    element_count > LAZY_FREE_THRESHOLD
}
/// Total accounted size of one keyspace entry: fixed per-entry overhead plus
/// the key's bytes plus the value's estimated payload (see [`value_size`]).
pub fn entry_size(key: &str, value: &Value) -> usize {
    ENTRY_OVERHEAD + key.len() + value_size(value)
}
// Approximate heap-overhead constants consumed by `value_size`. Per-element
// values cover container bookkeeping around each stored item; base values
// cover the container's own header/allocation.
// Per `Bytes` element stored in a list's `VecDeque`.
pub(crate) const VECDEQUE_ELEMENT_OVERHEAD: usize = 32;
// `VecDeque` container header.
pub(crate) const VECDEQUE_BASE_OVERHEAD: usize = 24;
// NOTE(review): not referenced in this file's visible code — presumably used
// elsewhere in the crate for packed-hash accounting; confirm before removing.
pub(crate) const PACKED_HASH_ENTRY_OVERHEAD: usize = 6;
pub(crate) const PACKED_HASH_BASE_OVERHEAD: usize = 26;
// Per (field, value) pair in a fully materialized hash map.
pub(crate) const HASHMAP_ENTRY_OVERHEAD: usize = 64;
// `HashMap` container header.
pub(crate) const HASHMAP_BASE_OVERHEAD: usize = 48;
// Per member stored in a set.
pub(crate) const HASHSET_MEMBER_OVERHEAD: usize = 40;
// `HashSet` container header.
pub(crate) const HASHSET_BASE_OVERHEAD: usize = 48;
pub fn value_size(value: &Value) -> usize {
match value {
Value::String(data) => data.len(),
Value::List(deque) => {
let element_bytes: usize = deque
.iter()
.map(|b| b.len() + VECDEQUE_ELEMENT_OVERHEAD)
.sum();
VECDEQUE_BASE_OVERHEAD + element_bytes
}
Value::SortedSet(ss) => ss.memory_usage(),
Value::Hash(hash) => {
use crate::types::hash::HashValue;
match hash.as_ref() {
HashValue::Packed(buf) => {
24 + buf.len()
}
HashValue::Full(map) => {
let entry_bytes: usize = map
.iter()
.map(|(k, v)| k.len() + v.len() + HASHMAP_ENTRY_OVERHEAD)
.sum();
HASHMAP_BASE_OVERHEAD + entry_bytes
}
}
}
Value::Set(set) => {
let member_bytes: usize = set.iter().map(|m| m.len() + HASHSET_MEMBER_OVERHEAD).sum();
HASHSET_BASE_OVERHEAD + member_bytes
}
#[cfg(feature = "vector")]
Value::Vector(vs) => vs.memory_usage(),
#[cfg(feature = "protobuf")]
Value::Proto { type_name, data } => type_name.len() + data.len() + 48,
}
}
// Unit tests for the memory-accounting helpers above.
#[cfg(test)]
mod tests {
    use super::*;
    use bytes::Bytes;

    // Convenience constructor for a string value.
    fn string_val(s: &str) -> Value {
        Value::String(Bytes::from(s.to_string()))
    }

    #[test]
    fn new_tracker_is_empty() {
        let t = MemoryTracker::new();
        assert_eq!(t.used_bytes(), 0);
        assert_eq!(t.key_count(), 0);
    }

    #[test]
    fn add_increases_usage() {
        let mut t = MemoryTracker::new();
        let val = string_val("hello");
        t.add("key", &val);
        assert_eq!(t.key_count(), 1);
        assert_eq!(t.used_bytes(), entry_size("key", &val));
    }

    #[test]
    fn remove_decreases_usage() {
        let mut t = MemoryTracker::new();
        let val = string_val("data");
        t.add("k", &val);
        t.remove("k", &val);
        assert_eq!(t.used_bytes(), 0);
        assert_eq!(t.key_count(), 0);
    }

    #[test]
    fn replace_adjusts_usage() {
        let mut t = MemoryTracker::new();
        let old = string_val("short");
        let new = string_val("a much longer value here");
        t.add("k", &old);
        let before = t.used_bytes();
        t.replace("k", &old, &new);
        // Replacing keeps the key count; only byte usage changes.
        assert_eq!(t.key_count(), 1);
        assert!(t.used_bytes() > before);
        assert_eq!(t.used_bytes(), entry_size("k", &new),);
    }

    #[test]
    fn remove_saturates_at_zero() {
        let mut t = MemoryTracker::new();
        let val = string_val("x");
        // Removing from an empty tracker must not underflow either counter.
        t.remove("k", &val);
        assert_eq!(t.used_bytes(), 0);
        assert_eq!(t.key_count(), 0);
    }

    #[test]
    fn entry_size_accounts_for_key_and_value() {
        let val = string_val("test");
        let size = entry_size("mykey", &val);
        // 5 key bytes + 4 value bytes + fixed per-entry overhead.
        assert_eq!(size, 5 + 4 + ENTRY_OVERHEAD);
    }

    // Sanity-check ENTRY_OVERHEAD against the measured sizes of the structs
    // it is meant to cover, so the constant cannot silently fall behind.
    #[test]
    fn entry_overhead_not_too_small() {
        use crate::keyspace::Entry;
        use compact_str::CompactString;
        let entry_size = std::mem::size_of::<Entry>();
        let key_struct_size = std::mem::size_of::<CompactString>();
        let hashmap_per_entry = 8;
        let minimum = entry_size + key_struct_size + hashmap_per_entry;
        assert!(
            ENTRY_OVERHEAD >= minimum,
            "ENTRY_OVERHEAD ({ENTRY_OVERHEAD}) is less than measured minimum \
            ({minimum} = Entry({entry_size}) + CompactString({key_struct_size}) + \
            hashmap({hashmap_per_entry}))"
        );
    }

    #[test]
    fn list_value_size() {
        let mut deque = std::collections::VecDeque::new();
        deque.push_back(Bytes::from("hello"));
        deque.push_back(Bytes::from("world"));
        let val = Value::List(deque);
        let size = value_size(&val);
        // Base container overhead plus (payload + per-element overhead) each.
        let expected = VECDEQUE_BASE_OVERHEAD
            + (5 + VECDEQUE_ELEMENT_OVERHEAD)
            + (5 + VECDEQUE_ELEMENT_OVERHEAD);
        assert_eq!(size, expected);
    }

    #[test]
    fn empty_list_value_size() {
        let val = Value::List(std::collections::VecDeque::new());
        assert_eq!(value_size(&val), VECDEQUE_BASE_OVERHEAD);
    }

    #[test]
    fn multiple_entries() {
        let mut t = MemoryTracker::new();
        let v1 = string_val("aaa");
        let v2 = string_val("bbbbb");
        t.add("k1", &v1);
        t.add("k2", &v2);
        assert_eq!(t.key_count(), 2);
        assert_eq!(
            t.used_bytes(),
            entry_size("k1", &v1) + entry_size("k2", &v2),
        );
        t.remove("k1", &v1);
        assert_eq!(t.key_count(), 1);
        assert_eq!(t.used_bytes(), entry_size("k2", &v2));
    }

    #[test]
    fn effective_limit_applies_margin() {
        assert_eq!(effective_limit(1000), 900);
    }

    #[test]
    fn effective_limit_rounds_down() {
        // Integer division truncates toward zero.
        assert_eq!(effective_limit(1001), 900);
    }

    #[test]
    fn effective_limit_zero() {
        assert_eq!(effective_limit(0), 0);
    }

    #[test]
    fn string_is_never_large() {
        // `is_large_value` is unconditionally false for strings.
        let val = Value::String(Bytes::from(vec![0u8; 10_000]));
        assert!(!is_large_value(&val));
    }

    #[test]
    fn small_list_is_not_large() {
        // Exactly at the threshold: not large (the comparison is strict `>`).
        let mut d = std::collections::VecDeque::new();
        for _ in 0..LAZY_FREE_THRESHOLD {
            d.push_back(Bytes::from("x"));
        }
        assert!(!is_large_value(&Value::List(d)));
    }

    #[test]
    fn big_list_is_large() {
        // One element past the threshold tips it over.
        let mut d = std::collections::VecDeque::new();
        for _ in 0..=LAZY_FREE_THRESHOLD {
            d.push_back(Bytes::from("x"));
        }
        assert!(is_large_value(&Value::List(d)));
    }

    #[test]
    fn big_hash_is_large() {
        use crate::types::hash::HashValue;
        let mut h = HashValue::default();
        for i in 0..=LAZY_FREE_THRESHOLD {
            h.insert(format!("f{i}").into(), Bytes::from("v"));
        }
        assert!(is_large_value(&Value::Hash(Box::new(h))));
    }

    #[test]
    fn big_set_is_large() {
        let mut s = std::collections::HashSet::new();
        for i in 0..=LAZY_FREE_THRESHOLD {
            s.insert(format!("m{i}"));
        }
        assert!(is_large_value(&Value::Set(Box::new(s))));
    }
}