use anyhow::Context as AContext;
use lru_time_cache::LruCache;
use rusqlite::Connection;
use rusqlite::{functions::Context, params};
use std::sync::LazyLock;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use zstd::dict::{DecoderDictionary, EncoderDictionary};
/// Encoder cache key: (raw sqlite3 connection handle pointer, dict id, compression level).
type EncoderCache = LruCache<(usize, i32, i32), Arc<EncoderDictionary<'static>>>;
/// Process-wide cache of prepared zstd encoder dictionaries.
/// Entries expire after 10 s, so changes to the `_zstd_dicts` table are
/// eventually picked up even without an explicit cache invalidation.
static ENCODER_DICTS: LazyLock<Mutex<EncoderCache>> =
LazyLock::new(|| Mutex::new(LruCache::with_expiry_duration(Duration::from_secs(10))));
/// Decoder cache key: (raw sqlite3 connection handle pointer, dict id).
/// No compression level in the key — decoding is level-independent.
type DecoderCache = LruCache<(usize, i32), Arc<DecoderDictionary<'static>>>;
/// Process-wide cache of prepared zstd decoder dictionaries (10 s expiry,
/// same rationale as `ENCODER_DICTS`).
static DECODER_DICTS: LazyLock<Mutex<DecoderCache>> =
LazyLock::new(|| Mutex::new(LruCache::with_expiry_duration(Duration::from_secs(10))));
/// Drop every cached encoder and decoder dictionary.
///
/// The caches are process-global and keyed by connection handle, so this
/// clears entries for *all* connections, not just `_db` (hence the unused
/// parameter — it exists only to fit the caller's signature).
pub(crate) fn invalidate_caches(_db: &Connection) {
    log::debug!("Invalidating dict caches");
    // Each lock guard is a temporary dropped at the end of its statement,
    // so the two caches are never locked at the same time.
    ENCODER_DICTS.lock().unwrap().clear();
    DECODER_DICTS.lock().unwrap().clear();
}
/// Fetch the zstd encoder dictionary whose id is given by the SQL function
/// argument at `arg_index`, prepared for compression `level`.
///
/// The dictionary bytes are loaded from the `_zstd_dicts` table and cached
/// per (connection handle, dict id, level); cached entries expire after 10 s.
///
/// # Errors
/// Fails when the argument is not an i32, the connection cannot be obtained,
/// or no row with that id exists in `_zstd_dicts`.
pub fn encoder_dict_from_ctx(
    ctx: &Context,
    arg_index: usize,
    level: i32,
) -> anyhow::Result<Arc<EncoderDictionary<'static>>> {
    let id: i32 = ctx.get(arg_index)?;
    // The raw sqlite3 handle pointer is used only as an opaque cache key to
    // distinguish connections — it is never dereferenced.
    let db = unsafe { ctx.get_connection()? };
    let key = (unsafe { db.handle() } as usize, id, level);

    let mut cache = ENCODER_DICTS.lock().unwrap();
    if let Some(hit) = cache.get(&key) {
        return Ok(hit.clone());
    }
    log::debug!(
        "loading encoder dictionary {} level {} (should only happen once per 10s)",
        id,
        level
    );
    let dict_raw: Vec<u8> = db
        .query_row(
            "select dict from _zstd_dicts where id = ?",
            params![id],
            |r| r.get(0),
        )
        .with_context(|| format!("getting dict with id={id} from _zstd_dicts"))?;
    let dict = Arc::new(EncoderDictionary::copy(&dict_raw, level));
    cache.insert(key, dict.clone());
    Ok(dict)
}
/// Fetch the zstd decoder dictionary whose id is given by the SQL function
/// argument at `arg_index`.
///
/// The dictionary bytes are loaded from the `_zstd_dicts` table and cached
/// per (connection handle, dict id); cached entries expire after 10 s.
///
/// # Errors
/// Fails when the argument is not an i32, the connection cannot be obtained,
/// or no row with that id exists in `_zstd_dicts`.
pub fn decoder_dict_from_ctx(
    ctx: &Context,
    arg_index: usize,
) -> anyhow::Result<Arc<DecoderDictionary<'static>>> {
    let id: i32 = ctx.get(arg_index)?;
    // The raw sqlite3 handle pointer is used only as an opaque cache key to
    // distinguish connections — it is never dereferenced.
    let db = unsafe { ctx.get_connection()? };
    let db_handle_pointer = unsafe { db.handle() } as usize;
    log::trace!("Using DB Handle pointer {db_handle_pointer} as cache key");
    let cache_key = (db_handle_pointer, id);
    let mut dicts_write = DECODER_DICTS.lock().unwrap();
    let entry = dicts_write.entry(cache_key);
    let res = match entry {
        lru_time_cache::Entry::Vacant(e) => e.insert({
            log::debug!(
                "loading decoder dictionary {} (should only happen once per 10s)",
                id
            );
            // Fix: reuse the connection obtained above instead of calling
            // `ctx.get_connection()` a second time (the encoder twin already
            // does it this way).
            let dict_raw: Vec<u8> = db
                .query_row(
                    "select dict from _zstd_dicts where id = ?",
                    params![id],
                    |r| r.get(0),
                )
                .with_context(|| format!("getting dict with id={id} from _zstd_dicts"))?;
            let dict = DecoderDictionary::copy(&dict_raw);
            Arc::new(dict)
        }),
        lru_time_cache::Entry::Occupied(o) => o.into_mut(),
    }
    .clone();
    Ok(res)
}