use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use ahash::HashMap;
use re_byte_size::SizeBytes as _;
use re_chunk::RowId;
use re_chunk_store::ChunkStoreEvent;
use re_entity_db::EntityDb;
use re_log_types::hash::Hash64;
use re_renderer::external::re_video::VideoLoadError;
use re_renderer::video::Video;
use re_sdk_types::ComponentIdentifier;
use re_sdk_types::components::MediaType;
use re_video::DecodeSettings;
use crate::Cache;
use crate::cache::filter_blob_removed_events;
use crate::image_info::StoredBlobCacheKey;
/// One cached video-load attempt (success or failure) plus bookkeeping for eviction.
struct Entry {
    /// Set to `true` whenever the entry is accessed; cleared at the start of
    /// each frame so entries that go a full frame without access can be evicted.
    used_this_frame: AtomicBool,
    /// The loaded video, or the error hit while loading it.
    /// Errors are cached too, so a broken asset isn't re-parsed every frame.
    /// `Arc` lets callers keep the result past the cache borrow.
    video: Arc<Result<Video, VideoLoadError>>,
    /// Human-readable label, used for memory-usage reporting.
    debug_name: String,
}
impl re_byte_size::SizeBytes for Entry {
    /// Heap footprint of this entry (the atomic flag itself lives inline, so it
    /// contributes nothing here).
    fn heap_size_bytes(&self) -> u64 {
        // Exhaustive destructuring: adding a field to `Entry` forces a
        // compile error here, so the accounting can't silently go stale.
        let Self {
            used_this_frame: _, // inline atomic — no heap allocation
            video,
            debug_name,
        } = self;
        video.heap_size_bytes() + debug_name.heap_size_bytes()
    }
}
/// Cache of loaded video assets, keyed two levels deep:
/// outer key identifies the stored blob the bytes come from; inner key is a
/// hash of (media type, hw-acceleration setting), since the same blob can be
/// loaded under different settings.
#[derive(Default)]
pub struct VideoAssetCache(HashMap<StoredBlobCacheKey, HashMap<Hash64, Entry>>);
impl VideoAssetCache {
    /// Returns the cached video for the given blob, loading it on first access.
    ///
    /// Load *failures* are cached as well, so a broken asset is not re-parsed
    /// every frame. Accessing an entry marks it as used for this frame, which
    /// keeps it alive across the eviction pass in `begin_frame`.
    pub fn entry(
        &mut self,
        debug_name: String,
        blob_row_id: RowId,
        blob_component: ComponentIdentifier,
        video_buffer: &re_sdk_types::datatypes::Blob,
        media_type: Option<&MediaType>,
        decode_settings: DecodeSettings,
    ) -> Arc<Result<Video, VideoLoadError>> {
        re_tracing::profile_function!(&debug_name);

        let blob_cache_key = StoredBlobCacheKey::new(blob_row_id, blob_component);

        // Without a media type (explicit or sniffed from the bytes) we cannot
        // pick a loader at all, so bail out early.
        let media_type = match media_type
            .cloned()
            .or_else(|| MediaType::guess_from_data(video_buffer))
        {
            Some(media_type) => media_type,
            None => return Arc::new(Err(VideoLoadError::UnrecognizedMimeType)),
        };

        // Same blob, different settings (e.g. hw acceleration toggled) must not
        // collide, so the settings are folded into the inner key.
        let inner_key = Hash64::hash((media_type.as_str(), decode_settings.hw_acceleration));

        let per_settings = self.0.entry(blob_cache_key).or_default();
        let entry = per_settings.entry(inner_key).or_insert_with(|| {
            let video = re_video::VideoDataDescription::load_from_bytes(
                video_buffer,
                &media_type,
                &debug_name,
                blob_row_id.as_tuid(),
            )
            .map(|data| Video::load(debug_name.clone(), data, decode_settings));

            Entry {
                used_this_frame: AtomicBool::new(true),
                video: Arc::new(video),
                debug_name,
            }
        });

        // Redundant for a freshly inserted entry, but required to keep a
        // pre-existing entry alive through the next `begin_frame`.
        entry.used_this_frame.store(true, Ordering::Release);

        entry.video.clone()
    }
}
// The `where` bounds are trivially satisfied; they read like a compile-time
// assertion that `Video`/`VideoLoadError` stay `Send + Sync`
// — NOTE(review): assumed intentional, confirm before removing.
impl Cache for VideoAssetCache
where
    Video: Send + Sync,
    VideoLoadError: Send + Sync,
{
    fn name(&self) -> &'static str {
        "VideoAssetCache"
    }

    fn begin_frame(&mut self) {
        re_tracing::profile_function!();

        // Evict every entry that went untouched since the previous frame,
        // then drop outer buckets that became empty.
        self.0.retain(|_blob_key, per_key| {
            per_key.retain(|_, entry| entry.used_this_frame.load(Ordering::Acquire));
            !per_key.is_empty()
        });

        // Reset the usage flags for the coming frame and give the surviving
        // videos a chance to do their own per-frame housekeeping.
        #[expect(clippy::iter_over_hash_type)]
        for entry in self.0.values().flat_map(|per_key| per_key.values()) {
            entry.used_this_frame.store(false, Ordering::Release);
            if let Ok(video) = entry.video.as_ref() {
                video.begin_frame();
            }
        }
    }

    fn purge_memory(&mut self) {
        // Intentionally a no-op: unused entries are already evicted every
        // frame in `begin_frame`, and entries still in use shouldn't be
        // dropped out from under the renderer.
        // NOTE(review): assumed intentional — confirm nothing else should
        // be released here.
    }

    fn on_store_events(&mut self, events: &[&ChunkStoreEvent], _entity_db: &EntityDb) {
        re_tracing::profile_function!();

        // Drop cached videos whose underlying blob was removed from the store.
        let removed_keys = filter_blob_removed_events(events);
        self.0.retain(|cache_key, _| !removed_keys.contains(cache_key));
    }
}
impl re_byte_size::MemUsageTreeCapture for VideoAssetCache {
    /// Builds a memory-usage tree with one leaf per cached video,
    /// labeled by debug name and sorted for a stable display order.
    fn capture_mem_usage_tree(&self) -> re_byte_size::MemUsageTree {
        let mut node = re_byte_size::MemUsageNode::new();

        let mut items: Vec<(&str, u64)> = Vec::new();
        for entry in self.0.values().flat_map(|per_key| per_key.values()) {
            items.push((entry.debug_name.as_str(), entry.heap_size_bytes()));
        }
        // Stable sort by name, matching the original comparator.
        items.sort_by_key(|&(name, _)| name);

        for (debug_name, size) in items {
            node.add(debug_name, re_byte_size::MemUsageTree::Bytes(size));
        }

        node.with_total_size_bytes(self.0.total_size_bytes())
    }
}