mod video_frame_reference;
mod video_stream;
use re_log_types::hash::Hash64;
use re_log_types::{EntityPath, EntityPathHash};
use re_renderer::renderer;
use re_renderer::resource_managers::ImageDataDesc;
use re_sdk_types::ViewClassIdentifier;
use re_sdk_types::blueprint::components::VisualizerInstructionId;
use re_ui::ContextExt as _;
use re_video::player::{VideoPlaybackIssueSeverity, VideoPlayerError};
use re_viewer_context::{ViewClass as _, ViewContext};
pub use video_frame_reference::VideoFrameReferenceVisualizer;
pub use video_stream::VideoStreamVisualizer;
use super::{LoadingIndicator, SpatialViewVisualizerData, UiLabel, UiLabelStyle, UiLabelTarget};
use crate::{PickableRectSourceData, PickableTexturedRect, SpatialView2D};
/// Salt passed as `time_track_salt` to [`video_stream_id`] for streams that
/// follow the current time cursor, keeping their player ids distinct from
/// players driven by other time tracks.
pub const AT_TIME_CURSOR_SALT: u64 = 0x12356;
fn video_stream_id(
entity_path: &EntityPath,
sample_component: re_sdk_types::ComponentIdentifier,
time_track_salt: u64,
) -> re_video::player::VideoPlayerStreamId {
re_video::player::VideoPlayerStreamId(
re_log_types::hash::Hash64::hash((entity_path.hash(), sample_component, time_track_salt))
.hash64(),
)
}
/// Everything needed to draw one decoded video frame in the view.
struct VideoFrameRenderInfo {
    /// The frame texture (possibly still pending) obtained from the video player.
    texture: re_renderer::video::VideoFrameTexture,

    /// Depth offset at which the frame's textured rect is rendered.
    depth_offset: re_renderer::DepthOffset,

    /// Tint multiplied into the frame's colors when rendering.
    multiplicative_tint: egui::Rgba,
}
/// A problem that occurred during video playback, plus how to present it.
struct VideoPlaybackIssue {
    /// Human-readable description of the issue, shown as a label.
    message: String,

    /// Severity, determining label style vs. loading-indicator treatment.
    severity: VideoPlaybackIssueSeverity,

    /// Whether a repaint should be requested so playback can make progress.
    should_request_more_frames: bool,

    /// Whether the video frame should still be displayed despite the issue.
    show_frame: bool,
}
impl VideoPlaybackIssue {
    /// Creates an issue from a free-form message and severity.
    ///
    /// Issues built this way do not request further repaints and suppress
    /// display of the video frame.
    pub fn custom(message: String, severity: VideoPlaybackIssueSeverity) -> Self {
        Self {
            show_frame: false,
            should_request_more_frames: false,
            message,
            severity,
        }
    }
}
impl From<VideoPlayerError> for VideoPlaybackIssue {
    fn from(error: VideoPlayerError) -> Self {
        Self {
            message: error.to_string(),
            severity: error.severity(),
            should_request_more_frames: error.should_request_more_frames(),
            // Whether it still makes sense to display the frame while this
            // error is active. The match is deliberately exhaustive (no `_`
            // arm) so that adding a new error variant forces a decision here.
            show_frame: match error {
                VideoPlayerError::NegativeTimestamp
                | VideoPlayerError::InsufficientSampleData(_) => false,
                VideoPlayerError::EmptyBuffer
                | VideoPlayerError::UnloadedSampleData(_)
                | VideoPlayerError::CreateChunk(_)
                | VideoPlayerError::DecodeChunk(_)
                | VideoPlayerError::Decoding(_)
                | VideoPlayerError::BadData
                | VideoPlayerError::TextureUploadError(_)
                | VideoPlayerError::DecoderUnexpectedlyExited => true,
            },
        }
    }
}
/// Draws a decoded video frame (if any) into the view and surfaces any
/// playback issue as a loading indicator, an error icon, and/or a text label.
///
/// `frame` and `issue` may both be present; whether the frame is still shown
/// alongside the issue is governed by `VideoPlaybackIssue::show_frame`.
#[expect(clippy::too_many_arguments)]
fn show_video_frame(
    ctx: &ViewContext<'_>,
    visualizer_data: &mut SpatialViewVisualizerData,
    entity_path: &EntityPath,
    world_from_entity: glam::Affine3A,
    highlight: &re_viewer_context::ViewOutlineMasks,
    fallback_video_size: glam::Vec2,
    visualizer_instruction: VisualizerInstructionId,
    frame: Option<VideoFrameRenderInfo>,
    issue: Option<VideoPlaybackIssue>,
) {
    // With no issue present, the frame is always shown.
    let show_frame = issue.as_ref().map(|issue| issue.show_frame).unwrap_or(true);

    // Prefer the actual texture dimensions; fall back to the caller-supplied
    // size when no texture is available (yet).
    let video_size = frame
        .as_ref()
        .and_then(|f| f.texture.texture.as_ref())
        .map(|t| glam::vec2(t.width() as _, t.height() as _))
        .unwrap_or(fallback_video_size);

    // The video rect spans `video_size` in the entity's XY plane, transformed
    // into world space.
    let top_left_corner_position = world_from_entity.transform_point3(glam::Vec3::ZERO);
    let extent_u = world_from_entity.transform_vector3(glam::Vec3::X * video_size.x);
    let extent_v = world_from_entity.transform_vector3(glam::Vec3::Y * video_size.y);

    // Set if we actually submit a textured rect below; otherwise we register
    // plain bounds at the end so the view still frames the video area.
    let mut has_rendered_texture = false;
    // Depth offset of the rendered frame; reused for the error icon so it
    // draws at the same depth as the video.
    let mut depth_offset = 0;

    // A `Loading`-severity issue takes precedence as the spinner reason;
    // otherwise show a spinner while the decoder reports it is catching up.
    let loading_indicator_reason = if let Some(issue) = &issue {
        if matches!(issue.severity, VideoPlaybackIssueSeverity::Loading) {
            Some(issue.message.clone())
        } else {
            None
        }
    } else if let Some(frame) = &frame
        && frame.texture.show_loading_indicator
    {
        Some(format!("Decoder: {:?}", frame.texture.decoder_delay_state))
    } else {
        None
    };
    if let Some(reason) = loading_indicator_reason {
        visualizer_data.loading_indicators.push(LoadingIndicator {
            // Spinner sits at the center of the video rect.
            center: top_left_corner_position + 0.5 * (extent_u + extent_v),
            half_extent_u: 0.5 * extent_u,
            half_extent_v: 0.5 * extent_v,
            reason,
        });
    }

    if let Some(frame) = frame
        && show_frame
    {
        let re_renderer::video::VideoFrameTexture {
            texture,
            decoder_delay_state,
            show_loading_indicator,
            frame_info: _,
            source_pixel_format: _,
        } = frame.texture;

        // Keep repainting while the decoder still wants to deliver frames.
        if decoder_delay_state.should_request_more_frames() {
            ctx.egui_ctx().request_repaint();
        }

        if let Some(texture) = texture {
            has_rendered_texture = true;

            // Animates between 0 and 1 depending on whether we have a valid,
            // issue-free frame; used below to fade the frame between dimmed
            // (factor 0.5) and full brightness.
            let animated_valid_frame = ctx.egui_ctx().animate_bool(
                egui::Id::new(format!("{entity_path} video loading indicator"))
                    .with(visualizer_instruction),
                issue.is_none() && !show_loading_indicator,
            );

            depth_offset = frame.depth_offset;

            let textured_rect = renderer::TexturedRect {
                top_left_corner_position,
                extent_u,
                extent_v,
                colormapped_texture: renderer::ColormappedTexture::from_unorm_rgba(texture),
                options: renderer::RectangleOptions {
                    // Nearest-neighbor magnification keeps pixels sharp when
                    // zoomed in; linear minification when zoomed out.
                    texture_filter_magnification: renderer::TextureFilterMag::Nearest,
                    texture_filter_minification: renderer::TextureFilterMin::Linear,
                    outline_mask: highlight.overall,
                    depth_offset: frame.depth_offset,
                    multiplicative_tint: frame
                        .multiplicative_tint
                        .multiply(0.5 + 0.5 * animated_valid_frame),
                },
            };
            visualizer_data.add_pickable_rect(
                PickableTexturedRect {
                    ent_path: entity_path.clone(),
                    textured_rect,
                    source_data: PickableRectSourceData::Video,
                },
                ctx.view_class_identifier,
            );
        }
    }

    if !has_rendered_texture {
        // No texture was drawn (missing frame, or suppressed by the issue):
        // still register the video's bounds so 2D views keep a sensible
        // bounding box for this entity.
        register_video_bounds_with_bounding_box(
            entity_path.hash(),
            visualizer_data,
            world_from_entity,
            video_size,
            ctx.view_class_identifier,
        );
    }

    // Everything below presents the issue, if there is one.
    let Some(issue) = issue else {
        return;
    };

    if issue.should_request_more_frames {
        ctx.egui_ctx().request_repaint();
    }

    let style = match issue.severity {
        VideoPlaybackIssueSeverity::Error => UiLabelStyle::Error,
        VideoPlaybackIssueSeverity::Informational => UiLabelStyle::Default,
        VideoPlaybackIssueSeverity::Loading => {
            // Loading issues were already handled via the loading indicator.
            return;
        }
    };

    let render_ctx = ctx.viewer_ctx.render_ctx();

    // Load the error icon through egui's image loader…
    let video_error_image = match re_ui::icons::VIDEO_ERROR
        .load_image(ctx.viewer_ctx.egui_ctx(), egui::SizeHint::default())
    {
        Err(err) => {
            re_log::error_once!("Failed to load video error icon: {err}");
            return;
        }
        Ok(egui::load::ImagePoll::Ready { image }) => image,
        Ok(egui::load::ImagePoll::Pending { .. }) => {
            // Not ready yet; try again on a later frame.
            return;
        }
    };

    // …and upload it into the 2D texture manager, cached under a fixed hash
    // so the upload happens at most once.
    let video_error_texture_result = render_ctx
        .texture_manager_2d
        .get_or_try_create_with::<image::ImageError>(
            Hash64::hash("video_error").hash64(),
            render_ctx,
            || {
                Ok(ImageDataDesc {
                    label: "video_error".into(),
                    data: std::borrow::Cow::Owned(video_error_image.as_raw().to_vec()),
                    format: re_renderer::external::wgpu::TextureFormat::Rgba8UnormSrgb.into(),
                    width_height: [
                        video_error_image.width() as _,
                        video_error_image.height() as _,
                    ],
                    alpha_channel_usage: re_renderer::AlphaChannelUsage::AlphaChannelInUse,
                })
            },
        );

    let Ok(video_error_texture) = video_error_texture_result.inspect_err(|err| {
        re_log::error_once!("Failed to show video error icon: {err}");
    }) else {
        return;
    };

    // Show the icon at half its native resolution, but scaled down further if
    // it would exceed the video rect in either dimension.
    let video_error_rect_size = {
        let mut rect_size = glam::vec2(
            video_error_texture.width() as f32,
            video_error_texture.height() as f32,
        ) / 2.0;
        if rect_size.x > video_size.x {
            let scale = video_size.x / rect_size.x;
            rect_size *= scale;
        }
        if rect_size.y > video_size.y {
            let scale = video_size.y / rect_size.y;
            rect_size *= scale;
        }
        rect_size
    };

    // Center the icon on the video area, using only the translation part of
    // the entity transform (rotation/scale are not applied to the icon).
    let center = glam::Vec3::from(world_from_entity.translation).truncate() + video_size * 0.5;
    let error_icon_top_left = center - video_error_rect_size * 0.5;

    // Label rect is three icon-widths wide, horizontally centered on the icon
    // and sharing its vertical band.
    let label_target_rect = egui::Rect::from_min_size(
        egui::pos2(
            error_icon_top_left.x - video_error_rect_size.x,
            error_icon_top_left.y,
        ),
        egui::vec2(video_error_rect_size.x * 3.0, video_error_rect_size.y),
    );

    visualizer_data.ui_labels.push(UiLabel {
        text: issue.message,
        style,
        target: UiLabelTarget::Rect(label_target_rect),
        labeled_instance: re_entity_db::InstancePathHash::entity_all(entity_path),
        visualizer_instruction,
    });

    let error_rect = renderer::TexturedRect {
        top_left_corner_position: error_icon_top_left.extend(0.0),
        extent_u: glam::Vec3::X * video_error_rect_size.x,
        extent_v: glam::Vec3::Y * video_error_rect_size.y,
        colormapped_texture: renderer::ColormappedTexture::from_unorm_rgba(video_error_texture),
        options: renderer::RectangleOptions {
            texture_filter_magnification: renderer::TextureFilterMag::Linear,
            texture_filter_minification: renderer::TextureFilterMin::Linear,
            outline_mask: highlight.overall,
            // Tint with the theme's default text color so the icon matches
            // the current UI theme.
            multiplicative_tint: egui::Rgba::from(ctx.egui_ctx().tokens().text_default).to_opaque(),
            // Reuse the video frame's depth offset (or 0 if none was drawn).
            depth_offset,
        },
    };
    visualizer_data.add_pickable_rect(
        PickableTexturedRect {
            ent_path: entity_path.clone(),
            textured_rect: error_rect,
            source_data: PickableRectSourceData::Placeholder,
        },
        ctx.view_class_identifier,
    );
}
/// Registers the video's 2D footprint as a bounding box with the visualizer
/// data, so the view can frame the video area even when no texture is drawn.
///
/// Only applies to 2D spatial views; other view classes are ignored.
fn register_video_bounds_with_bounding_box(
    entity_path: EntityPathHash,
    visualizer_data: &mut SpatialViewVisualizerData,
    world_from_entity: glam::Affine3A,
    video_size: glam::Vec2,
    class_identifier: ViewClassIdentifier,
) {
    let is_2d_view = class_identifier == SpatialView2D::identifier();
    if !is_2d_view {
        return;
    }

    // The video occupies an axis-aligned rect starting at the entity origin,
    // extending by `video_size` in X/Y and flat in Z.
    let min = glam::Vec3::from(world_from_entity.translation);
    let max = min + video_size.extend(0.0);
    visualizer_data.add_bounding_box(entity_path, macaw::BoundingBox { min, max }, world_from_entity);
}