use ffmpeg_next::{
Error as FfmpegError, Packet, Rational,
codec::context::Context as CodecContext,
decoder::Video as VideoDecoder,
frame::Video as VideoFrame,
software::scaling::{Context as ScalingContext, Flags as ScalingFlags},
};
use image::{DynamicImage, GrayImage, RgbImage, RgbaImage};
use crate::configuration::{FrameOutputOptions, PixelFormat};
use crate::error::UnbundleError;
use crate::unbundle::MediaFile;
/// Streaming iterator that decodes a media file and yields only the
/// requested video frames, converted to `image` crate types.
///
/// Built by [`FrameIterator::new`]; holds a mutable borrow of the owning
/// [`MediaFile`] for its whole lifetime so it can keep reading packets.
pub struct FrameIterator<'a> {
    // Owner of the demuxer; packets are read from its `input_context`.
    unbundler: &'a mut MediaFile,
    // FFmpeg decoder for the selected video stream.
    decoder: VideoDecoder,
    // Rescales/converts decoded frames to the output size and pixel format.
    scaler: ScalingContext,
    // Index of the video stream packets are filtered on.
    video_stream_index: usize,
    // Requested frame numbers, consumed in order via `target_index`.
    target_frames: Vec<u64>,
    // Position of the next frame number to produce in `target_frames`.
    target_index: usize,
    // Stream time base, used to map PTS values to frame numbers.
    time_base: Rational,
    // Nominal frame rate taken from the video metadata.
    frames_per_second: f64,
    // Output settings (pixel format, target dimensions).
    output_config: FrameOutputOptions,
    // Resolved output width in pixels.
    target_width: u32,
    // Resolved output height in pixels.
    target_height: u32,
    // Scratch frame reused for each decoded frame (avoids reallocation).
    decoded_frame: VideoFrame,
    // Scratch frame reused for each scaled/converted frame.
    scaled_frame: VideoFrame,
    // True once the decoder has been sent EOF (flush started).
    eof_sent: bool,
    // True once iteration has terminated (all targets done or an error).
    done: bool,
}
impl<'a> FrameIterator<'a> {
    /// Builds an iterator that decodes `unbundler` and yields the frames
    /// listed in `frame_numbers`, converted per `output_config`.
    ///
    /// `stream_index`, when supplied, overrides the media file's detected
    /// video stream. If any frames are requested, the demuxer is seeked
    /// (best-effort) to just before the first one.
    ///
    /// # Errors
    /// Returns [`UnbundleError::NoVideoStream`] when no video stream can be
    /// resolved, or an FFmpeg error if the decoder or scaler cannot be set up.
    pub(crate) fn new(
        unbundler: &'a mut MediaFile,
        frame_numbers: Vec<u64>,
        output_config: FrameOutputOptions,
        stream_index: Option<usize>,
    ) -> Result<Self, UnbundleError> {
        // Explicit override wins; otherwise fall back to the detected stream.
        let stream_idx = stream_index
            .or(unbundler.video_stream_index)
            .ok_or(UnbundleError::NoVideoStream)?;
        log::debug!(
            "Creating FrameIterator for {} frames (stream={})",
            frame_numbers.len(),
            stream_idx,
        );

        let meta = unbundler
            .metadata
            .video
            .as_ref()
            .ok_or(UnbundleError::NoVideoStream)?;
        let fps = meta.frames_per_second;
        // Output dimensions honour the config, defaulting to source size.
        let (out_w, out_h) = output_config.resolve_dimensions(meta.width, meta.height);
        let out_pixel = output_config.pixel_format.to_ffmpeg_pixel();

        let stream = unbundler
            .input_context
            .stream(stream_idx)
            .ok_or(UnbundleError::NoVideoStream)?;
        let tb = stream.time_base();

        // Decoder built from the stream's codec parameters.
        let codec_ctx = CodecContext::from_parameters(stream.parameters())?;
        let video_decoder = codec_ctx.decoder().video()?;

        // Scaler converts decoder output to the requested size/format.
        let frame_scaler = ScalingContext::get(
            video_decoder.format(),
            video_decoder.width(),
            video_decoder.height(),
            out_pixel,
            out_w,
            out_h,
            ScalingFlags::BILINEAR,
        )?;

        // Best-effort seek toward the first requested frame; failure is
        // tolerated and decoding simply starts from the current position.
        if !frame_numbers.is_empty() {
            let first_target = frame_numbers[0];
            let seek_ts =
                crate::conversion::frame_number_to_seek_timestamp(first_target, fps);
            let _ = unbundler.input_context.seek(seek_ts, ..seek_ts);
        }

        Ok(Self {
            unbundler,
            decoder: video_decoder,
            scaler: frame_scaler,
            video_stream_index: stream_idx,
            target_frames: frame_numbers,
            target_index: 0,
            time_base: tb,
            frames_per_second: fps,
            output_config,
            target_width: out_w,
            target_height: out_h,
            decoded_frame: VideoFrame::empty(),
            scaled_frame: VideoFrame::empty(),
            eof_sent: false,
            done: false,
        })
    }

    /// Scales the most recently decoded frame into `scaled_frame` and wraps
    /// the pixel data in a [`DynamicImage`] matching the configured format.
    ///
    /// # Errors
    /// Propagates scaler failures, or returns
    /// [`UnbundleError::VideoDecodeError`] when the raw buffer cannot back
    /// an image of the target dimensions.
    fn convert_current_frame(&mut self) -> Result<DynamicImage, UnbundleError> {
        self.scaler
            .run(&self.decoded_frame, &mut self.scaled_frame)?;
        let (w, h) = (self.target_width, self.target_height);

        // Bytes per pixel for the configured output format.
        let channels = match self.output_config.pixel_format {
            PixelFormat::Rgb8 => 3,
            PixelFormat::Rgba8 => 4,
            PixelFormat::Gray8 => 1,
        };
        let buffer = crate::conversion::frame_to_buffer(&self.scaled_frame, w, h, channels);

        match self.output_config.pixel_format {
            PixelFormat::Rgb8 => RgbImage::from_raw(w, h, buffer)
                .map(DynamicImage::ImageRgb8)
                .ok_or_else(|| {
                    UnbundleError::VideoDecodeError(
                        "Failed to construct RGB image from decoded frame data".to_string(),
                    )
                }),
            PixelFormat::Rgba8 => RgbaImage::from_raw(w, h, buffer)
                .map(DynamicImage::ImageRgba8)
                .ok_or_else(|| {
                    UnbundleError::VideoDecodeError(
                        "Failed to construct RGBA image from decoded frame data".to_string(),
                    )
                }),
            PixelFormat::Gray8 => GrayImage::from_raw(w, h, buffer)
                .map(DynamicImage::ImageLuma8)
                .ok_or_else(|| {
                    UnbundleError::VideoDecodeError(
                        "Failed to construct grayscale image from decoded frame data".to_string(),
                    )
                }),
        }
    }
}
impl Iterator for FrameIterator<'_> {
    /// `(frame_number, image)` for each requested frame, in ascending
    /// order, or an error that permanently ends the iteration.
    type Item = Result<(u64, DynamicImage), UnbundleError>;

    fn next(&mut self) -> Option<Self::Item> {
        // Finished: a prior error/EOF ended decoding, or every requested
        // frame has already been produced.
        if self.done || self.target_index >= self.target_frames.len() {
            return None;
        }
        loop {
            // Drain any frame the decoder already holds before feeding it
            // more packets (FFmpeg send/receive decoding model).
            if self.decoder.receive_frame(&mut self.decoded_frame).is_ok() {
                let pts = self.decoded_frame.pts().unwrap_or(0);
                let current_frame = crate::conversion::pts_to_frame_number(
                    pts,
                    self.time_base,
                    self.frames_per_second,
                );
                // Skip targets the decode position has already passed
                // (e.g. after an imprecise seek). NOTE(review): such
                // targets are dropped silently rather than reported —
                // confirm that is the intended contract.
                while self.target_index < self.target_frames.len()
                    && self.target_frames[self.target_index] < current_frame
                {
                    self.target_index += 1;
                }
                if self.target_index >= self.target_frames.len() {
                    self.done = true;
                    return None;
                }
                if current_frame == self.target_frames[self.target_index] {
                    match self.convert_current_frame() {
                        Ok(image) => {
                            let frame_number = current_frame;
                            self.target_index += 1;
                            return Some(Ok((frame_number, image)));
                        }
                        Err(e) => {
                            // Conversion failure is fatal for the iterator.
                            self.done = true;
                            return Some(Err(e));
                        }
                    }
                }
                // Decoded frame is not a target; keep decoding.
                continue;
            }
            // Decoder has no frame ready. If EOF was already flushed there
            // is nothing left to drain, so iteration is over.
            if self.eof_sent {
                self.done = true;
                return None;
            }
            // Pull the next packet from the demuxer.
            let mut packet = Packet::empty();
            match packet.read(&mut self.unbundler.input_context) {
                Ok(()) => {
                    // Only feed packets from the selected video stream;
                    // other streams (audio, subtitles) are discarded.
                    if packet.stream() == self.video_stream_index {
                        if let Err(e) = self.decoder.send_packet(&packet) {
                            self.done = true;
                            return Some(Err(UnbundleError::from(e)));
                        }
                    }
                }
                Err(FfmpegError::Eof) => {
                    // Flush the decoder so any buffered frames are emitted
                    // by subsequent receive_frame calls.
                    if let Err(e) = self.decoder.send_eof() {
                        self.done = true;
                        return Some(Err(UnbundleError::from(e)));
                    }
                    self.eof_sent = true;
                }
                Err(_) => {
                    // Non-EOF read errors are ignored and the read retried.
                    // NOTE(review): a *persistent* read error would make
                    // this loop spin forever — confirm this is intended
                    // best-effort skipping of transient/corrupt packets.
                }
            }
        }
    }
}