use crate::error::NativeErrorExt;
use crate::formats::yuv2rgb::{write_rgb8_f32x8, write_rgb8_scalar, write_rgba8_f32x8, write_rgba8_scalar};
use crate::formats::{YUVSlices, YUVSource};
use crate::{Error, OpenH264API, Timestamp};
use openh264_sys2::{
API, DECODER_OPTION, DECODER_OPTION_ERROR_CON_IDC, DECODER_OPTION_NUM_OF_FRAMES_REMAINING_IN_BUFFER,
DECODER_OPTION_NUM_OF_THREADS, DECODER_OPTION_TRACE_LEVEL, DECODING_STATE, ISVCDecoder, ISVCDecoderVtbl, SBufferInfo,
SDecodingParam, SParserBsInfo, SSysMEMBuffer, SVideoProperty, TagBufferInfo, WELS_LOG_DETAIL, WELS_LOG_QUIET,
videoFormatI420,
};
use std::os::raw::{c_int, c_long, c_uchar, c_void};
use std::ptr::{addr_of_mut, from_mut, null, null_mut};
/// Low-level wrapper around the raw OpenH264 `ISVCDecoder` vtable.
///
/// Every vtable entry is resolved once at construction time (see `DecoderRawAPI::new`)
/// so later calls invoke plain function pointers without re-checking `Option`s.
#[rustfmt::skip]
#[allow(non_snake_case)]
pub struct DecoderRawAPI {
    // Keeps the loaded OpenH264 API alive for as long as `decoder_ptr` is used.
    api: OpenH264API,
    // The decoder instance created by `WelsCreateDecoder` (a pointer to its vtable pointer).
    decoder_ptr: *mut *const ISVCDecoderVtbl,
    // Function pointers resolved from the vtable. Signatures mirror the C API; the first
    // argument is always the decoder instance (`this`) pointer.
    initialize: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pParam: *const SDecodingParam) -> c_long,
    uninitialize: unsafe extern "C" fn(arg1: *mut ISVCDecoder) -> c_long,
    decode_frame: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pSrc: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pStride: *mut c_int, iWidth: *mut c_int, iHeight: *mut c_int) -> DECODING_STATE,
    decode_frame_no_delay: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pSrc: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE,
    decode_frame2: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pSrc: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE,
    flush_frame: unsafe extern "C" fn(arg1: *mut ISVCDecoder, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE,
    decode_parser: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pSrc: *const c_uchar, iSrcLen: c_int, pDstInfo: *mut SParserBsInfo) -> DECODING_STATE,
    decode_frame_ex: unsafe extern "C" fn(arg1: *mut ISVCDecoder, pSrc: *const c_uchar, iSrcLen: c_int, pDst: *mut c_uchar, iDstStride: c_int, iDstLen: *mut c_int, iWidth: *mut c_int, iHeight: *mut c_int, iColorFormat: *mut c_int) -> DECODING_STATE,
    set_option: unsafe extern "C" fn(arg1: *mut ISVCDecoder, eOptionId: DECODER_OPTION, pOption: *mut c_void) -> c_long,
    get_option: unsafe extern "C" fn(arg1: *mut ISVCDecoder, eOptionId: DECODER_OPTION, pOption: *mut c_void) -> c_long,
}
#[rustfmt::skip]
#[allow(clippy::too_many_arguments)]
#[allow(clippy::missing_safety_doc)]
#[allow(non_snake_case, unused, missing_docs)]
impl DecoderRawAPI {
fn new(api: OpenH264API) -> Result<Self, Error> {
unsafe {
let mut decoder_ptr = null::<ISVCDecoderVtbl>() as *mut *const ISVCDecoderVtbl;
api.WelsCreateDecoder(from_mut(&mut decoder_ptr)).ok()?;
let e = || {
Error::msg("VTable missing function.")
};
Ok(Self {
api,
decoder_ptr,
initialize: (*(*decoder_ptr)).Initialize.ok_or_else(e)?,
uninitialize: (*(*decoder_ptr)).Uninitialize.ok_or_else(e)?,
decode_frame: (*(*decoder_ptr)).DecodeFrame.ok_or_else(e)?,
decode_frame_no_delay: (*(*decoder_ptr)).DecodeFrameNoDelay.ok_or_else(e)?,
decode_frame2: (*(*decoder_ptr)).DecodeFrame2.ok_or_else(e)?,
flush_frame: (*(*decoder_ptr)).FlushFrame.ok_or_else(e)?,
decode_parser: (*(*decoder_ptr)).DecodeParser.ok_or_else(e)?,
decode_frame_ex: (*(*decoder_ptr)).DecodeFrameEx.ok_or_else(e)?,
set_option: (*(*decoder_ptr)).SetOption.ok_or_else(e)?,
get_option: (*(*decoder_ptr)).GetOption.ok_or_else(e)?,
})
}
}
unsafe fn initialize(&self, pParam: *const SDecodingParam) -> c_long { unsafe { (self.initialize)(self.decoder_ptr, pParam) }}
unsafe fn uninitialize(&self, ) -> c_long { unsafe { (self.uninitialize)(self.decoder_ptr) }}
pub unsafe fn decode_frame(&self, Src: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pStride: *mut c_int, iWidth: *mut c_int, iHeight: *mut c_int) -> DECODING_STATE { unsafe { (self.decode_frame)(self.decoder_ptr, Src, iSrcLen, ppDst, pStride, iWidth, iHeight) }}
pub unsafe fn decode_frame_no_delay(&self, pSrc: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE { unsafe { (self.decode_frame_no_delay)(self.decoder_ptr, pSrc, iSrcLen, ppDst, pDstInfo) }}
pub unsafe fn decode_frame2(&self, pSrc: *const c_uchar, iSrcLen: c_int, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE { unsafe { (self.decode_frame2)(self.decoder_ptr, pSrc, iSrcLen, ppDst, pDstInfo) }}
pub unsafe fn flush_frame(&self, ppDst: *mut *mut c_uchar, pDstInfo: *mut SBufferInfo) -> DECODING_STATE { unsafe { (self.flush_frame)(self.decoder_ptr, ppDst, pDstInfo) }}
pub unsafe fn decode_parser(&self, pSrc: *const c_uchar, iSrcLen: c_int, pDstInfo: *mut SParserBsInfo) -> DECODING_STATE { unsafe { (self.decode_parser)(self.decoder_ptr, pSrc, iSrcLen, pDstInfo) }}
pub unsafe fn decode_frame_ex(&self, pSrc: *const c_uchar, iSrcLen: c_int, pDst: *mut c_uchar, iDstStride: c_int, iDstLen: *mut c_int, iWidth: *mut c_int, iHeight: *mut c_int, iColorFormat: *mut c_int) -> DECODING_STATE { unsafe { (self.decode_frame_ex)(self.decoder_ptr, pSrc, iSrcLen, pDst, iDstStride, iDstLen, iWidth, iHeight, iColorFormat) }}
pub unsafe fn set_option(&self, eOptionId: DECODER_OPTION, pOption: *mut c_void) -> c_long { unsafe { (self.set_option)(self.decoder_ptr, eOptionId, pOption) }}
pub unsafe fn get_option(&self, eOptionId: DECODER_OPTION, pOption: *mut c_void) -> c_long { unsafe { (self.get_option)(self.decoder_ptr, eOptionId, pOption) }}
}
impl Drop for DecoderRawAPI {
    fn drop(&mut self) {
        // SAFETY: `decoder_ptr` was created by `WelsCreateDecoder` in `new()` and is destroyed
        // exactly once here; `self.api` is still alive because it is a field of the same struct.
        unsafe {
            self.api.WelsDestroyDecoder(self.decoder_ptr);
        }
    }
}
// SAFETY(review): the raw pointers are owned exclusively by this struct and all mutating calls
// go through it; this presumes the OpenH264 decoder instance has no thread affinity — TODO
// confirm against upstream threading guarantees.
unsafe impl Send for DecoderRawAPI {}
// SAFETY(review): shared references only expose `unsafe fn`s, pushing synchronization
// responsibility to the caller — verify this matches upstream's intent.
unsafe impl Sync for DecoderRawAPI {}
/// Controls whether buffered frames should be flushed after a decode call.
#[derive(Default, Copy, Clone, Debug, Eq, PartialEq)]
pub enum Flush {
    /// Defer the decision to the other configuration level (decoder config vs. per-call options).
    #[default]
    Auto,
    /// Flush after decoding.
    Flush,
    /// Do not flush after decoding.
    NoFlush,
}
impl Flush {
    #[allow(clippy::match_same_arms)]
    #[allow(clippy::needless_pass_by_value)]
    /// Resolves whether a flush should happen after decoding.
    ///
    /// An explicit per-call choice (`decoder_options`) takes precedence; `Auto` defers to the
    /// decoder-level setting (`self`), where only `NoFlush` suppresses the flush.
    const fn should_flush(self, decoder_options: DecodeOptions) -> bool {
        match decoder_options.flush_after_decode {
            Self::Flush => true,
            Self::NoFlush => false,
            // Per-call `Auto`: fall back to the decoder-level configuration.
            Self::Auto => match self {
                Self::NoFlush => false,
                Self::Auto | Self::Flush => true,
            },
        }
    }
}
/// Configuration for a [`Decoder`], applied during construction.
#[derive(Default, Copy, Clone, Debug)]
#[must_use]
pub struct DecoderConfig {
    // Raw parameters passed verbatim to `ISVCDecoder::Initialize`.
    params: SDecodingParam,
    // Applied via `DECODER_OPTION_NUM_OF_THREADS`.
    num_threads: DECODER_OPTION,
    // Trace level, applied via `DECODER_OPTION_TRACE_LEVEL`.
    debug: DECODER_OPTION,
    // Applied via `DECODER_OPTION_ERROR_CON_IDC`.
    error_concealment: DECODER_OPTION,
    // Decoder-level flush behavior; per-call `DecodeOptions` may override it (see `Flush`).
    flush_after_decode: Flush,
}
// SAFETY: the only non-`Send`/`Sync` content is the raw `pFileNameRestructed` pointer inside
// `params`, which is set to null in `new()` and never assigned by any setter in this file.
unsafe impl Send for DecoderConfig {}
// SAFETY: see `Send` above — the struct is plain data with an always-null raw pointer.
unsafe impl Sync for DecoderConfig {}
impl DecoderConfig {
    /// Creates a new default decoder configuration: quiet logging, error concealment off,
    /// thread count `0` (library default), and flushing after each decode call.
    pub const fn new() -> Self {
        Self {
            params: SDecodingParam {
                // Unused debug/reconstruction file name; stays null (see `Send`/`Sync` notes).
                pFileNameRestructed: null_mut(),
                uiCpuLoad: 0,
                uiTargetDqLayer: 0,
                // 0: presumably error concealment disabled at the parameter level — confirm
                // against the openh264 `ERROR_CON_IDC` constants.
                eEcActiveIdc: 0,
                bParseOnly: false,
                sVideoProperty: SVideoProperty {
                    size: 0,
                    eVideoBsType: 0,
                },
            },
            num_threads: 0,
            debug: WELS_LOG_QUIET,
            error_concealment: 0,
            flush_after_decode: Flush::Flush,
        }
    }
    /// Sets the number of decoder threads.
    ///
    /// # Safety
    ///
    /// Marked `unsafe` by this crate; presumably multi-threaded decoding can trigger unsound
    /// behavior in the underlying library — TODO confirm against upstream documentation.
    /// Note: values above `i32::MAX` would wrap in the `as i32` cast below.
    pub const unsafe fn num_threads(mut self, num_threads: u32) -> Self {
        self.num_threads = num_threads as i32;
        self
    }
    /// Enables detailed decoder logging (`true`) or silences it (`false`, the default).
    pub const fn debug(mut self, value: bool) -> Self {
        self.debug = if value { WELS_LOG_DETAIL } else { WELS_LOG_QUIET };
        self
    }
    /// Sets the decoder-level flush behavior; a per-call `DecodeOptions` value other than
    /// `Flush::Auto` takes precedence (see `Flush::should_flush`).
    pub const fn flush_after_decode(mut self, flush_behavior: Flush) -> Self {
        self.flush_after_decode = flush_behavior;
        self
    }
}
/// Per-call options for [`Decoder::decode_with_options`].
#[derive(Default, Clone, Debug, Eq, PartialEq)]
pub struct DecodeOptions {
    // Per-call flush preference; `Flush::Auto` defers to the decoder's configuration.
    flush_after_decode: Flush,
}
impl DecodeOptions {
    /// Creates default options which defer flushing to the decoder's configuration.
    #[must_use]
    pub const fn new() -> Self {
        Self { flush_after_decode: Flush::Auto }
    }
    /// Sets the flush behavior applied after the decode call these options are passed to.
    #[must_use]
    pub const fn flush_after_decode(self, value: Flush) -> Self {
        // `DecodeOptions` carries no other state, so constructing a fresh value is
        // equivalent to overwriting the field on `self`.
        Self { flush_after_decode: value }
    }
}
/// Safe H.264 decoder wrapping the raw OpenH264 API.
pub struct Decoder {
    // Owns the decoder instance; destroyed after `Decoder::drop` uninitializes it.
    raw_api: DecoderRawAPI,
    // Kept so per-call decisions (e.g. flush behavior) can consult the configuration.
    config: DecoderConfig,
}
impl Decoder {
    /// Creates a decoder using the source-built OpenH264 and the default configuration.
    ///
    /// # Errors
    ///
    /// Fails if the underlying decoder could not be created or initialized.
    #[cfg(feature = "source")]
    pub fn new() -> Result<Self, Error> {
        let api = OpenH264API::from_source();
        Self::with_api_config(api, DecoderConfig::new())
    }
    /// Creates a decoder for the given API backend and configuration.
    ///
    /// Initializes the decoder and applies the configured trace level, thread count and
    /// error concealment options.
    ///
    /// # Errors
    ///
    /// Fails if creation, initialization or applying any option fails.
    pub fn with_api_config(api: OpenH264API, mut config: DecoderConfig) -> Result<Self, Error> {
        let raw_api = DecoderRawAPI::new(api)?;
        // `config` must be `mut` because the C API reads option values through `*mut` pointers.
        #[rustfmt::skip]
        unsafe {
            raw_api.initialize(&raw const config.params).ok()?;
            raw_api.set_option(DECODER_OPTION_TRACE_LEVEL, addr_of_mut!(config.debug).cast()).ok()?;
            raw_api.set_option(DECODER_OPTION_NUM_OF_THREADS, addr_of_mut!(config.num_threads).cast()).ok()?;
            raw_api.set_option(DECODER_OPTION_ERROR_CON_IDC, addr_of_mut!(config.error_concealment).cast()).ok()?;
        };
        Ok(Self { raw_api, config })
    }
    /// Decodes a packet with default options; see [`Self::decode_with_options`].
    ///
    /// # Errors
    ///
    /// Fails if the bitstream could not be decoded.
    pub fn decode(&mut self, packet: &[u8]) -> Result<Option<DecodedYUV<'_>>, Error> {
        self.decode_with_options(packet, DecodeOptions::default())
    }
    /// Decodes a packet with explicit options.
    ///
    /// Returns `Ok(None)` when the decoder accepted the packet but no frame is available yet.
    ///
    /// # Errors
    ///
    /// Fails if decoding fails, or if a requested flush reported no frame although the
    /// decoder claimed frames were still buffered.
    pub fn decode_with_options(&mut self, packet: &[u8], options: DecodeOptions) -> Result<Option<DecodedYUV<'_>>, Error> {
        // Y/U/V plane pointers, filled in by the decoder when a frame is produced.
        let mut dst = [null_mut::<u8>(); 3];
        let mut buffer_info = SBufferInfo::default();
        let flush = self.config.flush_after_decode.should_flush(options);
        unsafe {
            self.raw_api
                .decode_frame_no_delay(
                    packet.as_ptr(),
                    // NOTE(review): packets larger than `i32::MAX` bytes would truncate here.
                    packet.len() as i32,
                    from_mut(&mut dst).cast(),
                    &raw mut buffer_info,
                )
                .ok()?;
        }
        // `iBufferStatus == 0` means this call produced no frame.
        match (buffer_info.iBufferStatus, flush) {
            // No frame from this call, but flushing is enabled and frames are still
            // buffered: pull one of those out instead.
            (0, true) if self.num_frames_in_buffer()? > 0 => {
                let (dst, buffer_info) = self.flush_single_frame_raw()?;
                if buffer_info.iBufferStatus == 0 {
                    return Err(Error::msg(
                        "Buffer status invalid, we have outstanding frames but failed to flush them.",
                    ));
                }
                // SAFETY: pointers and layout info come straight from the decoder's flush call.
                unsafe { Ok(DecodedYUV::from_raw_open264_ptrs(&dst, &buffer_info)) }
            }
            (0, _) => Ok(None),
            // SAFETY: pointers and layout info come straight from the successful decode call.
            _ => unsafe { Ok(DecodedYUV::from_raw_open264_ptrs(&dst, &buffer_info)) },
        }
    }
    /// Flushes and returns all frames still buffered inside the decoder.
    ///
    /// # Errors
    ///
    /// Fails if querying the buffer count or flushing a frame fails.
    pub fn flush_remaining(&'_ mut self) -> Result<Vec<DecodedYUV<'_>>, Error> {
        let mut frames = Vec::new();
        for _ in 0..self.num_frames_in_buffer()? {
            let (dst, buffer_info) = self.flush_single_frame_raw()?;
            // SAFETY: pointers come from the decoder; frames with null planes yield `None`
            // and are skipped.
            if let Some(image) = unsafe { DecodedYUV::from_raw_open264_ptrs(&dst, &buffer_info) } {
                frames.push(image);
            }
        }
        Ok(frames)
    }
    /// Grants access to the raw decoder API, e.g. to set options not exposed here.
    ///
    /// # Safety
    ///
    /// Misusing the raw API can put the decoder into a state the safe wrapper does not expect.
    pub const unsafe fn raw_api(&mut self) -> &mut DecoderRawAPI {
        &mut self.raw_api
    }
    /// Queries how many decoded frames are still buffered inside the decoder.
    fn num_frames_in_buffer(&mut self) -> Result<usize, Error> {
        let mut num_frames: DECODER_OPTION = 0;
        unsafe {
            self.raw_api()
                .get_option(
                    DECODER_OPTION_NUM_OF_FRAMES_REMAINING_IN_BUFFER,
                    addr_of_mut!(num_frames).cast(),
                )
                .ok()?;
        }
        Ok(num_frames as usize)
    }
    /// Flushes a single buffered frame, returning the raw plane pointers and buffer info.
    ///
    /// Callers must still inspect `iBufferStatus` / pointer nullness: a successful call
    /// does not guarantee a frame was produced.
    fn flush_single_frame_raw(&mut self) -> Result<([*mut u8; 3], TagBufferInfo), Error> {
        let mut dst = [null_mut::<u8>(); 3];
        let mut buffer_info = SBufferInfo::default();
        unsafe {
            self.raw_api()
                .flush_frame(from_mut(&mut dst).cast(), &raw mut buffer_info)
                .ok()?;
            Ok((dst, buffer_info))
        }
    }
}
impl Drop for Decoder {
    fn drop(&mut self) {
        // SAFETY: the decoder was initialized in `with_api_config`; uninitialize it here,
        // after which `raw_api`'s own `Drop` destroys the instance.
        unsafe {
            self.raw_api.uninitialize();
        }
    }
}
/// A decoded I420 frame, borrowing the decoder's internal plane buffers.
#[derive(Debug)]
pub struct DecodedYUV<'a> {
    // Layout info (dimensions, strides, format) as reported by the decoder.
    info: SSysMEMBuffer,
    // Presentation timestamp taken from the bitstream.
    timestamp: Timestamp,
    // Luma plane: `iHeight * iStride[0]` bytes.
    y: &'a [u8],
    // Chroma planes: half height at `iStride[1]` each.
    u: &'a [u8],
    v: &'a [u8],
}
impl DecodedYUV<'_> {
    /// Builds a frame view from the plane pointers and buffer info returned by OpenH264.
    ///
    /// Returns `None` if any plane pointer is null (no frame was produced).
    ///
    /// # Safety
    ///
    /// The pointers in `dst` must point to planes matching the layout in `buffer_info`,
    /// and must remain valid for the lifetime of the returned value.
    const unsafe fn from_raw_open264_ptrs(dst: &[*mut u8; 3], buffer_info: &TagBufferInfo) -> Option<Self> {
        unsafe {
            // Union read: only meaningful for system-memory output (hence the `unsafe`).
            let info = buffer_info.UsrData.sSystemBuffer;
            let timestamp = Timestamp::from_millis(buffer_info.uiInBsTimeStamp);
            if dst[0].is_null() || dst[1].is_null() || dst[2].is_null() {
                None
            } else {
                // Y plane: full height at stride 0. U/V planes (I420): half height at
                // stride 1, hence `iHeight * iStride[1] / 2` bytes each.
                let y = std::slice::from_raw_parts(dst[0], (info.iHeight * info.iStride[0]) as usize);
                let u = std::slice::from_raw_parts(dst[1], (info.iHeight * info.iStride[1] / 2) as usize);
                let v = std::slice::from_raw_parts(dst[2], (info.iHeight * info.iStride[1] / 2) as usize);
                Some(Self {
                    info,
                    timestamp,
                    y,
                    u,
                    v,
                })
            }
        }
    }
    /// Returns the dimensions of the chroma (U/V) planes — half the luma dimensions.
    #[must_use]
    pub const fn dimensions_uv(&self) -> (usize, usize) {
        (self.info.iWidth as usize / 2, self.info.iHeight as usize / 2)
    }
    /// Returns the frame's presentation timestamp (from the bitstream, in milliseconds).
    #[must_use]
    pub const fn timestamp(&self) -> Timestamp {
        self.timestamp
    }
    /// Splits the frame into `N` horizontal bands, e.g. for parallel processing.
    ///
    /// NOTE(review): assumes the plane lengths and `iHeight` are divisible by `N`;
    /// otherwise `chunks()` yields an extra short chunk and the band height is rounded
    /// down — confirm callers uphold this.
    pub fn split<const N: usize>(&'_ self) -> [YUVSlices<'_>; N] {
        if N == 1 {
            return [YUVSlices::new((self.y, self.u, self.v), self.dimensions(), self.strides()); N];
        }
        let y_chunks: Vec<&[u8]> = self.y.chunks(self.y.len() / N).collect();
        let u_chunks: Vec<&[u8]> = self.u.chunks(self.u.len() / N).collect();
        let v_chunks: Vec<&[u8]> = self.v.chunks(self.v.len() / N).collect();
        // Seed the array with full-frame slices (array-repeat needs a copyable element),
        // then overwrite each entry with its band below.
        let mut parts = [YUVSlices::new((self.y, self.u, self.v), self.dimensions(), self.strides()); N];
        for i in 0..N {
            parts[i] = YUVSlices::new(
                (y_chunks[i], u_chunks[i], v_chunks[i]),
                (self.dimensions().0, self.info.iHeight as usize / N),
                self.strides(),
            );
        }
        parts
    }
    /// Converts the frame to packed RGB8 into `target` (`width * height * 3` bytes).
    ///
    /// # Panics
    ///
    /// Panics if the frame is not I420 or `target`'s length does not match the dimensions.
    #[allow(clippy::unnecessary_cast)]
    pub fn write_rgb8(&self, target: &mut [u8]) {
        let dim = self.dimensions();
        let strides = self.strides();
        let wanted = dim.0 * dim.1 * 3;
        assert_eq!(self.info.iFormat, videoFormatI420 as i32);
        assert_eq!(
            target.len(),
            wanted,
            "Target RGB8 array does not match image dimensions. Wanted: {} * {} * 3 = {}, got {}",
            dim.0,
            dim.1,
            wanted,
            target.len()
        );
        // SIMD path handles 8 pixels at a time and needs at least two rows.
        if dim.0 % 8 == 0 && dim.1 >= 2 {
            write_rgb8_f32x8(self.y, self.u, self.v, dim, strides, target);
        } else {
            write_rgb8_scalar(self.y, self.u, self.v, dim, strides, target);
        }
    }
    /// Converts the frame to packed RGBA8 into `target` (`width * height * 4` bytes).
    ///
    /// # Panics
    ///
    /// Panics if the frame is not I420 or `target`'s length does not match the dimensions.
    #[allow(clippy::unnecessary_cast)]
    pub fn write_rgba8(&self, target: &mut [u8]) {
        let dim = self.dimensions();
        let strides = self.strides();
        let wanted = dim.0 * dim.1 * 4;
        assert_eq!(self.info.iFormat, videoFormatI420 as i32);
        assert_eq!(
            target.len(),
            wanted,
            "Target RGBA8 array does not match image dimensions. Wanted: {} * {} * 4 = {}, got {}",
            dim.0,
            dim.1,
            wanted,
            target.len()
        );
        // SIMD path handles 8 pixels at a time and needs at least two rows.
        if dim.0 % 8 == 0 && dim.1 >= 2 {
            write_rgba8_f32x8(self.y, self.u, self.v, dim, strides, target);
        } else {
            write_rgba8_scalar(self.y, self.u, self.v, dim, strides, target);
        }
    }
}
impl YUVSource for DecodedYUV<'_> {
    /// Frame dimensions in pixels, exactly as reported by the decoder.
    fn dimensions_i32(&self) -> (i32, i32) {
        let info = &self.info;
        (info.iWidth, info.iHeight)
    }
    /// Frame dimensions in pixels as `usize`.
    fn dimensions(&self) -> (usize, usize) {
        let (width, height) = self.dimensions_i32();
        (width as usize, height as usize)
    }
    /// Per-plane strides in bytes as `usize`.
    fn strides(&self) -> (usize, usize, usize) {
        let (y, u, v) = self.strides_i32();
        (y as usize, u as usize, v as usize)
    }
    /// Per-plane strides in bytes; U and V share the second stride entry.
    fn strides_i32(&self) -> (i32, i32, i32) {
        let chroma_stride = self.info.iStride[1];
        (self.info.iStride[0], chroma_stride, chroma_stride)
    }
    /// Borrows the luma plane.
    fn y(&self) -> &[u8] {
        self.y
    }
    /// Borrows the U chroma plane.
    fn u(&self) -> &[u8] {
        self.u
    }
    /// Borrows the V chroma plane.
    fn v(&self) -> &[u8] {
        self.v
    }
}
#[cfg(test)]
mod test {
    use openh264_sys2::SSysMEMBuffer;
    use crate::{
        Timestamp,
        formats::{YUVSlices, YUVSource},
    };
    use super::DecodedYUV;
    // Builds deterministic I420 planes: Y holds `y_stride * height` bytes, U and V a quarter
    // of that each (half width, half height).
    macro_rules! yuv420_planes {
        (y_stride: $y_stride:literal, height: $height:literal) => {{
            // Repeating 0..=255 pattern so plane contents are position-dependent.
            let numbers = (0..u32::MAX).map(|i| (i % 256) as u8);
            let y_plane_len = ($y_stride * $height) as usize;
            let y = numbers.clone().take(y_plane_len).collect::<Vec<_>>();
            let uv_plane_len = ($y_stride * $height / 4) as usize;
            let u = numbers.clone().take(uv_plane_len).collect::<Vec<_>>();
            let v = numbers.clone().take(uv_plane_len).collect::<Vec<_>>();
            (y, u, v)
        }};
    }
    // Constructs a `DecodedYUV` directly from borrowed planes, bypassing the decoder.
    macro_rules! decoded_yuv420 {
        (y_stride: $y_stride:literal, dim: ($width:literal, $height:literal), $y:expr, $u:expr, $v:expr) => {
            DecodedYUV {
                info: SSysMEMBuffer {
                    iWidth: $width,
                    iHeight: $height,
                    // 23: presumably `videoFormatI420` — TODO confirm against openh264_sys2.
                    iFormat: 23,
                    // NOTE: `$y_stride / 2 as i32` parses as `$y_stride / (2 as i32)` because
                    // `as` binds tighter than `/`; the value is the intended UV stride
                    // (half of the Y stride).
                    iStride: [$y_stride as i32, $y_stride / 2 as i32],
                },
                timestamp: Timestamp::ZERO,
                y: $y,
                u: $u,
                v: $v,
            }
        };
    }
    // Splitting into a single band must return the original planes untouched.
    #[test]
    fn test_split_01() {
        let (y, u, v) = yuv420_planes!(y_stride: 4, height: 4);
        let buf = decoded_yuv420!(y_stride: 4, dim: (4, 4), &y, &u, &v);
        let parts: [YUVSlices; 1] = buf.split();
        assert_eq!(1, parts.len());
        assert_eq!(parts[0].y(), y.as_slice());
        assert_eq!(parts[0].u(), u.as_slice());
        assert_eq!(parts[0].v(), v.as_slice());
    }
    // Splitting into four bands and re-concatenating must reproduce each full plane.
    #[test]
    fn test_split_02() {
        let (y, u, v) = yuv420_planes!(y_stride: 132, height: 128);
        let buf = decoded_yuv420!(y_stride: 132, dim: (128, 128), &y, &u, &v);
        let parts: [YUVSlices; 4] = buf.split();
        let (mut y_plane, mut u_plane, mut v_plane) = (vec![], vec![], vec![]);
        for slice in parts {
            y_plane.extend_from_slice(slice.y());
            u_plane.extend_from_slice(slice.u());
            v_plane.extend_from_slice(slice.v());
        }
        assert_eq!(buf.y().len(), y_plane.len());
        assert_eq!(buf.y(), y_plane);
        assert_eq!(buf.u().len(), u_plane.len());
        assert_eq!(buf.u(), u_plane);
        assert_eq!(buf.v().len(), v_plane.len());
        assert_eq!(buf.v(), v_plane);
    }
}
}