use crate::error::NativeErrorExt;
use crate::formats::YUVSource;
use crate::{Error, OpenH264API, Timestamp};
use openh264_sys2::{
API, DEBLOCKING_IDC_0, ELevelIdc, ENCODER_OPTION, ENCODER_OPTION_DATAFORMAT, ENCODER_OPTION_SVC_ENCODE_PARAM_EXT,
ENCODER_OPTION_TRACE_LEVEL, EProfileIdc, EUsageType, EVideoFormatType, ISVCEncoder, ISVCEncoderVtbl, RC_MODES, SEncParamBase,
SEncParamExt, SFrameBSInfo, SLayerBSInfo, SM_SINGLE_SLICE, SM_SIZELIMITED_SLICE, SSourcePicture, VIDEO_CODING_LAYER,
WELS_LOG_DETAIL, WELS_LOG_QUIET, videoFormatI420,
};
use std::os::raw::{c_int, c_uchar, c_void};
use std::ptr::{addr_of_mut, from_mut, null, null_mut};
/// Low-level wrapper around a native OpenH264 `ISVCEncoder` instance.
///
/// Holds the API handle (keeping the library alive), the raw encoder pointer
/// and the function pointers resolved once from the encoder's vtable.
#[rustfmt::skip]
#[allow(non_snake_case)]
pub struct EncoderRawAPI {
    // Keeps the OpenH264 library/API alive while this encoder exists.
    api: OpenH264API,
    // Raw encoder created by `WelsCreateSVCEncoder`; destroyed in `Drop`.
    encoder_ptr: *mut *const ISVCEncoderVtbl,
    // Function pointers cached from the encoder's vtable in `new`.
    initialize: unsafe extern "C" fn(arg1: *mut ISVCEncoder, pParam: *const SEncParamBase) -> c_int,
    initialize_ext: unsafe extern "C" fn(arg1: *mut ISVCEncoder, pParam: *const SEncParamExt) -> c_int,
    get_default_params: unsafe extern "C" fn(arg1: *mut ISVCEncoder, pParam: *mut SEncParamExt) -> c_int,
    uninitialize: unsafe extern "C" fn(arg1: *mut ISVCEncoder) -> c_int,
    encode_frame: unsafe extern "C" fn(arg1: *mut ISVCEncoder, kpSrcPic: *const SSourcePicture, pBsInfo: *mut SFrameBSInfo) -> c_int,
    encode_parameter_sets: unsafe extern "C" fn(arg1: *mut ISVCEncoder, pBsInfo: *mut SFrameBSInfo) -> c_int,
    force_intra_frame: unsafe extern "C" fn(arg1: *mut ISVCEncoder, bIDR: bool) -> c_int,
    set_option: unsafe extern "C" fn(arg1: *mut ISVCEncoder, eOptionId: ENCODER_OPTION, pOption: *mut c_void) -> c_int,
    get_option: unsafe extern "C" fn(arg1: *mut ISVCEncoder, eOptionId: ENCODER_OPTION, pOption: *mut c_void) -> c_int,
}
#[rustfmt::skip]
#[allow(clippy::too_many_arguments)]
#[allow(clippy::missing_safety_doc)]
#[allow(clippy::must_use_candidate)]
#[allow(non_snake_case, unused, missing_docs)]
impl EncoderRawAPI {
fn new(api: OpenH264API) -> Result<Self, Error> {
unsafe {
let mut encoder_ptr = null::<ISVCEncoderVtbl>() as *mut *const ISVCEncoderVtbl;
api.WelsCreateSVCEncoder(from_mut(&mut encoder_ptr)).ok()?;
let e = || {
Error::msg("VTable missing function.")
};
Ok(Self {
api,
encoder_ptr,
initialize: (*(*encoder_ptr)).Initialize.ok_or_else(e)?,
initialize_ext: (*(*encoder_ptr)).InitializeExt.ok_or_else(e)?,
get_default_params: (*(*encoder_ptr)).GetDefaultParams.ok_or_else(e)?,
uninitialize: (*(*encoder_ptr)).Uninitialize.ok_or_else(e)?,
encode_frame: (*(*encoder_ptr)).EncodeFrame.ok_or_else(e)?,
encode_parameter_sets: (*(*encoder_ptr)).EncodeParameterSets.ok_or_else(e)?,
force_intra_frame: (*(*encoder_ptr)).ForceIntraFrame.ok_or_else(e)?,
set_option: (*(*encoder_ptr)).SetOption.ok_or_else(e)?,
get_option: (*(*encoder_ptr)).GetOption.ok_or_else(e)?,
})
}
}
unsafe fn uninitialize(&self) -> c_int { unsafe { (self.uninitialize)(self.encoder_ptr) }}
unsafe fn initialize(&self, pParam: *const SEncParamBase) -> c_int { unsafe { (self.initialize)(self.encoder_ptr, pParam) }}
unsafe fn initialize_ext(&self, pParam: *const SEncParamExt) -> c_int { unsafe { (self.initialize_ext)(self.encoder_ptr, pParam) }}
pub unsafe fn get_default_params(&self, pParam: *mut SEncParamExt) -> c_int { unsafe { (self.get_default_params)(self.encoder_ptr, pParam) }}
pub unsafe fn encode_frame(&self, kpSrcPic: *const SSourcePicture, pBsInfo: *mut SFrameBSInfo) -> c_int { unsafe { (self.encode_frame)(self.encoder_ptr, kpSrcPic, pBsInfo) }}
pub unsafe fn encode_parameter_sets(&self, pBsInfo: *mut SFrameBSInfo) -> c_int { unsafe { (self.encode_parameter_sets)(self.encoder_ptr, pBsInfo) }}
pub unsafe fn force_intra_frame(&self, bIDR: bool) -> c_int { unsafe { (self.force_intra_frame)(self.encoder_ptr, bIDR) }}
pub unsafe fn set_option(&self, eOptionId: ENCODER_OPTION, pOption: *mut c_void) -> c_int { unsafe { (self.set_option)(self.encoder_ptr, eOptionId, pOption) }}
pub unsafe fn get_option(&self, eOptionId: ENCODER_OPTION, pOption: *mut c_void) -> c_int { unsafe { (self.get_option)(self.encoder_ptr, eOptionId, pOption) }}
}
impl Drop for EncoderRawAPI {
    fn drop(&mut self) {
        // SAFETY: `encoder_ptr` was produced by `WelsCreateSVCEncoder` in `new`
        // and is destroyed exactly once here.
        unsafe {
            self.api.WelsDestroySVCEncoder(self.encoder_ptr);
        }
    }
}
// SAFETY(review): assumes the OpenH264 encoder instance may be moved across and
// referenced from multiple threads as long as calls are externally serialized
// (the safe wrapper only exposes it through `&mut Encoder`) — TODO confirm
// against upstream OpenH264 thread-safety guarantees.
unsafe impl Send for EncoderRawAPI {}
unsafe impl Sync for EncoderRawAPI {}
/// Rate-control mode used by the encoder; converted to a native `RC_MODES` value.
#[derive(Copy, Clone, Debug, Default)]
pub enum RateControlMode {
    /// Maps to `RC_QUALITY_MODE` (the default).
    #[default]
    Quality,
    /// Maps to `RC_BITRATE_MODE`.
    Bitrate,
    /// Maps to `RC_BUFFERBASED_MODE`.
    Bufferbased,
    /// Maps to `RC_TIMESTAMP_MODE`.
    Timestamp,
    /// Maps to `RC_BITRATE_MODE_POST_SKIP`.
    BitrateModePostSkip,
    /// Maps to `RC_OFF_MODE` (rate control disabled).
    Off,
}
impl RateControlMode {
const fn to_c(self) -> RC_MODES {
match self {
Self::Quality => openh264_sys2::RC_QUALITY_MODE,
Self::Bitrate => openh264_sys2::RC_BITRATE_MODE,
Self::Bufferbased => openh264_sys2::RC_BUFFERBASED_MODE,
Self::Timestamp => openh264_sys2::RC_TIMESTAMP_MODE,
Self::BitrateModePostSkip => openh264_sys2::RC_BITRATE_MODE_POST_SKIP,
Self::Off => openh264_sys2::RC_OFF_MODE,
}
}
}
/// Strategy for emitting SPS/PPS parameter-set IDs; converted to a native constant.
#[derive(Copy, Clone, Debug, Default)]
pub enum SpsPpsStrategy {
    /// Maps to `CONSTANT_ID` (the default).
    #[default]
    ConstantId,
    /// Maps to `INCREASING_ID`.
    IncreasingId,
    /// Maps to `SPS_LISTING`.
    SpsListing,
    /// Maps to `SPS_LISTING_AND_PPS_INCREASING`.
    SpsListingAndPpsIncreasing,
    /// Maps to `SPS_PPS_LISTING`.
    SpsPpsListing,
}
impl SpsPpsStrategy {
    /// Converts to the native parameter-set strategy constant.
    ///
    /// The return type was previously declared as `RC_MODES`, but `CONSTANT_ID` and
    /// friends belong to `EParameterSetStrategy` (this field is assigned to
    /// `SEncParamExt::eSpsPpsIdStrategy`). Both alias the same underlying integer,
    /// so this is a type-correctness fix with no behavioral change.
    const fn to_c(self) -> openh264_sys2::EParameterSetStrategy {
        match self {
            Self::ConstantId => openh264_sys2::CONSTANT_ID,
            Self::IncreasingId => openh264_sys2::INCREASING_ID,
            Self::SpsListing => openh264_sys2::SPS_LISTING,
            Self::SpsListingAndPpsIncreasing => openh264_sys2::SPS_LISTING_AND_PPS_INCREASING,
            Self::SpsPpsListing => openh264_sys2::SPS_PPS_LISTING,
        }
    }
}
/// Intended content/usage profile for the encoder; converted to a native `EUsageType`.
#[derive(Copy, Clone, Debug, Default)]
pub enum UsageType {
    /// Maps to `CAMERA_VIDEO_REAL_TIME` (the default).
    #[default]
    CameraVideoRealTime,
    /// Maps to `SCREEN_CONTENT_REAL_TIME`.
    ScreenContentRealTime,
    /// Maps to `CAMERA_VIDEO_NON_REAL_TIME`.
    CameraVideoNonRealTime,
    /// Maps to `SCREEN_CONTENT_NON_REAL_TIME`.
    ScreenContentNonRealTime,
    /// Maps to `INPUT_CONTENT_TYPE_ALL`.
    InputContentTypeAll,
}
impl UsageType {
const fn to_c(self) -> EUsageType {
match self {
Self::CameraVideoRealTime => openh264_sys2::CAMERA_VIDEO_REAL_TIME,
Self::ScreenContentRealTime => openh264_sys2::SCREEN_CONTENT_REAL_TIME,
Self::CameraVideoNonRealTime => openh264_sys2::CAMERA_VIDEO_NON_REAL_TIME,
Self::ScreenContentNonRealTime => openh264_sys2::SCREEN_CONTENT_NON_REAL_TIME,
Self::InputContentTypeAll => openh264_sys2::INPUT_CONTENT_TYPE_ALL,
}
}
}
/// A target bit rate, stored as bits per second.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct BitRate(u32);

impl BitRate {
    /// Creates a bit rate from a bits-per-second value.
    #[must_use]
    pub const fn from_bps(bps: u32) -> Self {
        BitRate(bps)
    }
}
/// A frame rate, stored in frames per second (Hz).
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Default)]
pub struct FrameRate(f32);

impl FrameRate {
    /// Creates a frame rate from a frames-per-second value.
    #[must_use]
    pub const fn from_hz(hz: f32) -> Self {
        FrameRate(hz)
    }
}
/// H.264 profile to request from the encoder.
///
/// Variants map 1:1 to the native `PRO_*` constants (see `to_c`).
#[derive(Copy, Clone, Debug)]
#[allow(missing_docs)]
pub enum Profile {
    Baseline,
    Main,
    Extended,
    High,
    High10,
    High422,
    High444,
    CAVLC444,
    ScalableBaseline,
    ScalableHigh,
}

impl Profile {
    // Translates the profile into the corresponding native constant.
    const fn to_c(self) -> EProfileIdc {
        match self {
            Self::Baseline => openh264_sys2::PRO_BASELINE,
            Self::Main => openh264_sys2::PRO_MAIN,
            Self::Extended => openh264_sys2::PRO_EXTENDED,
            Self::High => openh264_sys2::PRO_HIGH,
            Self::High10 => openh264_sys2::PRO_HIGH10,
            Self::High422 => openh264_sys2::PRO_HIGH422,
            Self::High444 => openh264_sys2::PRO_HIGH444,
            Self::CAVLC444 => openh264_sys2::PRO_CAVLC444,
            Self::ScalableBaseline => openh264_sys2::PRO_SCALABLE_BASELINE,
            Self::ScalableHigh => openh264_sys2::PRO_SCALABLE_HIGH,
        }
    }
}
/// H.264 level to request from the encoder.
///
/// Variants map 1:1 to the native `LEVEL_*` constants (see `to_c`).
#[derive(Copy, Clone, Debug)]
#[allow(missing_docs, non_camel_case_types)]
pub enum Level {
    Level_1_0,
    Level_1_B,
    Level_1_1,
    Level_1_2,
    Level_1_3,
    Level_2_0,
    Level_2_1,
    Level_2_2,
    Level_3_0,
    Level_3_1,
    Level_3_2,
    Level_4_0,
    Level_4_1,
    Level_4_2,
    Level_5_0,
    Level_5_1,
    Level_5_2,
}

impl Level {
    // Translates the level into the corresponding native constant.
    const fn to_c(self) -> ELevelIdc {
        match self {
            Self::Level_1_0 => openh264_sys2::LEVEL_1_0,
            Self::Level_1_B => openh264_sys2::LEVEL_1_B,
            Self::Level_1_1 => openh264_sys2::LEVEL_1_1,
            Self::Level_1_2 => openh264_sys2::LEVEL_1_2,
            Self::Level_1_3 => openh264_sys2::LEVEL_1_3,
            Self::Level_2_0 => openh264_sys2::LEVEL_2_0,
            Self::Level_2_1 => openh264_sys2::LEVEL_2_1,
            Self::Level_2_2 => openh264_sys2::LEVEL_2_2,
            Self::Level_3_0 => openh264_sys2::LEVEL_3_0,
            Self::Level_3_1 => openh264_sys2::LEVEL_3_1,
            Self::Level_3_2 => openh264_sys2::LEVEL_3_2,
            Self::Level_4_0 => openh264_sys2::LEVEL_4_0,
            Self::Level_4_1 => openh264_sys2::LEVEL_4_1,
            Self::Level_4_2 => openh264_sys2::LEVEL_4_2,
            Self::Level_5_0 => openh264_sys2::LEVEL_5_0,
            Self::Level_5_1 => openh264_sys2::LEVEL_5_1,
            Self::Level_5_2 => openh264_sys2::LEVEL_5_2,
        }
    }
}
#[derive(Debug, Default, Clone, Copy)]
#[allow(missing_docs)]
pub enum Complexity {
Low,
#[default]
Medium,
High,
}
impl Complexity {
const fn to_c(self) -> ELevelIdc {
match self {
Self::Low => openh264_sys2::LOW_COMPLEXITY,
Self::Medium => openh264_sys2::MEDIUM_COMPLEXITY,
Self::High => openh264_sys2::HIGH_COMPLEXITY,
}
}
}
/// An inclusive quantization-parameter range; both bounds must lie in `0..=51`.
#[derive(Debug, Clone, Copy)]
pub struct QpRange {
    min: u8,
    max: u8,
}

impl QpRange {
    /// Creates a new QP range.
    ///
    /// # Panics
    /// Panics if `max > 51` or `min > max`.
    #[must_use]
    pub const fn new(min: u8, max: u8) -> Self {
        assert!(max <= 51, "quantization value out of range (0..=51)");
        assert!(min <= max, "quantization min value larger than max");
        Self { min, max }
    }
}

impl Default for QpRange {
    /// The widest valid range, `0..=51`.
    fn default() -> Self {
        Self::new(0, 51)
    }
}
/// How often an intra (key) frame is emitted, counted in frames; `0` lets the
/// encoder decide.
#[derive(Debug, Clone, Copy, Default)]
pub struct IntraFramePeriod(u32);

impl IntraFramePeriod {
    /// An intra frame every `frames` frames.
    #[must_use]
    pub const fn from_num_frames(frames: u32) -> Self {
        IntraFramePeriod(frames)
    }

    /// Let the encoder choose (period `0`).
    #[must_use]
    pub const fn auto() -> Self {
        Self::from_num_frames(0)
    }
}
/// Color primaries for VUI signaling; the discriminant is the code written into
/// the bitstream (via `as_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[repr(u8)]
pub enum ColorPrimaries {
    /// BT.709 (code 1, the default).
    #[default]
    Bt709 = 1,
    /// Unspecified (code 2).
    Unspecified = 2,
    /// BT.470 System M (code 4).
    Bt470M = 4,
    /// BT.470 System B/G (code 5).
    Bt470BG = 5,
    /// SMPTE 170M (code 6).
    Smpte170M = 6,
    /// SMPTE 240M (code 7).
    Smpte240M = 7,
    /// Generic film (code 8).
    Film = 8,
    /// BT.2020 (code 9).
    Bt2020 = 9,
}

impl ColorPrimaries {
    /// Returns the numeric code for this primaries value.
    #[must_use]
    pub const fn as_u8(self) -> u8 {
        self as u8
    }
}
/// Transfer characteristics for VUI signaling; the discriminant is the code
/// written into the bitstream (via `as_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[repr(u8)]
pub enum TransferCharacteristics {
    /// BT.709 (code 1, the default).
    #[default]
    Bt709 = 1,
    /// Unspecified (code 2).
    Unspecified = 2,
    /// BT.470 System M (code 4).
    Bt470M = 4,
    /// BT.470 System B/G (code 5).
    Bt470Bg = 5,
    /// SMPTE 170M (code 6).
    Smpte170M = 6,
    /// SMPTE 240M (code 7).
    Smpte240M = 7,
    /// Linear transfer (code 8).
    Linear = 8,
    /// sRGB / IEC 61966-2-1 (code 13).
    Srgb = 13,
    /// BT.2020 10-bit (code 14).
    Bt2020_10 = 14,
    /// BT.2020 12-bit (code 15).
    Bt2020_12 = 15,
    /// SMPTE 2084 / PQ (code 16).
    Smpte2084 = 16,
    /// Hybrid log-gamma (code 18).
    Hlg = 18,
}

impl TransferCharacteristics {
    /// Returns the numeric code for this transfer function.
    #[must_use]
    pub const fn as_u8(self) -> u8 {
        self as u8
    }
}
/// Matrix coefficients for VUI signaling; the discriminant is the code written
/// into the bitstream (via `as_u8`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[repr(u8)]
pub enum MatrixCoefficients {
    /// Identity / RGB (code 0).
    Identity = 0,
    /// BT.709 (code 1, the default).
    #[default]
    Bt709 = 1,
    /// Unspecified (code 2).
    Unspecified = 2,
    /// US FCC (code 4).
    Fcc = 4,
    /// BT.470 System B/G (code 5).
    Bt470Bg = 5,
    /// SMPTE 170M (code 6).
    Smpte170M = 6,
    /// SMPTE 240M (code 7).
    Smpte240M = 7,
    /// YCgCo (code 8).
    Ycgco = 8,
    /// BT.2020 non-constant luminance (code 9).
    Bt2020Ncl = 9,
    /// BT.2020 constant luminance (code 10).
    Bt2020Cl = 10,
}

impl MatrixCoefficients {
    /// Returns the numeric code for this matrix.
    #[must_use]
    pub const fn as_u8(self) -> u8 {
        self as u8
    }
}
/// Video usability information (color signaling) written into the stream.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[must_use]
pub struct VuiConfig {
    color_primaries: ColorPrimaries,
    transfer_characteristics: TransferCharacteristics,
    matrix_coefficients: MatrixCoefficients,
    full_range: bool,
}

impl VuiConfig {
    /// Default configuration: limited-range BT.709.
    pub const fn new() -> Self {
        Self::bt709()
    }

    /// Limited-range BT.709.
    pub const fn bt709() -> Self {
        Self {
            color_primaries: ColorPrimaries::Bt709,
            transfer_characteristics: TransferCharacteristics::Bt709,
            matrix_coefficients: MatrixCoefficients::Bt709,
            full_range: false,
        }
    }

    /// Full-range BT.709.
    pub const fn bt709_full() -> Self {
        Self::bt709().full_range(true)
    }

    /// Limited-range BT.601 (SMPTE 170M).
    pub const fn bt601() -> Self {
        Self::bt709()
            .color_primaries(ColorPrimaries::Smpte170M)
            .transfer_characteristics(TransferCharacteristics::Smpte170M)
            .matrix_coefficients(MatrixCoefficients::Smpte170M)
    }

    /// Full-range BT.709 primaries/matrix with the sRGB transfer function.
    pub const fn srgb() -> Self {
        Self::bt709()
            .transfer_characteristics(TransferCharacteristics::Srgb)
            .full_range(true)
    }

    /// Limited-range BT.2020 (10-bit transfer, non-constant luminance).
    pub const fn bt2020() -> Self {
        Self::bt709()
            .color_primaries(ColorPrimaries::Bt2020)
            .transfer_characteristics(TransferCharacteristics::Bt2020_10)
            .matrix_coefficients(MatrixCoefficients::Bt2020Ncl)
    }

    /// Overrides the color primaries.
    pub const fn color_primaries(mut self, value: ColorPrimaries) -> Self {
        self.color_primaries = value;
        self
    }

    /// Overrides the transfer characteristics.
    pub const fn transfer_characteristics(mut self, value: TransferCharacteristics) -> Self {
        self.transfer_characteristics = value;
        self
    }

    /// Overrides the matrix coefficients.
    pub const fn matrix_coefficients(mut self, value: MatrixCoefficients) -> Self {
        self.matrix_coefficients = value;
        self
    }

    /// Selects full- (`true`) or limited- (`false`) range signaling.
    pub const fn full_range(mut self, value: bool) -> Self {
        self.full_range = value;
        self
    }
}
#[derive(Default, Copy, Clone, Debug)]
#[must_use]
#[allow(clippy::struct_excessive_bools)]
pub struct EncoderConfig {
enable_skip_frame: bool,
target_bitrate: BitRate,
enable_denoise: bool,
debug: i32,
data_format: EVideoFormatType,
max_frame_rate: FrameRate,
rate_control_mode: RateControlMode,
sps_pps_strategy: SpsPpsStrategy,
multiple_thread_idc: u16,
usage_type: UsageType,
max_slice_len: Option<u32>,
profile: Option<Profile>,
level: Option<Level>,
complexity: Complexity,
qp: QpRange,
scene_change_detect: bool,
adaptive_quantization: bool,
background_detection: bool,
long_term_reference: bool,
intra_frame_period: IntraFramePeriod,
vui: Option<VuiConfig>,
}
impl EncoderConfig {
    /// Creates a new default encoder configuration.
    pub const fn new() -> Self {
        Self {
            enable_skip_frame: true,
            target_bitrate: BitRate::from_bps(120_000),
            enable_denoise: false,
            debug: 0,
            data_format: videoFormatI420,
            max_frame_rate: FrameRate::from_hz(0.0),
            rate_control_mode: RateControlMode::Quality,
            sps_pps_strategy: SpsPpsStrategy::ConstantId,
            multiple_thread_idc: 0,
            usage_type: UsageType::CameraVideoRealTime,
            max_slice_len: None,
            profile: None,
            level: None,
            complexity: Complexity::Medium,
            qp: QpRange::new(0, 51),
            scene_change_detect: true,
            adaptive_quantization: true,
            background_detection: true,
            long_term_reference: false,
            intra_frame_period: IntraFramePeriod::from_num_frames(0),
            vui: None,
        }
    }

    /// Sets the requested target bitrate. Default is 120 kbps.
    pub const fn bitrate(mut self, bps: BitRate) -> Self {
        self.target_bitrate = bps;
        self
    }

    /// Enables detailed console logging inside OpenH264.
    pub const fn debug(mut self, value: bool) -> Self {
        self.debug = if value { WELS_LOG_DETAIL } else { WELS_LOG_QUIET };
        self
    }

    /// Allows the encoder to skip frames to maintain the target bitrate. Enabled by default.
    pub const fn skip_frames(mut self, value: bool) -> Self {
        self.enable_skip_frame = value;
        self
    }

    /// Enables or disables the encoder's denoise filter. Disabled by default.
    ///
    /// Added for consistency: `enable_denoise` was the only configuration field
    /// without a builder method, which made it impossible to turn on.
    pub const fn denoise(mut self, value: bool) -> Self {
        self.enable_denoise = value;
        self
    }

    /// Sets the maximum frame rate hint.
    pub const fn max_frame_rate(mut self, value: FrameRate) -> Self {
        self.max_frame_rate = value;
        self
    }

    /// Sets the intended usage type (camera vs. screen content, realtime or not).
    pub const fn usage_type(mut self, value: UsageType) -> Self {
        self.usage_type = value;
        self
    }

    /// Sets the rate control mode.
    pub const fn rate_control_mode(mut self, value: RateControlMode) -> Self {
        self.rate_control_mode = value;
        self
    }

    /// Sets the SPS/PPS parameter-set ID strategy.
    pub const fn sps_pps_strategy(mut self, value: SpsPpsStrategy) -> Self {
        self.sps_pps_strategy = value;
        self
    }

    /// Limits NAL/slice size to at most `max_slice_len` bytes (size-limited slicing).
    pub const fn max_slice_len(mut self, max_slice_len: u32) -> Self {
        self.max_slice_len = Some(max_slice_len);
        self
    }

    /// Requests a specific H.264 profile.
    pub const fn profile(mut self, profile: Profile) -> Self {
        self.profile = Some(profile);
        self
    }

    /// Requests a specific H.264 level.
    pub const fn level(mut self, level: Level) -> Self {
        self.level = Some(level);
        self
    }

    /// Sets the encoder complexity (speed/quality trade-off).
    pub const fn complexity(mut self, complexity: Complexity) -> Self {
        self.complexity = complexity;
        self
    }

    /// Constrains the quantization parameter range.
    pub const fn qp(mut self, value: QpRange) -> Self {
        self.qp = value;
        self
    }

    /// Enables scene change detection. Enabled by default.
    pub const fn scene_change_detect(mut self, value: bool) -> Self {
        self.scene_change_detect = value;
        self
    }

    /// Enables adaptive quantization. Enabled by default.
    pub const fn adaptive_quantization(mut self, value: bool) -> Self {
        self.adaptive_quantization = value;
        self
    }

    /// Enables background detection. Enabled by default.
    pub const fn background_detection(mut self, value: bool) -> Self {
        self.background_detection = value;
        self
    }

    /// Enables long-term reference frames. Disabled by default.
    pub const fn long_term_reference(mut self, value: bool) -> Self {
        self.long_term_reference = value;
        self
    }

    /// Sets how often intra (key) frames are emitted.
    pub const fn intra_frame_period(mut self, value: IntraFramePeriod) -> Self {
        self.intra_frame_period = value;
        self
    }

    /// Sets the number of encoder threads (`iMultipleThreadIdc`).
    pub const fn num_threads(mut self, threads: u16) -> Self {
        self.multiple_thread_idc = threads;
        self
    }

    /// Enables VUI color signaling in the produced stream.
    pub const fn vui(mut self, config: VuiConfig) -> Self {
        self.vui = Some(config);
        self
    }
}
/// A safe H.264 encoder wrapping the raw OpenH264 API.
pub struct Encoder {
    // Stored (not just consumed) because `reinit` re-applies it on dimension change,
    // and `debug`/`data_format` are passed to the encoder by pointer.
    config: EncoderConfig,
    raw_api: EncoderRawAPI,
    // Reused output buffer; each `encode*` call overwrites it, and the returned
    // `EncodedBitStream` borrows from it.
    bit_stream_info: SFrameBSInfo,
    // Dimensions of the last encoded frame; `None` until first initialization.
    previous_dimensions: Option<(i32, i32)>,
}
// SAFETY(review): relies on the same assumption as `EncoderRawAPI`'s `Send`/`Sync`:
// the native encoder tolerates cross-thread use when calls are serialized through
// `&mut self` — TODO confirm upstream guarantees.
unsafe impl Send for Encoder {}
unsafe impl Sync for Encoder {}
impl Encoder {
    /// Creates an encoder with default configuration, using the bundled OpenH264 source.
    ///
    /// # Errors
    /// Fails if the native encoder could not be created.
    #[cfg(feature = "source")]
    pub fn new() -> Result<Self, Error> {
        let api = OpenH264API::from_source();
        let config = EncoderConfig::new();
        let raw_api = EncoderRawAPI::new(api)?;
        Ok(Self {
            config,
            raw_api,
            bit_stream_info: SFrameBSInfo::default(),
            previous_dimensions: None,
        })
    }

    /// Creates an encoder from a given API implementation and configuration.
    ///
    /// # Errors
    /// Fails if the native encoder could not be created.
    pub fn with_api_config(api: OpenH264API, config: EncoderConfig) -> Result<Self, Error> {
        let raw_api = EncoderRawAPI::new(api)?;
        Ok(Self {
            config,
            raw_api,
            bit_stream_info: SFrameBSInfo::default(),
            previous_dimensions: None,
        })
    }

    /// Encodes a YUV frame at timestamp zero.
    ///
    /// # Errors
    /// Fails if the frame could not be encoded.
    pub fn encode<T: YUVSource>(&mut self, yuv_source: &T) -> Result<EncodedBitStream<'_>, Error> {
        self.encode_at(yuv_source, Timestamp::ZERO)
    }

    /// Encodes a YUV frame with the given timestamp.
    ///
    /// The encoder is (re)initialized lazily whenever the source dimensions
    /// differ from the previously encoded frame.
    ///
    /// # Errors
    /// Fails if the dimensions are unsupported or the native encode call fails.
    pub fn encode_at<T: YUVSource>(&mut self, yuv_source: &T, timestamp: Timestamp) -> Result<EncodedBitStream<'_>, Error> {
        let new_dimensions = yuv_source.dimensions_i32();
        if self.previous_dimensions != Some(new_dimensions) {
            self.reinit(new_dimensions.0, new_dimensions.1)?;
            self.previous_dimensions = Some(new_dimensions);
        }
        let strides = yuv_source.strides_i32();
        // The source picture borrows the caller's Y/U/V planes; the `cast_mut` is only
        // to satisfy the C signature — presumably OpenH264 does not write through
        // these pointers during `encode_frame`.
        let source = SSourcePicture {
            iColorFormat: videoFormatI420,
            iStride: [strides.0, strides.1, strides.2, 0],
            pData: [
                yuv_source.y().as_ptr().cast_mut(),
                yuv_source.u().as_ptr().cast_mut(),
                yuv_source.v().as_ptr().cast_mut(),
                null_mut(),
            ],
            iPicWidth: new_dimensions.0,
            iPicHeight: new_dimensions.1,
            uiTimeStamp: timestamp.as_native(),
            bPsnrY: false,
            bPsnrU: false,
            bPsnrV: false,
        };
        // SAFETY: `source` and `bit_stream_info` are valid for the duration of the call.
        unsafe {
            self.raw_api
                .encode_frame(&raw const source, &raw mut self.bit_stream_info)
                .ok()?;
        }
        // The returned stream borrows `bit_stream_info`, which the next `encode*`
        // call will overwrite — hence the `&mut self` borrow on this method.
        Ok(EncodedBitStream {
            bit_stream_info: &self.bit_stream_info,
        })
    }

    // (Re)initializes the native encoder for the given dimensions, translating
    // `EncoderConfig` into an `SEncParamExt`. First call: full initialization plus
    // one-time options. Later calls (dimension change): parameter update + forced IDR.
    #[rustfmt::skip]
    fn reinit(&mut self, width: i32, height: i32) -> Result<(), Error> {
        // Resolution limit: at most 3840 on the longer axis and 2160 on the shorter.
        let greater_dim = std::cmp::max(width, height);
        let smaller_dim = std::cmp::min(width, height);
        if greater_dim > 3840 || smaller_dim > 2160 {
            return Err(Error::msg("Encoder max resolution 3840x2160 horizontal or 2160x3840 vertical"));
        }
        // Start from the encoder's own defaults, then overwrite with our config.
        let mut params = SEncParamExt::default();
        unsafe { self.raw_api.get_default_params(&raw mut params).ok()? };
        params.iPicWidth = width as c_int; params.iPicHeight = height as c_int; params.iRCMode = self.config.rate_control_mode.to_c();
        params.bEnableFrameSkip = self.config.enable_skip_frame;
        params.iTargetBitrate = self.config.target_bitrate.0.try_into()?;
        params.bEnableDenoise = self.config.enable_denoise;
        params.fMaxFrameRate = self.config.max_frame_rate.0;
        params.eSpsPpsIdStrategy = self.config.sps_pps_strategy.to_c();
        params.iMultipleThreadIdc = self.config.multiple_thread_idc;
        params.iUsageType = self.config.usage_type.to_c();
        params.bEnableSceneChangeDetect = self.config.scene_change_detect;
        params.bEnableAdaptiveQuant = self.config.adaptive_quantization;
        params.bEnableBackgroundDetection = self.config.background_detection;
        params.bEnableLongTermReference = self.config.long_term_reference;
        params.iComplexityMode = self.config.complexity.to_c();
        params.uiIntraPeriod = self.config.intra_frame_period.0;
        params.iLoopFilterDisableIdc = DEBLOCKING_IDC_0;
        params.iMinQp = self.config.qp.min.into();
        params.iMaxQp = self.config.qp.max.into();
        if let Some(profile) = self.config.profile {
            params.sSpatialLayers[0].uiProfileIdc = profile.to_c();
        }
        if let Some(level) = self.config.level {
            params.sSpatialLayers[0].uiLevelIdc = level.to_c();
        }
        // Optional VUI color signaling goes into the (single) spatial layer.
        if let Some(ref vui) = self.config.vui {
            params.sSpatialLayers[0].bVideoSignalTypePresent = true;
            params.sSpatialLayers[0].bColorDescriptionPresent = true;
            params.sSpatialLayers[0].bFullRange = vui.full_range;
            params.sSpatialLayers[0].uiColorPrimaries = vui.color_primaries.as_u8();
            params.sSpatialLayers[0].uiTransferCharacteristics = vui.transfer_characteristics.as_u8();
            params.sSpatialLayers[0].uiColorMatrix = vui.matrix_coefficients.as_u8();
        }
        // This wrapper always encodes exactly one spatial and one temporal layer.
        params.iSpatialLayerNum = 1;
        params.iTemporalLayerNum = 1;
        params.iLtrMarkPeriod = 30;
        params.sSpatialLayers[0].iMaxSpatialBitrate = self.config.target_bitrate.0.try_into()?;
        params.sSpatialLayers[0].iSpatialBitrate = self.config.target_bitrate.0.try_into()?;
        params.sSpatialLayers[0].fFrameRate = self.config.max_frame_rate.0;
        params.sSpatialLayers[0].iVideoWidth = width;
        params.sSpatialLayers[0].iVideoHeight = height;
        if let Some(max_slice_len) = self.config.max_slice_len {
            // Size-limited slicing: cap each NAL/slice at `max_slice_len` bytes.
            params.uiMaxNalSize = max_slice_len;
            params.sSpatialLayers[0].sSliceArgument.uiSliceMode = SM_SIZELIMITED_SLICE;
            params.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = max_slice_len;
        } else {
            params.sSpatialLayers[0].sSliceArgument.uiSliceMode = SM_SINGLE_SLICE;
            params.sSpatialLayers[0].sSliceArgument.uiSliceNum = 1;
        }
        unsafe {
            if self.previous_dimensions.is_none() {
                // First use: initialize, then set trace level and data format. These
                // options take pointers to the config fields, which is why `debug`
                // and `data_format` live in `self.config`.
                self.raw_api.initialize_ext(&raw const params).ok()?;
                self.raw_api.set_option(ENCODER_OPTION_TRACE_LEVEL, addr_of_mut!(self.config.debug).cast()).ok()?;
                self.raw_api.set_option(ENCODER_OPTION_DATAFORMAT, addr_of_mut!(self.config.data_format).cast()).ok()?;
            } else {
                // Dimension change on a live encoder: push new parameters and force
                // an IDR frame so the stream stays decodable from here on.
                self.raw_api.set_option(ENCODER_OPTION_SVC_ENCODE_PARAM_EXT, addr_of_mut!(params).cast()).ok()?;
                self.force_intra_frame();
            }
        }
        Ok(())
    }

    /// Forces the next encoded frame to be an intra (IDR) frame.
    pub fn force_intra_frame(&mut self) {
        // SAFETY: the raw encoder pointer stays valid for the lifetime of `self`;
        // the return code is intentionally ignored.
        unsafe {
            self.raw_api.force_intra_frame(true);
        }
    }

    /// Grants access to the underlying raw encoder API.
    ///
    /// # Safety
    /// Calls made through the raw API can violate invariants this wrapper relies on
    /// (initialization state, option pointers, buffer lifetimes).
    pub const unsafe fn raw_api(&mut self) -> &mut EncoderRawAPI {
        &mut self.raw_api
    }
}
impl Drop for Encoder {
    fn drop(&mut self) {
        // SAFETY: uninitializes the native encoder; the encoder instance itself is
        // destroyed afterwards by `EncoderRawAPI`'s own `Drop`. Presumably calling
        // `Uninitialize` on a never-initialized encoder is tolerated — TODO confirm.
        unsafe {
            self.raw_api.uninitialize();
        }
    }
}
/// An encoded frame, borrowing the encoder's output buffer; valid only until the
/// next `encode*` call on the owning [`Encoder`].
pub struct EncodedBitStream<'a> {
    // Raw native output this view is built on.
    bit_stream_info: &'a SFrameBSInfo,
}
impl<'a> EncodedBitStream<'a> {
#[must_use]
pub const fn raw_info(&self) -> &'a SFrameBSInfo {
self.bit_stream_info
}
#[must_use]
pub const fn frame_type(&self) -> FrameType {
FrameType::from_c_int(self.bit_stream_info.eFrameType)
}
#[must_use]
pub const fn num_layers(&self) -> usize {
self.bit_stream_info.iLayerNum as usize
}
#[must_use]
pub const fn layer(&self, i: usize) -> Option<Layer<'a>> {
if i < self.num_layers() {
Some(Layer {
layer_info: &self.bit_stream_info.sLayerInfo[i],
})
} else {
None
}
}
#[allow(clippy::missing_panics_doc)]
pub fn write_vec(&self, dst: &mut Vec<u8>) {
for l in 0..self.num_layers() {
let layer = self.layer(l).unwrap();
for n in 0..layer.nal_count() {
let nal = layer.nal_unit(n).unwrap();
dst.extend_from_slice(nal);
}
}
}
#[allow(clippy::missing_panics_doc)]
pub fn write<T: std::io::Write>(&self, writer: &mut T) -> Result<(), Error> {
for l in 0..self.num_layers() {
let layer = self.layer(l).unwrap();
for n in 0..layer.nal_count() {
let nal = layer.nal_unit(n).unwrap();
match writer.write(nal) {
Ok(num) if num < nal.len() => {
return Err(Error::msg(&format!("only wrote {} out of {} bytes", num, nal.len())));
}
Err(e) => {
return Err(Error::msg(&format!("failed to write: {e}")));
}
_ => {}
}
}
}
Ok(())
}
#[must_use]
pub fn to_vec(&self) -> Vec<u8> {
let mut rval = Vec::new();
self.write_vec(&mut rval);
rval
}
}
/// One layer of an [`EncodedBitStream`], borrowing the encoder's output buffer.
#[derive(Debug)]
pub struct Layer<'a> {
    // Raw native layer info this view is built on.
    layer_info: &'a SLayerBSInfo,
}
impl<'a> Layer<'a> {
    /// Raw access to the underlying native layer info.
    #[must_use]
    pub const fn raw_info(&self) -> &'a SLayerBSInfo {
        self.layer_info
    }

    /// Number of NAL units in this layer.
    #[must_use]
    pub const fn nal_count(&self) -> usize {
        self.layer_info.iNalCount as usize
    }

    /// Returns NAL unit `i` as a byte slice, or `None` if `i` is out of bounds.
    ///
    /// Note: each call re-sums the lengths of the preceding units, so iterating all
    /// units via this method is O(n^2) in the unit count.
    #[must_use]
    pub fn nal_unit(&self, i: usize) -> Option<&[u8]> {
        if i < self.nal_count() {
            let mut offset = 0;
            // SAFETY: assumes (per the OpenH264 API) that `pNalLengthInByte` points to
            // `iNalCount` lengths and `pBsBuf` to a buffer holding the NAL units laid
            // out back-to-back; `i` was bounds-checked above, so unit `i` starts at the
            // sum of the preceding lengths and is `size` bytes long — TODO confirm
            // against the upstream header docs.
            let slice = unsafe {
                for nal_idx in 0..i {
                    let size = *self.layer_info.pNalLengthInByte.add(nal_idx) as usize;
                    offset += size;
                }
                let size = *self.layer_info.pNalLengthInByte.add(i) as usize;
                std::slice::from_raw_parts(self.layer_info.pBsBuf.add(offset), size)
            };
            Some(slice)
        } else {
            None
        }
    }

    /// Whether this is a video coding layer (as opposed to parameter sets etc.).
    #[must_use]
    pub const fn is_video(&self) -> bool {
        self.layer_info.uiLayerType == VIDEO_CODING_LAYER as c_uchar
    }
}
/// Frame type reported by the encoder for an encoded frame.
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Copy, Clone)]
pub enum FrameType {
    /// Unknown or unrecognized native frame type.
    Invalid,
    /// Instantaneous decoding refresh frame (keyframe).
    IDR,
    /// Intra-coded frame.
    I,
    /// Predicted frame.
    P,
    /// The encoder skipped this frame.
    Skip,
    /// Mixed I/P frame.
    IPMixed,
}
impl FrameType {
    /// Converts a native `videoFrameType*` constant into the safe enum;
    /// unrecognized values become `Invalid`.
    const fn from_c_int(native: std::os::raw::c_int) -> Self {
        use openh264_sys2::{videoFrameTypeI, videoFrameTypeIDR, videoFrameTypeIPMixed, videoFrameTypeP, videoFrameTypeSkip};
        if native == videoFrameTypeIDR {
            Self::IDR
        } else if native == videoFrameTypeI {
            Self::I
        } else if native == videoFrameTypeP {
            Self::P
        } else if native == videoFrameTypeSkip {
            Self::Skip
        } else if native == videoFrameTypeIPMixed {
            Self::IPMixed
        } else {
            Self::Invalid
        }
    }
}