#![allow(unsafe_code)]
#![allow(unsafe_op_in_unsafe_fn)]
#![allow(clippy::ptr_as_ptr)]
#![allow(clippy::cast_possible_wrap)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::borrow_as_ptr)]
#![allow(clippy::ref_as_ptr)]
use ff_sys::{
AVCodec, AVCodecContext, AVFormatContext, AVPixelFormat, AVPixelFormat_AV_PIX_FMT_NONE,
AVRational, AVSampleFormat, av_interleaved_write_frame, av_packet_alloc, av_packet_free,
av_packet_rescale_ts, av_packet_unref, av_rescale_q,
};
use ff_format::{PixelFormat, SampleFormat};
use crate::error::StreamError;
/// Converts a raw FFmpeg status code into a [`StreamError::Ffmpeg`],
/// resolving the human-readable message via `av_error_string`.
pub(crate) fn ffmpeg_err(code: i32) -> StreamError {
    let message = ff_sys::av_error_string(code);
    StreamError::Ffmpeg { code, message }
}
/// Wraps an ad-hoc message in a [`StreamError::Ffmpeg`]; the status code is
/// set to 0 because there is no underlying FFmpeg error number.
pub(crate) fn ffmpeg_err_msg(msg: &str) -> StreamError {
    let message = String::from(msg);
    StreamError::Ffmpeg { code: 0, message }
}
/// Picks the first available H.264 encoder, preferring hardware
/// implementations (NVENC, QSV, AMF, VideoToolbox) over software
/// (libx264), with `mpeg4` as a last resort.
///
/// Returns `None` when no candidate encoder is present in the linked
/// FFmpeg build.
///
/// # Safety
/// Calls into the FFmpeg C API. The returned codec pointer is owned by
/// FFmpeg and must not be freed by the caller.
pub(crate) unsafe fn select_h264_encoder(log_prefix: &str) -> Option<*const AVCodec> {
    // Ordered by preference: hardware first, then software fallbacks.
    const CANDIDATES: [&str; 6] = [
        "h264_nvenc",
        "h264_qsv",
        "h264_amf",
        "h264_videotoolbox",
        "libx264",
        "mpeg4",
    ];
    CANDIDATES.into_iter().find_map(|name| {
        let c_name = std::ffi::CString::new(name).ok()?;
        let codec = ff_sys::avcodec::find_encoder_by_name(c_name.as_ptr())?;
        log::info!("{log_prefix} selected video encoder encoder={name}");
        Some(codec)
    })
}
pub(crate) unsafe fn open_aac_encoder(
sample_rate: i32,
nb_channels: i32,
bit_rate: i64,
log_prefix: &str,
) -> Result<*mut AVCodecContext, StreamError> {
let codec = ff_sys::avcodec::find_encoder_by_name(c"aac".as_ptr())
.or_else(|| ff_sys::avcodec::find_encoder_by_name(c"libfdk_aac".as_ptr()))
.ok_or_else(|| ffmpeg_err_msg("no AAC encoder available (tried aac, libfdk_aac)"))?;
let mut ctx = ff_sys::avcodec::alloc_context3(codec).map_err(ffmpeg_err)?;
(*ctx).sample_rate = sample_rate;
(*ctx).sample_fmt = ff_sys::swresample::sample_format::FLTP;
(*ctx).bit_rate = bit_rate;
(*ctx).time_base.num = 1;
(*ctx).time_base.den = sample_rate;
ff_sys::swresample::channel_layout::set_default(&mut (*ctx).ch_layout, nb_channels);
ff_sys::avcodec::open2(ctx, codec, std::ptr::null_mut()).map_err(|e| {
ff_sys::avcodec::free_context(&mut ctx as *mut *mut _);
ffmpeg_err(e)
})?;
log::info!(
"{log_prefix} aac encoder opened \
sample_rate={sample_rate} channels={nb_channels} bit_rate={bit_rate}"
);
Ok(ctx)
}
pub(crate) fn pixel_format_to_av(fmt: PixelFormat) -> AVPixelFormat {
match fmt {
PixelFormat::Yuv420p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P,
PixelFormat::Yuv422p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P,
PixelFormat::Yuv444p => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P,
PixelFormat::Rgb24 => ff_sys::AVPixelFormat_AV_PIX_FMT_RGB24,
PixelFormat::Bgr24 => ff_sys::AVPixelFormat_AV_PIX_FMT_BGR24,
PixelFormat::Rgba => ff_sys::AVPixelFormat_AV_PIX_FMT_RGBA,
PixelFormat::Bgra => ff_sys::AVPixelFormat_AV_PIX_FMT_BGRA,
PixelFormat::Nv12 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV12,
PixelFormat::Nv21 => ff_sys::AVPixelFormat_AV_PIX_FMT_NV21,
PixelFormat::Yuv420p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV420P10LE,
PixelFormat::Yuv422p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV422P10LE,
PixelFormat::Yuv444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUV444P10LE,
PixelFormat::Yuva444p10le => ff_sys::AVPixelFormat_AV_PIX_FMT_YUVA444P10LE,
PixelFormat::P010le => ff_sys::AVPixelFormat_AV_PIX_FMT_P010LE,
PixelFormat::Gray8 => ff_sys::AVPixelFormat_AV_PIX_FMT_GRAY8,
PixelFormat::Gbrpf32le => ff_sys::AVPixelFormat_AV_PIX_FMT_GBRPF32LE,
PixelFormat::Other(_) | _ => AVPixelFormat_AV_PIX_FMT_NONE,
}
}
/// Maps the crate-local [`SampleFormat`] to the corresponding FFmpeg
/// `AVSampleFormat` constant (interleaved and planar variants).
///
/// Any format without a known FFmpeg counterpart (including
/// `SampleFormat::Other`) maps to `AV_SAMPLE_FMT_NONE`.
pub(crate) fn sample_format_to_av(fmt: SampleFormat) -> AVSampleFormat {
    match fmt {
        SampleFormat::U8 => ff_sys::swresample::sample_format::U8,
        SampleFormat::I16 => ff_sys::swresample::sample_format::S16,
        SampleFormat::I32 => ff_sys::swresample::sample_format::S32,
        SampleFormat::F32 => ff_sys::swresample::sample_format::FLT,
        SampleFormat::F64 => ff_sys::swresample::sample_format::DBL,
        SampleFormat::U8p => ff_sys::swresample::sample_format::U8P,
        SampleFormat::I16p => ff_sys::swresample::sample_format::S16P,
        SampleFormat::I32p => ff_sys::swresample::sample_format::S32P,
        SampleFormat::F32p => ff_sys::swresample::sample_format::FLTP,
        SampleFormat::F64p => ff_sys::swresample::sample_format::DBLP,
        // `SampleFormat::Other(_) | _` was redundant: `_` already covers
        // `Other(_)` and rustc flags the subsumed alternative as unreachable.
        _ => ff_sys::swresample::sample_format::NONE,
    }
}
/// Drains all pending packets from `enc_ctx` and writes them, interleaved,
/// to stream `stream_idx` of `out_ctx`.
///
/// Each packet is stamped with a duration of one frame period (rescaled to
/// the encoder time base, skipped when the rescale degenerates to <= 0) and
/// has its timestamps rescaled to the output stream's time base before
/// muxing. The loop ends on `EAGAIN`/`EOF` (encoder empty or fully
/// flushed), on any other receive error, or when the muxer rejects a
/// packet; write and receive failures are logged as warnings.
///
/// # Safety
/// `enc_ctx` and `out_ctx` must be valid, opened FFmpeg contexts, and
/// `stream_idx` must index an existing stream in `out_ctx`.
pub(crate) unsafe fn drain_encoder(
    enc_ctx: *mut AVCodecContext,
    out_ctx: *mut AVFormatContext,
    stream_idx: i32,
    log_prefix: &str,
    frame_period: AVRational,
) {
    let mut pkt = av_packet_alloc();
    if pkt.is_null() {
        // Out of memory; nothing useful we can do here.
        return;
    }
    let stream_tb = (*(*(*out_ctx).streams.add(stream_idx as usize))).time_base;
    let enc_tb = (*enc_ctx).time_base;
    // One frame's duration expressed in the encoder time base.
    let frame_dur_enc_tb = av_rescale_q(1, frame_period, enc_tb);
    loop {
        match ff_sys::avcodec::receive_packet(enc_ctx, pkt) {
            // EAGAIN: encoder needs more input; EOF: fully flushed. Both
            // are normal loop-termination conditions.
            Err(e) if e == ff_sys::error_codes::EAGAIN || e == ff_sys::error_codes::EOF => {
                break;
            }
            Err(e) => {
                // Previously this broke silently; surface the unexpected
                // decoder error so drain failures are diagnosable.
                log::warn!(
                    "{log_prefix} avcodec_receive_packet failed \
                     stream_index={stream_idx} error={}",
                    ff_sys::av_error_string(e)
                );
                break;
            }
            Ok(()) => {}
        }
        if frame_dur_enc_tb > 0 {
            (*pkt).duration = frame_dur_enc_tb;
        }
        // Convert encoder-time timestamps to the muxer stream's time base.
        av_packet_rescale_ts(pkt, enc_tb, stream_tb);
        (*pkt).stream_index = stream_idx;
        let ret = av_interleaved_write_frame(out_ctx, pkt);
        av_packet_unref(pkt);
        if ret < 0 {
            log::warn!(
                "{log_prefix} av_interleaved_write_frame failed \
                 stream_index={stream_idx} error={}",
                ff_sys::av_error_string(ret)
            );
            break;
        }
    }
    av_packet_free(&mut pkt);
}