extern crate ffmpeg_next as ffmpeg;
#[cfg(feature = "ndarray")]
use ndarray::Array3;
use ffmpeg::codec::codec::Codec;
use ffmpeg::codec::context::Context;
use ffmpeg::encoder::video::Video;
use ffmpeg::format::context::Output;
use ffmpeg::util::frame::video::Video as Frame;
use ffmpeg::{Error, Rational};
#[cfg(feature = "ndarray")]
use ffmpeg::util::format::Pixel;
use ffmpeg::ffi::*;
pub fn output_raw(format: &str) -> Result<Output, Error> {
unsafe {
let mut output_ptr = std::ptr::null_mut();
let format = std::ffi::CString::new(format).unwrap();
match avformat_alloc_output_context2(
&mut output_ptr,
std::ptr::null_mut(),
format.as_ptr(),
std::ptr::null(),
) {
0 => Ok(Output::wrap(output_ptr)),
e => Err(Error::from(e)),
}
}
}
/// Replace the output context's I/O with an in-memory dynamic buffer.
///
/// The accumulated bytes are retrieved later with [`output_raw_buf_end`].
///
/// Panics if ffmpeg cannot allocate the dynamic buffer.
pub fn output_raw_buf_start(output: &mut Output) {
    unsafe {
        let mut io_ctx: *mut AVIOContext = std::ptr::null_mut();
        // avio_open_dyn_buf allocates a growable in-memory AVIOContext.
        let ret = avio_open_dyn_buf(&mut io_ctx);
        if ret != 0 {
            panic!("Failed to open dynamic buffer for output context.");
        }
        (*output.as_mut_ptr()).pb = io_ctx;
    }
}
/// Close the dynamic buffer installed by [`output_raw_buf_start`] and return
/// its accumulated bytes.
///
/// The statement order below matters: the context pointer is detached from
/// the output before the buffer is copied, and the ffmpeg-owned allocation is
/// freed only after the copy into the `Vec`.
pub fn output_raw_buf_end(output: &mut Output) -> Vec<u8> {
    unsafe {
        let output_pb = (*output.as_mut_ptr()).pb;
        let mut buffer_raw: *mut u8 = std::ptr::null_mut();
        // avio_close_dyn_buf frees the AVIOContext itself and hands ownership
        // of the underlying byte buffer (and its size) back to the caller.
        let buffer_size = avio_close_dyn_buf(output_pb, (&mut buffer_raw) as *mut *mut u8) as usize;
        // Detach the now-freed context so nothing else dereferences it.
        ((*output.as_mut_ptr()).pb) = std::ptr::null_mut::<AVIOContext>();
        // Copy into a Rust-owned Vec before releasing the ffmpeg allocation.
        let buffer = std::slice::from_raw_parts(buffer_raw, buffer_size).to_vec();
        av_free(buffer_raw as *mut std::ffi::c_void);
        buffer
    }
}
/// Install a packetizing in-memory writer on `output`: each chunk ffmpeg
/// writes is delivered as one `Vec<u8>` pushed onto `packet_buffer` via
/// `output_raw_buf_start_callback`.
///
/// NOTE(review): `packet_buffer` is stored as a raw pointer inside the
/// AVIOContext, so it must stay alive (and not move) until
/// [`output_raw_packetized_buf_end`] is called — confirm all call sites
/// uphold this.
pub fn output_raw_packetized_buf_start(
    output: &mut Output,
    packet_buffer: &mut Vec<Vec<u8>>,
    max_packet_size: usize,
) {
    unsafe {
        // Scratch buffer owned by the AVIOContext; freed in ..._buf_end.
        let buffer = av_malloc(max_packet_size) as *mut u8;
        let io: *mut AVIOContext = avio_alloc_context(
            buffer,
            max_packet_size.try_into().unwrap(),
            1, // write_flag: this context is used for writing
            packet_buffer as *mut Vec<Vec<u8>> as *mut std::ffi::c_void,
            None, // no read callback
            // The transmute bridges the signature difference between our
            // callback (`*const u8` data argument) and the bindgen-generated
            // write-callback type, which varies across ffmpeg versions.
            #[allow(clippy::missing_transmute_annotations)]
            Some(std::mem::transmute::<*const (), _>(
                output_raw_buf_start_callback as _,
            )),
            None, // no seek callback
        );
        // Cap the write size so ffmpeg flushes to the callback in chunks of
        // at most one packet.
        (*io).max_packet_size = max_packet_size.try_into().unwrap();
        (*output.as_mut_ptr()).pb = io;
    }
}
pub fn output_raw_packetized_buf_end(output: &mut Output) {
unsafe {
let output_pb = (*output.as_mut_ptr()).pb;
avio_flush(output_pb);
av_free((*output_pb).buffer as *mut std::ffi::c_void);
av_free(output_pb as *mut std::ffi::c_void);
((*output.as_mut_ptr()).pb) = std::ptr::null_mut::<AVIOContext>();
}
}
pub fn flush_output(output: &mut Output) -> Result<(), Error> {
unsafe {
match av_write_frame(output.as_mut_ptr(), std::ptr::null_mut()) {
0 => Ok(()),
1 => Ok(()),
e => Err(Error::from(e)),
}
}
}
/// Allocate a new codec context pre-configured for the given codec.
///
/// # Errors
///
/// Returns [`Error::Unknown`] if ffmpeg fails to allocate the context.
pub fn codec_context_as(codec: &Codec) -> Result<Context, Error> {
    unsafe {
        let ptr = ffmpeg::ffi::avcodec_alloc_context3(codec.as_ptr());
        if ptr.is_null() {
            Err(Error::Unknown)
        } else {
            Ok(Context::wrap(ptr, None))
        }
    }
}
/// Set the time base on a decoder's codec context.
///
/// The safe wrapper exposes no time_base setter on a generic codec context,
/// so the raw field is written directly.
pub fn set_decoder_context_time_base(decoder_context: &mut Context, time_base: Rational) {
    let ctx = decoder_context.as_mut_ptr();
    unsafe {
        (*ctx).time_base = time_base.into();
    }
}
pub fn get_encoder_time_base(encoder: &Video) -> Rational {
unsafe { (*encoder.0.as_ptr()).time_base.into() }
}
/// Copy frame metadata (pts, side data, etc. — not pixel data) from `src`
/// to `dst` via `av_frame_copy_props`.
///
/// The C return code is deliberately ignored, matching the original
/// best-effort behavior.
pub fn copy_frame_props(src: &Frame, dst: &mut Frame) {
    let _ = unsafe { av_frame_copy_props(dst.as_mut_ptr(), src.as_ptr()) };
}
/// A decoded video frame as a `(height, width, channel)` array of `u8`
/// samples (see the RGB24 conversion helpers below).
#[cfg(feature = "ndarray")]
pub type FrameArray = Array3<u8>;
/// Convert an HWC `(height, width, 3)` RGB24 ndarray into an ffmpeg frame.
///
/// Panics if `frame_array` is not in standard (row-major contiguous) layout.
///
/// # Errors
///
/// Returns the ffmpeg error code if the image size computed by
/// `av_image_fill_arrays` does not match the array's length.
#[cfg(feature = "ndarray")]
pub fn convert_ndarray_to_frame_rgb24(frame_array: &FrameArray) -> Result<Frame, Error> {
    unsafe {
        // Row-major contiguity is required so the array can be read as one
        // packed RGB24 plane.
        assert!(frame_array.is_standard_layout());
        let (frame_height, frame_width, _) = frame_array.dim();
        // `frame_tmp` never owns pixel data: av_image_fill_arrays only
        // points its data/linesize fields into `frame_array`'s memory.
        let mut frame_tmp = Frame::empty();
        let frame_tmp_ptr = frame_tmp.as_mut_ptr();
        // Returns the number of bytes the image occupies (align = 1).
        let bytes_copied = av_image_fill_arrays(
            (*frame_tmp_ptr).data.as_ptr() as *mut *mut u8,
            (*frame_tmp_ptr).linesize.as_ptr() as *mut i32,
            frame_array.as_ptr(),
            AVPixelFormat::AV_PIX_FMT_RGB24,
            frame_width as i32,
            frame_height as i32,
            1,
        );
        if bytes_copied != frame_array.len() as i32 {
            return Err(Error::from(bytes_copied));
        }
        // Allocate a real, self-owned frame and copy the pixels into it
        // (the destination may use different linesizes/padding).
        let mut frame = Frame::new(Pixel::RGB24, frame_width as u32, frame_height as u32);
        let frame_ptr = frame.as_mut_ptr();
        av_image_copy(
            (*frame_ptr).data.as_ptr() as *mut *mut u8,
            (*frame_ptr).linesize.as_ptr() as *mut i32,
            (*frame_tmp_ptr).data.as_ptr() as *mut *const u8,
            (*frame_tmp_ptr).linesize.as_ptr(),
            AVPixelFormat::AV_PIX_FMT_RGB24,
            frame_width as i32,
            frame_height as i32,
        );
        Ok(frame)
    }
}
/// Convert an RGB24 ffmpeg frame into an HWC `(height, width, 3)` ndarray.
///
/// Panics if the frame's pixel format is not `AV_PIX_FMT_RGB24`.
///
/// # Errors
///
/// Returns the ffmpeg error code if `av_image_copy_to_buffer` copies a
/// different number of bytes than the array holds.
#[cfg(feature = "ndarray")]
pub fn convert_frame_to_ndarray_rgb24(frame: &mut Frame) -> Result<FrameArray, Error> {
    unsafe {
        let frame_ptr = frame.as_mut_ptr();
        let frame_width: i32 = (*frame_ptr).width;
        let frame_height: i32 = (*frame_ptr).height;
        // The raw `format` field is a c_int; reinterpret it as the enum.
        let frame_format =
            std::mem::transmute::<std::ffi::c_int, AVPixelFormat>((*frame_ptr).format);
        assert_eq!(frame_format, AVPixelFormat::AV_PIX_FMT_RGB24);
        // Zero-initialized destination; filled in-place by ffmpeg below.
        let mut frame_array =
            FrameArray::default((frame_height as usize, frame_width as usize, 3_usize));
        // Copies the (possibly padded) frame planes into the packed array
        // with alignment 1; returns the number of bytes written.
        let bytes_copied = av_image_copy_to_buffer(
            frame_array.as_mut_ptr(),
            frame_array.len() as i32,
            (*frame_ptr).data.as_ptr() as *const *const u8,
            (*frame_ptr).linesize.as_ptr(),
            frame_format,
            frame_width,
            frame_height,
            1,
        );
        if bytes_copied == frame_array.len() as i32 {
            Ok(frame_array)
        } else {
            Err(Error::from(bytes_copied))
        }
    }
}
/// Return the codec extradata (e.g. H.264 SPS/PPS) for the given stream.
///
/// Returns an empty slice when the stream has no extradata: ffmpeg leaves
/// the `extradata` pointer NULL in that case, and constructing a slice from
/// a NULL pointer is undefined behavior even for length zero.
///
/// # Errors
///
/// Returns [`Error::StreamNotFound`] if `stream_index` is out of range.
pub fn extradata(output: &Output, stream_index: usize) -> Result<&[u8], Error> {
    let parameters = output
        .stream(stream_index)
        .map(|stream| stream.parameters())
        .ok_or(Error::StreamNotFound)?;
    unsafe {
        let ptr = (*parameters.as_ptr()).extradata;
        let len = (*parameters.as_ptr()).extradata_size;
        if ptr.is_null() || len <= 0 {
            Ok(&[])
        } else {
            Ok(std::slice::from_raw_parts(ptr, len as usize))
        }
    }
}
/// Whether the RTP muxer has the `h264_mode0` flag set in its `rtpflags`
/// private option.
pub fn rtp_h264_mode_0(output: &Output) -> bool {
    unsafe {
        // av_opt_flag_is_set expects NUL-terminated C strings. A plain Rust
        // string literal's `as_ptr()` is NOT NUL-terminated, so the previous
        // code made ffmpeg read past the end of the literal; use explicit
        // `\0`-terminated byte literals instead.
        //
        // The function returns > 0 if the flag is set, 0 if not, and a
        // negative error code if the option does not exist — treat errors
        // as "not set".
        av_opt_flag_is_set(
            (*output.as_ptr()).priv_data,
            b"rtpflags\0".as_ptr() as *const std::ffi::c_char,
            b"h264_mode0\0".as_ptr() as *const std::ffi::c_char,
        ) > 0
    }
}
/// Read the current RTP sequence number and timestamp from the RTP muxer's
/// private data.
///
/// NOTE(review): this reinterprets `priv_data` as the local `RTPMuxContext`
/// mirror struct. It is only valid while `output` is actually an RTP muxer
/// and the field layout matches the linked ffmpeg's private rtpenc.h —
/// re-verify on every ffmpeg upgrade.
pub fn rtp_seq_and_timestamp(output: &Output) -> (u16, u32) {
    unsafe {
        let rtp_mux_context = &*((*output.as_ptr()).priv_data as *const RTPMuxContext);
        (rtp_mux_context.seq, rtp_mux_context.timestamp)
    }
}
pub fn sdp(output: &Output) -> Result<String, Error> {
const BUF_SIZE: i32 = 4096;
unsafe {
let mut buf: [std::ffi::c_char; BUF_SIZE as usize] = [0; BUF_SIZE as usize];
let buf_ptr = &mut buf as *mut std::ffi::c_char;
let mut output_format_context = output.as_ptr();
let output_format_context_ptr = &mut output_format_context as *mut *const AVFormatContext;
let output_format_context_ptr = output_format_context_ptr as *mut *mut AVFormatContext;
let ret = av_sdp_create(output_format_context_ptr, 1, buf_ptr, BUF_SIZE);
if ret == 0 {
let sdp_c_str = std::ffi::CStr::from_ptr(buf_ptr);
let sdp = sdp_c_str.to_string_lossy().to_string();
Ok(sdp)
} else {
Err(Error::from(ret))
}
}
}
/// Route ffmpeg's internal logging through the `tracing` crate.
///
/// Installs `log_callback` as ffmpeg's global (process-wide) log callback.
pub fn init_logging() {
    unsafe {
        av_log_set_callback(Some(log_callback));
    }
}
/// AVIO write callback: copies each chunk ffmpeg writes into the
/// `Vec<Vec<u8>>` registered as `opaque` by `output_raw_packetized_buf_start`.
///
/// NOTE(review): this function pointer is transmuted to ffmpeg's
/// write-callback type at the registration site — keep the ABI
/// (`extern "C"`, argument order and widths) unchanged.
extern "C" fn output_raw_buf_start_callback(
    opaque: *mut std::ffi::c_void,
    buffer: *const u8,
    buffer_size: i32,
) -> i32 {
    unsafe {
        // `opaque` points at the caller's packet buffer, which must still
        // be alive (guaranteed until ..._buf_end detaches the context).
        let packet_buffer: &mut Vec<Vec<u8>> = &mut *(opaque as *mut Vec<Vec<u8>>);
        packet_buffer.push(std::slice::from_raw_parts(buffer, buffer_size as usize).to_vec());
    }
    // Report the whole chunk as consumed.
    buffer_size
}
/// ffmpeg log callback that formats each message and forwards it to
/// `tracing` at the corresponding level.
///
/// The type of `vl` differs per platform because C variadic-argument ABIs
/// differ; the `cfg` attributes select the matching binding.
unsafe extern "C" fn log_callback(
    avcl: *mut std::ffi::c_void,
    level_no: std::ffi::c_int,
    fmt: *const std::ffi::c_char,
    #[cfg(all(target_arch = "x86_64", target_family = "unix"))] vl: *mut __va_list_tag,
    #[cfg(not(all(target_arch = "x86_64", target_family = "unix")))] vl: va_list,
) {
    // Cheap pre-check: skip the costly formatting below when the mapped
    // tracing level is disabled. Unknown ffmpeg levels are dropped.
    let event_would_log = match level_no {
        AV_LOG_PANIC | AV_LOG_FATAL | AV_LOG_ERROR => tracing::enabled!(tracing::Level::ERROR),
        AV_LOG_WARNING => tracing::enabled!(tracing::Level::WARN),
        AV_LOG_INFO => tracing::enabled!(tracing::Level::INFO),
        AV_LOG_VERBOSE | AV_LOG_DEBUG => tracing::enabled!(tracing::Level::DEBUG),
        AV_LOG_TRACE => tracing::enabled!(tracing::Level::TRACE),
        _ => {
            return;
        }
    };
    if event_would_log {
        let mut line = [0; 1024];
        let mut print_prefix: std::ffi::c_int = 1;
        // Render `fmt` + `vl` into `line` (NUL-terminated). Returns the
        // formatted length, or a negative value on error.
        let ret = av_log_format_line2(
            avcl,
            level_no,
            fmt,
            vl,
            line.as_mut_ptr(),
            (line.len()) as std::ffi::c_int,
            (&mut print_prefix) as *mut std::ffi::c_int,
        );
        if ret > 0 {
            // Only forward valid UTF-8; non-UTF-8 lines are silently dropped.
            if let Ok(line) = std::ffi::CStr::from_ptr(line.as_mut_ptr()).to_str() {
                let line = line.trim();
                // Suppress known-noisy lines (see log_filter_hacks).
                if log_filter_hacks(line) {
                    match level_no {
                        AV_LOG_PANIC | AV_LOG_FATAL | AV_LOG_ERROR => {
                            tracing::error!(target: "video", "{}", line)
                        }
                        AV_LOG_WARNING => tracing::warn!(target: "video", "{}", line),
                        AV_LOG_INFO => tracing::info!(target: "video", "{}", line),
                        AV_LOG_VERBOSE | AV_LOG_DEBUG => {
                            tracing::debug!(target: "video", "{}", line)
                        }
                        AV_LOG_TRACE => tracing::trace!(target: "video", "{}", line),
                        _ => {}
                    };
                }
            }
        }
    }
}
/// Returns `true` when `line` should be forwarded to the logger.
///
/// Filters a known-noisy decoder warning: lines containing both
/// "SEI type 5 size" and "truncated at" (emitted for certain Pelco camera
/// streams) are suppressed.
fn log_filter_hacks(line: &str) -> bool {
    const HACK_1_PELCO_NEEDLE_1: &str = "SEI type 5 size";
    const HACK_1_PELCO_NEEDLE_2: &str = "truncated at";
    !(line.contains(HACK_1_PELCO_NEEDLE_1) && line.contains(HACK_1_PELCO_NEEDLE_2))
}
/// Partial mirror of ffmpeg's private `RTPMuxContext` (libavformat/rtpenc.h),
/// declared just far enough to reach `seq` and `timestamp` via
/// [`rtp_seq_and_timestamp`].
///
/// NOTE(review): field order and types must exactly match the rtpenc.h of
/// the *linked* ffmpeg build; re-verify whenever ffmpeg is upgraded.
#[repr(C)]
struct RTPMuxContext {
    // Leading fields are only placeholders to keep the offsets of the
    // fields we actually read correct.
    _av_class: *const AVClass,
    _ic: *mut AVFormatContext,
    _st: *mut AVStream,
    pub payload_type: std::ffi::c_int,
    pub ssrc: u32,
    pub cname: *const std::ffi::c_char,
    // RTP sequence number of the next outgoing packet.
    pub seq: u16,
    pub timestamp: u32,
    pub base_timestamp: u32,
    pub cur_timestamp: u32,
    pub max_payload_size: std::ffi::c_int,
}