pub use crate::encoder_config::{AV1EncoderConfig, AomUsage, BitstreamProfile, TileCodingMode};
use crate::common::AOMCodec;
use crate::ffi::*;
use std::mem::{self, MaybeUninit};
use std::ptr;
use av_data::frame::{Frame, FrameBufferConv, MediaKind};
use av_data::packet::Packet;
use av_data::pixel::formats::YUV420;
use av_data::pixel::Formaton;
/// Peak signal-to-noise-ratio statistics reported by the encoder for one frame.
///
/// Each array holds four entries — presumably Y, U, V and combined, in that
/// order (TODO confirm against libaom's `aom_psnr_pkt` documentation).
#[derive(Clone, Debug, PartialEq)]
pub struct PSNR {
    // Number of samples contributing to each entry.
    pub samples: [u32; 4],
    // Sum of squared errors per entry.
    pub sse: [u64; 4],
    // PSNR values in decibels per entry.
    pub psnr: [f64; 4],
}
/// Safe, owned representation of a packet produced by the libaom encoder.
#[derive(Clone, Debug)]
pub enum AOMPacket {
    /// A compressed frame.
    Packet(Packet),
    /// Two-pass rate-control statistics.
    Stats(Vec<u8>),
    /// First-pass macroblock statistics.
    MBStats(Vec<u8>),
    /// Per-frame PSNR report.
    PSNR(PSNR),
    /// Application-defined payload.
    Custom(Vec<u8>),
}
fn to_buffer(buf: aom_fixed_buf_t) -> Vec<u8> {
let mut v: Vec<u8> = Vec::with_capacity(buf.sz);
unsafe {
ptr::copy_nonoverlapping(buf.buf as *const u8, v.as_mut_ptr(), buf.sz);
v.set_len(buf.sz);
}
v
}
impl AOMPacket {
    /// Converts a raw libaom output packet into an owned `AOMPacket`,
    /// copying the payload out of encoder-owned memory.
    ///
    /// # Panics
    ///
    /// Panics on any packet kind other than frame, two-pass stats,
    /// first-pass MB stats, PSNR, or custom data.
    fn new(pkt: aom_codec_cx_pkt) -> AOMPacket {
        // `pkt.data` is a C union: the field read below must match
        // `pkt.kind`, which is what each `unsafe` access relies on.
        match pkt.kind {
            aom_codec_cx_pkt_kind::AOM_CODEC_CX_FRAME_PKT => {
                let f = unsafe { pkt.data.frame };
                let mut p = Packet::with_capacity(f.sz);
                // Copy the compressed bitstream into the packet's buffer,
                // then mark the copied bytes as initialized.
                unsafe {
                    ptr::copy_nonoverlapping(f.buf as *const u8, p.data.as_mut_ptr(), f.sz);
                    p.data.set_len(f.sz);
                }
                p.t.pts = Some(f.pts);
                p.is_key = (f.flags & AOM_FRAME_IS_KEY) != 0;
                AOMPacket::Packet(p)
            }
            aom_codec_cx_pkt_kind::AOM_CODEC_STATS_PKT => {
                let b = to_buffer(unsafe { pkt.data.twopass_stats });
                AOMPacket::Stats(b)
            }
            aom_codec_cx_pkt_kind::AOM_CODEC_FPMB_STATS_PKT => {
                let b = to_buffer(unsafe { pkt.data.firstpass_mb_stats });
                AOMPacket::MBStats(b)
            }
            aom_codec_cx_pkt_kind::AOM_CODEC_PSNR_PKT => {
                let p = unsafe { pkt.data.psnr };
                AOMPacket::PSNR(PSNR {
                    samples: p.samples,
                    sse: p.sse,
                    psnr: p.psnr,
                })
            }
            aom_codec_cx_pkt_kind::AOM_CODEC_CUSTOM_PKT => {
                let b = to_buffer(unsafe { pkt.data.raw })
;
                AOMPacket::Custom(b)
            }
            _ => panic!("No packet defined"),
        }
    }
}
/// Copies colorimetry (primaries, transfer characteristics, matrix
/// coefficients) from `fmt` into `img`.
///
/// Windows variant: the generated FFI bindings use `i32` for these enum
/// fields on this platform, hence the separate cast.
#[cfg(target_os = "windows")]
fn map_fmt_to_img(img: &mut aom_image, fmt: &Formaton) {
    img.cp = fmt.get_primaries() as i32;
    img.tc = fmt.get_xfer() as i32;
    img.mc = fmt.get_matrix() as i32;
}
/// Copies colorimetry (primaries, transfer characteristics, matrix
/// coefficients) from `fmt` into `img`.
///
/// Non-Windows variant: the generated FFI bindings use `u32` for these
/// enum fields on these platforms.
#[cfg(not(target_os = "windows"))]
fn map_fmt_to_img(img: &mut aom_image, fmt: &Formaton) {
    img.cp = fmt.get_primaries() as u32;
    img.tc = fmt.get_xfer() as u32;
    img.mc = fmt.get_matrix() as u32;
}
/// Fills `img`'s pixel-format description from `fmt`.
///
/// Only 8-bit YUV 4:2:0 is currently supported; any other format hits
/// `unimplemented!`.
fn map_formaton(img: &mut aom_image, fmt: &Formaton) {
    if fmt == YUV420 {
        img.fmt = aom_img_fmt::AOM_IMG_FMT_I420;
    } else {
        unimplemented!();
    }
    img.bit_depth = 8;
    // Average bits per pixel for 8-bit 4:2:0: 8 (luma) + 2 + 2 (chroma).
    img.bps = 12;
    // Chroma planes are subsampled by 2 in both dimensions.
    img.x_chroma_shift = 1;
    img.y_chroma_shift = 1;
    map_fmt_to_img(img, fmt);
}
/// Builds an `aom_image` that borrows `frame`'s plane buffers (no copy).
///
/// The returned image aliases `frame`'s data, so it must not outlive
/// `frame`. If `frame` is not a video frame, the geometry/format fields
/// are left zeroed.
fn img_from_frame(frame: &Frame) -> aom_image {
    // SAFETY: `aom_image` is a plain C struct for which all-zero bytes is a
    // valid (empty) state; the fields used are overwritten below.
    let mut img: aom_image = unsafe { mem::zeroed() };
    if let MediaKind::Video(ref v) = frame.kind {
        map_formaton(&mut img, &v.format);
        img.w = v.width as u32;
        img.h = v.height as u32;
        img.d_w = v.width as u32;
        img.d_h = v.height as u32;
    }
    for i in 0..frame.buf.count() {
        let s: &[u8] = frame.buf.as_slice(i).unwrap();
        // Const-to-mut cast for the FFI struct; presumably libaom only
        // reads the input planes during encoding — TODO confirm.
        img.planes[i] = s.as_ptr() as *mut u8;
        img.stride[i] = frame.buf.linesize(i).unwrap() as i32;
    }
    img
}
/// Safe wrapper around a libaom AV1 encoder context.
pub struct AV1Encoder {
    // Raw libaom codec context; initialized in `new`, destroyed in `drop`.
    pub(crate) ctx: aom_codec_ctx_t,
    // Cursor for `aom_codec_get_cx_data`; reset to null after each
    // encode/flush so packet iteration restarts from the first pending packet.
    pub(crate) iter: aom_codec_iter_t,
}
// SAFETY: the encoder exclusively owns its libaom context and raw iterator,
// and libaom contexts are presumably not bound to the creating thread —
// NOTE(review): confirm libaom's threading contract before relying on this.
unsafe impl Send for AV1Encoder {}
impl AV1Encoder {
    /// Creates a new encoder from the given configuration and presets
    /// `AOME_SET_CPUUSED` to 2 (a speed/quality trade-off).
    ///
    /// # Errors
    ///
    /// Returns the libaom error code if context initialization fails.
    ///
    /// # Panics
    ///
    /// Panics if setting CPUUSED fails after a successful init.
    pub fn new(cfg: &mut AV1EncoderConfig) -> Result<AV1Encoder, aom_codec_err_t::Type> {
        let mut ctx = MaybeUninit::uninit();
        let ret = unsafe {
            aom_codec_enc_init_ver(
                ctx.as_mut_ptr(),
                aom_codec_av1_cx(),
                cfg.cfg(),
                0,
                AOM_ENCODER_ABI_VERSION as i32,
            )
        };
        match ret {
            aom_codec_err_t::AOM_CODEC_OK => {
                // SAFETY: libaom fully initializes the context when it
                // returns AOM_CODEC_OK.
                let ctx = unsafe { ctx.assume_init() };
                let mut enc = AV1Encoder {
                    ctx,
                    iter: ptr::null(),
                };
                enc.control(aome_enc_control_id::AOME_SET_CPUUSED, 2)
                    .expect("Cannot set CPUUSED");
                Ok(enc)
            }
            _ => Err(ret),
        }
    }

    /// Applies an encoder control with an `i32` value.
    ///
    /// # Errors
    ///
    /// Returns the libaom error code if the control call fails.
    pub fn control(
        &mut self,
        id: aome_enc_control_id::Type,
        val: i32,
    ) -> Result<(), aom_codec_err_t::Type> {
        let ret = unsafe { aom_codec_control(&mut self.ctx, id as i32, val) };
        match ret {
            aom_codec_err_t::AOM_CODEC_OK => Ok(()),
            _ => Err(ret),
        }
    }

    /// Submits one frame for encoding; produced packets are then retrieved
    /// via [`AV1Encoder::get_packet`].
    ///
    /// # Errors
    ///
    /// Returns the libaom error code if encoding fails.
    ///
    /// # Panics
    ///
    /// Panics if `frame.t.pts` is `None`.
    pub fn encode(&mut self, frame: &Frame) -> Result<(), aom_codec_err_t::Type> {
        let img = img_from_frame(frame);
        let ret = unsafe { aom_codec_encode(&mut self.ctx, &img, frame.t.pts.unwrap(), 1, 0) };
        // Restart packet iteration for the data produced by this call.
        self.iter = ptr::null();
        match ret {
            aom_codec_err_t::AOM_CODEC_OK => Ok(()),
            _ => Err(ret),
        }
    }

    /// Signals end-of-stream (null frame) so the encoder drains any
    /// buffered frames; call [`AV1Encoder::get_packet`] until it returns
    /// `None` afterwards.
    ///
    /// # Errors
    ///
    /// Returns the libaom error code if the flush call fails.
    pub fn flush(&mut self) -> Result<(), aom_codec_err_t::Type> {
        let ret = unsafe { aom_codec_encode(&mut self.ctx, ptr::null_mut(), 0, 1, 0) };
        // Restart packet iteration for the drained data.
        self.iter = ptr::null();
        match ret {
            aom_codec_err_t::AOM_CODEC_OK => Ok(()),
            _ => Err(ret),
        }
    }

    /// Retrieves the next pending output packet, or `None` when the
    /// current batch is exhausted.
    pub fn get_packet(&mut self) -> Option<AOMPacket> {
        let pkt = unsafe { aom_codec_get_cx_data(&mut self.ctx, &mut self.iter) };
        if pkt.is_null() {
            None
        } else {
            // SAFETY: non-null pointers from aom_codec_get_cx_data point to
            // a valid packet owned by the encoder; AOMPacket::new copies
            // the payload out before the next libaom call can invalidate it.
            Some(AOMPacket::new(unsafe { *pkt }))
        }
    }
}
impl Drop for AV1Encoder {
    fn drop(&mut self) {
        // SAFETY: `ctx` was initialized by `aom_codec_enc_init_ver` in
        // `new` and is destroyed exactly once here.
        unsafe { aom_codec_destroy(&mut self.ctx) };
    }
}
impl AOMCodec for AV1Encoder {
    /// Exposes the raw libaom context for shared codec helpers.
    fn get_context(&mut self) -> &mut aom_codec_ctx {
        &mut self.ctx
    }
}
#[cfg(feature = "codec-trait")]
mod encoder_trait {
    use super::*;
    use av_codec::encoder::*;
    use av_codec::error::*;
    use av_data::frame::ArcFrame;
    use av_data::params::{CodecParams, MediaKind, VideoInfo};
    use av_data::value::Value;

    /// Descriptor advertising the libaom AV1 encoder to `av_codec`.
    pub struct Des {
        descr: Descr,
    }

    /// `av_codec` encoder state: configuration plus the encoder instance
    /// created by `configure`.
    pub struct Enc {
        cfg: AV1EncoderConfig,
        enc: Option<AV1Encoder>,
    }

    impl Descriptor for Des {
        type OutputEncoder = Enc;

        // Builds an unconfigured encoder with default settings.
        fn create(&self) -> Self::OutputEncoder {
            Enc {
                cfg: AV1EncoderConfig::new().unwrap(),
                enc: None,
            }
        }

        fn describe(&self) -> &Descr {
            &self.descr
        }
    }

    impl Encoder for Enc {
        // Instantiates the encoder from the accumulated configuration.
        // Reconfiguring an already-configured encoder is not supported.
        fn configure(&mut self) -> Result<()> {
            if self.enc.is_none() {
                self.cfg
                    .get_encoder()
                    .map(|enc| {
                        self.enc = Some(enc);
                    })
                    .map_err(|_err| Error::ConfigurationIncomplete)
            } else {
                unimplemented!()
            }
        }

        // AV1 carries its configuration in-band; no extradata is produced.
        fn get_extradata(&self) -> Option<Vec<u8>> {
            None
        }

        // NOTE(review): encode errors currently panic via `unimplemented!`
        // instead of mapping to an av_codec error. Panics if called before
        // `configure`.
        fn send_frame(&mut self, frame: &ArcFrame) -> Result<()> {
            let enc = self.enc.as_mut().unwrap();
            enc.encode(frame).map_err(|_| unimplemented!())
        }

        // Returns the next compressed frame, or `MoreDataNeeded` when the
        // encoder has nothing pending. Non-frame packets (stats, PSNR, …)
        // are currently unhandled and panic.
        fn receive_packet(&mut self) -> Result<Packet> {
            let enc = self.enc.as_mut().unwrap();
            if let Some(p) = enc.get_packet() {
                match p {
                    AOMPacket::Packet(pkt) => Ok(pkt),
                    _ => unimplemented!(),
                }
            } else {
                Err(Error::MoreDataNeeded)
            }
        }

        // NOTE(review): like send_frame, flush errors panic rather than
        // being mapped to an av_codec error.
        fn flush(&mut self) -> Result<()> {
            let enc = self.enc.as_mut().unwrap();
            enc.flush().map_err(|_| unimplemented!())
        }

        // Supported keys: "w", "h", "qmin", "qmax" (U64) and "timebase"
        // (Pair); anything else panics.
        fn set_option<'a>(&mut self, key: &str, val: Value<'a>) -> Result<()> {
            match (key, val) {
                ("w", Value::U64(v)) => self.cfg.g_w = v as u32,
                ("h", Value::U64(v)) => self.cfg.g_h = v as u32,
                ("qmin", Value::U64(v)) => self.cfg.rc_min_quantizer = v as u32,
                ("qmax", Value::U64(v)) => self.cfg.rc_max_quantizer = v as u32,
                ("timebase", Value::Pair(num, den)) => {
                    self.cfg.g_timebase.num = num as i32;
                    self.cfg.g_timebase.den = den as i32;
                }
                _ => unimplemented!(),
            }
            Ok(())
        }

        // Reports the current geometry; the format is fixed to YUV 4:2:0
        // (the only format map_formaton supports).
        fn get_params(&self) -> Result<CodecParams> {
            use std::sync::Arc;
            Ok(CodecParams {
                kind: Some(MediaKind::Video(VideoInfo {
                    height: self.cfg.g_h as usize,
                    width: self.cfg.g_w as usize,
                    format: Some(Arc::new(*YUV420)), })),
                codec_id: Some("av1".to_owned()),
                extradata: None,
                bit_rate: 0, convergence_window: 0,
                delay: 0,
            })
        }

        // Adopts width/height from incoming params; other fields ignored.
        fn set_params(&mut self, params: &CodecParams) -> Result<()> {
            if let Some(MediaKind::Video(ref info)) = params.kind {
                self.cfg.g_w = info.width as u32;
                self.cfg.g_h = info.height as u32;
            }
            Ok(())
        }
    }

    /// Public descriptor for registering this encoder with `av_codec`.
    pub const AV1_DESCR: &Des = &Des {
        descr: Descr {
            codec: "av1",
            name: "aom",
            desc: "libaom AV1 encoder",
            mime: "video/AV1",
        },
    };
}
#[cfg(feature = "codec-trait")]
pub use self::encoder_trait::AV1_DESCR;
#[cfg(test)]
pub(crate) mod tests {
    use super::*;

    #[test]
    fn init() {
        let mut c = AV1EncoderConfig::new().unwrap();
        let mut e = c.get_encoder().unwrap();
        println!("{}", e.error_to_str());
    }

    use av_data::rational::*;
    use av_data::timeinfo::TimeInfo;

    /// Builds a constant-quality encoder for `w`×`h` frames using `t`'s
    /// timebase, with the quantizer range pinned to [0, 0].
    ///
    /// # Panics
    ///
    /// Panics if either dimension is odd (4:2:0 chroma requires even sizes)
    /// or if encoder creation/control fails.
    pub fn setup(w: u32, h: u32, t: &TimeInfo) -> AV1Encoder {
        if (w % 2) != 0 || (h % 2) != 0 {
            panic!("Invalid frame size: w: {} h: {}", w, h);
        }
        let mut cfg = AV1EncoderConfig::new()
            .unwrap()
            .width(w)
            .height(h)
            .timebase(t.timebase.unwrap())
            .rc_min_quantizer(0)
            // Fixed: this second call was previously a duplicated
            // `rc_min_quantizer(0)`; the maximum quantizer is what the
            // qmin/qmax pair (cf. the codec-trait test) intends to set.
            .rc_max_quantizer(0)
            .threads(4)
            .pass(aom_enc_pass::AOM_RC_ONE_PASS)
            .rc_end_usage(aom_rc_mode::AOM_CQ);
        let mut enc = cfg.get_encoder().unwrap();
        enc.control(aome_enc_control_id::AOME_SET_CQ_LEVEL, 4)
            .unwrap();
        enc.control(aome_enc_control_id::AOME_SET_CPUUSED, 2)
            .unwrap();
        enc
    }

    /// Allocates a default (zeroed) YUV 4:2:0 frame of the given size.
    pub fn setup_frame(w: u32, h: u32, t: &TimeInfo) -> Frame {
        use av_data::frame::*;
        use av_data::pixel::formats;
        use std::sync::Arc;
        let v = VideoInfo::new(
            w as usize,
            h as usize,
            false,
            FrameType::OTHER,
            Arc::new(*formats::YUV420),
        );
        Frame::new_default_frame(v, Some(t.clone()))
    }

    #[test]
    fn encode() {
        let w = 200;
        let h = 200;
        let t = TimeInfo {
            pts: Some(0),
            dts: Some(0),
            duration: Some(1),
            timebase: Some(Rational64::new(1, 1000)),
            user_private: None,
        };
        let mut e = setup(w, h, &t);
        let mut f = setup_frame(w, h, &t);
        let mut out = 0;
        for i in 0..100 {
            // Fixed: set the timestamp BEFORE encoding; previously it was
            // updated after the encode call, so the first two frames were
            // both submitted with pts 0.
            f.t.pts = Some(i);
            e.encode(&f).unwrap();
            loop {
                let p = e.get_packet();
                if p.is_none() {
                    break;
                } else {
                    out = 1;
                }
            }
        }
        if out != 1 {
            panic!("No packet produced");
        }
    }

    #[cfg(all(test, feature = "codec-trait"))]
    #[test]
    fn encode_codec_trait() {
        use super::AV1_DESCR;
        use av_codec::common::CodecList;
        use av_codec::encoder::*;
        use av_codec::error::*;
        use std::sync::Arc;
        let encoders = Codecs::from_list(&[AV1_DESCR]);
        let mut ctx = Context::by_name(&encoders, "av1").unwrap();
        let w = 200;
        let h = 200;
        ctx.set_option("w", u64::from(w)).unwrap();
        ctx.set_option("h", u64::from(h)).unwrap();
        ctx.set_option("timebase", (1, 1000)).unwrap();
        ctx.set_option("qmin", 0u64).unwrap();
        ctx.set_option("qmax", 0u64).unwrap();
        let t = TimeInfo {
            pts: Some(0),
            dts: Some(0),
            duration: Some(1),
            timebase: Some(Rational64::new(1, 1000)),
            user_private: None,
        };
        ctx.configure().unwrap();
        let mut f = Arc::new(setup_frame(w, h, &t));
        let mut out = 0;
        for i in 0..100 {
            Arc::get_mut(&mut f).unwrap().t.pts = Some(i);
            println!("Sending {}", i);
            ctx.send_frame(&f).unwrap();
            loop {
                match ctx.receive_packet() {
                    Ok(p) => {
                        println!("{:#?}", p);
                        out = 1
                    }
                    Err(e) => match e {
                        Error::MoreDataNeeded => break,
                        _ => unimplemented!(),
                    },
                }
            }
        }
        ctx.flush().unwrap();
        // Drain packets buffered by the encoder after end-of-stream.
        loop {
            match ctx.receive_packet() {
                Ok(p) => {
                    println!("{:#?}", p);
                    out = 1
                }
                Err(e) => match e {
                    Error::MoreDataNeeded => break,
                    _ => unimplemented!(),
                },
            }
        }
        if out != 1 {
            panic!("No packet produced");
        }
    }
}