cros_codecs/decoder/stateless/h264/vaapi.rs

// Copyright 2022 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use std::rc::Rc;

use anyhow::anyhow;
use anyhow::Context as AnyhowContext;
use libva::BufferType;
use libva::Display;
use libva::IQMatrix;
use libva::IQMatrixBufferH264;
use libva::Picture as VaPicture;
use libva::PictureParameter;
use libva::PictureParameterBufferH264;
use libva::SliceParameter;
use libva::SurfaceMemoryDescriptor;

use crate::backend::vaapi::decoder::va_surface_id;
use crate::backend::vaapi::decoder::DecodedHandle as VADecodedHandle;
use crate::backend::vaapi::decoder::PoolCreationMode;
use crate::backend::vaapi::decoder::VaStreamInfo;
use crate::backend::vaapi::decoder::VaapiBackend;
use crate::backend::vaapi::decoder::VaapiPicture;
use crate::codec::h264::dpb::Dpb;
use crate::codec::h264::dpb::DpbEntry;
use crate::codec::h264::parser::Level;
use crate::codec::h264::parser::Pps;
use crate::codec::h264::parser::Profile;
use crate::codec::h264::parser::Slice;
use crate::codec::h264::parser::SliceHeader;
use crate::codec::h264::parser::Sps;
use crate::codec::h264::picture::Field;
use crate::codec::h264::picture::PictureData;
use crate::codec::h264::picture::Reference;
use crate::decoder::stateless::h264::StatelessH264DecoderBackend;
use crate::decoder::stateless::h264::H264;
use crate::decoder::stateless::NewPictureError;
use crate::decoder::stateless::NewPictureResult;
use crate::decoder::stateless::NewStatelessDecoderError;
use crate::decoder::stateless::StatelessBackendResult;
use crate::decoder::stateless::StatelessDecoder;
use crate::decoder::stateless::StatelessDecoderBackendPicture;
use crate::decoder::BlockingMode;

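// Maps the parsed SPS onto the VA-API stream parameters (profile, render
// target format, minimum number of surfaces, coded and visible resolution)
// used to set up the decode context.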
impl VaStreamInfo for &Rc<Sps> {
    fn va_profile(&self) -> anyhow::Result<i32> {
        let profile_idc = self.profile_idc;
        let profile = Profile::n(profile_idc)
            .with_context(|| format!("Invalid profile_idc {:?}", profile_idc))?;

        match profile {
            Profile::Baseline => {
                if self.constraint_set0_flag {
                    Ok(libva::VAProfile::VAProfileH264ConstrainedBaseline)
                } else {
                    Err(anyhow!(
                        "Unsupported stream: profile_idc=66, but constraint_set0_flag is unset"
                    ))
                }
            }
            Profile::Main => Ok(libva::VAProfile::VAProfileH264Main),
            Profile::Extended => {
                if self.constraint_set1_flag {
                    Ok(libva::VAProfile::VAProfileH264Main)
                } else {
                    Err(anyhow!(
                        "Unsupported stream: profile_idc=88, but constraint_set1_flag is unset"
                    ))
                }
            }
            Profile::High | Profile::High422P | Profile::High10 => {
                Ok(libva::VAProfile::VAProfileH264High)
            }
        }
    }

    fn rt_format(&self) -> anyhow::Result<u32> {
        let bit_depth_luma = self.bit_depth_luma_minus8 + 8;
        let chroma_format_idc = self.chroma_format_idc;

        match (bit_depth_luma, chroma_format_idc) {
            (8, 0) | (8, 1) => Ok(libva::constants::VA_RT_FORMAT_YUV420),
            (8, 2) => Ok(libva::constants::VA_RT_FORMAT_YUV422),
            (8, 3) => Ok(libva::constants::VA_RT_FORMAT_YUV444),
            (10, 0) | (10, 1) => Ok(libva::constants::VA_RT_FORMAT_YUV420_10),
            (10, 2) => Ok(libva::constants::VA_RT_FORMAT_YUV422_10),
            (10, 3) => Ok(libva::constants::VA_RT_FORMAT_YUV444_10),
            (12, 0) | (12, 1) => Ok(libva::constants::VA_RT_FORMAT_YUV420_12),
            (12, 2) => Ok(libva::constants::VA_RT_FORMAT_YUV422_12),
            (12, 3) => Ok(libva::constants::VA_RT_FORMAT_YUV444_12),
            _ => Err(anyhow!(
                "unsupported bit depth/chroma format pair {}, {}",
                bit_depth_luma,
                chroma_format_idc
            )),
        }
    }

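    // The stream's maximum DPB size, plus a few extra surfaces of headroom.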
    fn min_num_surfaces(&self) -> usize {
        self.max_dpb_frames() + 4
    }

    fn coded_size(&self) -> (u32, u32) {
        (self.width(), self.height())
    }

    fn visible_rect(&self) -> ((u32, u32), (u32, u32)) {
        let rect = self.visible_rectangle();

        ((rect.min.x, rect.min.y), (rect.max.x, rect.max.y))
    }
}

/// Builds a `libva::PictureH264` from `h264_pic`, backed by `surface_id`.
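///
/// For field pictures, if `merge_other_field` is true and the complementary
/// field is available, its picture order count is merged in so that the entry
/// describes the complete frame; otherwise the entry is flagged as a top or
/// bottom field.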
fn fill_va_h264_pic(
    h264_pic: &PictureData,
    surface_id: libva::VASurfaceID,
    merge_other_field: bool,
) -> libva::PictureH264 {
    let mut flags = 0;
    let frame_idx = if matches!(h264_pic.reference(), Reference::LongTerm) {
        flags |= libva::constants::VA_PICTURE_H264_LONG_TERM_REFERENCE;
        h264_pic.long_term_frame_idx
    } else {
        if matches!(h264_pic.reference(), Reference::ShortTerm { .. }) {
            flags |= libva::constants::VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        }

        h264_pic.frame_num
    };

    let top_field_order_cnt;
    let bottom_field_order_cnt;

    match h264_pic.field {
        Field::Frame => {
            top_field_order_cnt = h264_pic.top_field_order_cnt;
            bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
        }
        Field::Top => {
            match (merge_other_field, h264_pic.other_field()) {
                (true, Some(other_field)) => {
                    bottom_field_order_cnt = other_field.borrow().bottom_field_order_cnt
                }
                (_, _) => {
                    flags |= libva::constants::VA_PICTURE_H264_TOP_FIELD;
                    bottom_field_order_cnt = 0;
                }
            }

            top_field_order_cnt = h264_pic.top_field_order_cnt;
        }
        Field::Bottom => {
            match (merge_other_field, h264_pic.other_field()) {
                (true, Some(other_field)) => {
                    top_field_order_cnt = other_field.borrow().top_field_order_cnt
                }
                (_, _) => {
                    flags |= libva::constants::VA_PICTURE_H264_BOTTOM_FIELD;
                    top_field_order_cnt = 0;
                }
            }

            bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
        }
    }

    libva::PictureH264::new(
        surface_id,
        frame_idx,
        flags,
        top_field_order_cnt,
        bottom_field_order_cnt,
    )
}

/// Builds an invalid `libva::PictureH264`. These pictures are used to fill
/// empty array slots for which there is no data.
fn build_invalid_va_h264_pic() -> libva::PictureH264 {
    libva::PictureH264::new(
        libva::constants::VA_INVALID_ID,
        0,
        libva::constants::VA_PICTURE_H264_INVALID,
        0,
        0,
    )
}

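/// Builds the VA IQ matrix buffer from the PPS scaling lists, converting them
/// from zig-zag scan order into the raster order expected by libva.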
fn build_iq_matrix(pps: &Pps) -> BufferType {
    let mut scaling_list4x4 = [[0; 16]; 6];
    let mut scaling_list8x8 = [[0; 64]; 2];

    (0..6).for_each(|i| {
        super::get_raster_from_zigzag_4x4(pps.scaling_lists_4x4[i], &mut scaling_list4x4[i]);
    });

    (0..2).for_each(|i| {
        super::get_raster_from_zigzag_8x8(pps.scaling_lists_8x8[i], &mut scaling_list8x8[i]);
    });

    BufferType::IQMatrix(IQMatrix::H264(IQMatrixBufferH264::new(
        scaling_list4x4,
        scaling_list8x8,
    )))
}

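/// Builds the VA picture parameter buffer for the current picture: its own
/// `PictureH264` entry, the short-term and long-term DPB references (padded to
/// 16 entries with invalid pictures), and the relevant SPS/PPS fields.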
fn build_pic_param<M: SurfaceMemoryDescriptor>(
    hdr: &SliceHeader,
    current_picture: &PictureData,
    current_surface_id: libva::VASurfaceID,
    dpb: &Dpb<VADecodedHandle<M>>,
    sps: &Sps,
    pps: &Pps,
) -> anyhow::Result<BufferType> {
    let curr_pic = fill_va_h264_pic(current_picture, current_surface_id, false);

    let mut refs: Vec<_> = dpb
        .short_term_refs_iter()
        .filter(|handle| {
            let pic = handle.pic.borrow();
            !pic.nonexisting && !pic.is_second_field()
        })
        .cloned()
        .collect();

    let mut va_refs = vec![];

    for handle in &refs {
        let surface_id = va_surface_id(&handle.handle);
        let ref_pic = handle.pic.borrow();
        let pic = fill_va_h264_pic(&ref_pic, surface_id, true);
        va_refs.push(pic);
    }

    refs.clear();

    let mut refs: Vec<_> = dpb
        .long_term_refs_iter()
        .filter(|handle| {
            let pic = handle.pic.borrow();
            !pic.is_second_field()
        })
        .cloned()
        .collect();

    for handle in &refs {
        let surface_id = va_surface_id(&handle.handle);
        let ref_pic = handle.pic.borrow();
        let pic = fill_va_h264_pic(&ref_pic, surface_id, true);
        va_refs.push(pic);
    }

    for _ in va_refs.len()..16 {
        va_refs.push(build_invalid_va_h264_pic());
    }

    refs.clear();

    let seq_fields = libva::H264SeqFields::new(
        sps.chroma_format_idc as u32,
        sps.separate_colour_plane_flag as u32,
        sps.gaps_in_frame_num_value_allowed_flag as u32,
        sps.frame_mbs_only_flag as u32,
        sps.mb_adaptive_frame_field_flag as u32,
        sps.direct_8x8_inference_flag as u32,
        (sps.level_idc >= Level::L3_1) as u32, /* see A.3.3.2 */
        sps.log2_max_frame_num_minus4 as u32,
        sps.pic_order_cnt_type as u32,
        sps.log2_max_pic_order_cnt_lsb_minus4 as u32,
        sps.delta_pic_order_always_zero_flag as u32,
    );
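    // FrameHeightInMbs = (2 - frame_mbs_only_flag) * PicHeightInMapUnits, so
    // the height coded in map units is doubled for interlaced streams.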
    let interlaced = !sps.frame_mbs_only_flag as u32;
    let picture_height_in_mbs_minus1 = ((sps.pic_height_in_map_units_minus1 + 1) << interlaced) - 1;

    let pic_fields = libva::H264PicFields::new(
        pps.entropy_coding_mode_flag as u32,
        pps.weighted_pred_flag as u32,
        pps.weighted_bipred_idc as u32,
        pps.transform_8x8_mode_flag as u32,
        hdr.field_pic_flag as u32,
        pps.constrained_intra_pred_flag as u32,
        pps.bottom_field_pic_order_in_frame_present_flag as u32,
        pps.deblocking_filter_control_present_flag as u32,
        pps.redundant_pic_cnt_present_flag as u32,
        (current_picture.nal_ref_idc != 0) as u32,
    );

    let va_refs = va_refs.try_into();
    let va_refs = match va_refs {
        Ok(va_refs) => va_refs,
        Err(_) => {
            panic!("Bug: wrong number of references, expected 16");
        }
    };

    let pic_param = PictureParameterBufferH264::new(
        curr_pic,
        va_refs,
        sps.pic_width_in_mbs_minus1,
        picture_height_in_mbs_minus1,
        sps.bit_depth_luma_minus8,
        sps.bit_depth_chroma_minus8,
        sps.max_num_ref_frames,
        &seq_fields,
        0, /* FMO not supported by VA */
        0, /* FMO not supported by VA */
        0, /* FMO not supported by VA */
        pps.pic_init_qp_minus26,
        pps.pic_init_qs_minus26,
        pps.chroma_qp_index_offset,
        pps.second_chroma_qp_index_offset,
        &pic_fields,
        hdr.frame_num,
    );

    Ok(BufferType::PictureParameter(PictureParameter::H264(
        pic_param,
    )))
}

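/// Converts a reference picture list into the fixed-size array of 32
/// `PictureH264` entries expected by VA, padding unused slots with invalid
/// pictures.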
fn fill_ref_pic_list<M: SurfaceMemoryDescriptor>(
    ref_list_x: &[&DpbEntry<VADecodedHandle<M>>],
) -> [libva::PictureH264; 32] {
    let mut va_pics = vec![];

    for handle in ref_list_x {
        let surface_id = va_surface_id(&handle.handle);
        let ref_pic = handle.pic.borrow();
        let merge = matches!(ref_pic.field, Field::Frame);
        let va_pic = fill_va_h264_pic(&ref_pic, surface_id, merge);

        va_pics.push(va_pic);
    }

    for _ in va_pics.len()..32 {
        va_pics.push(build_invalid_va_h264_pic());
    }

    let va_pics: [libva::PictureH264; 32] = match va_pics.try_into() {
        Ok(va_pics) => va_pics,
        Err(e) => panic!(
            "Bug: wrong number of references, expected 32, got {:?}",
            e.len()
        ),
    };

    va_pics
}

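/// Builds the VA slice parameter buffer for a single slice, including both
/// reference picture lists and, when weighted prediction applies to the slice
/// type, the explicit prediction weight tables.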
fn build_slice_param<M: SurfaceMemoryDescriptor>(
    hdr: &SliceHeader,
    slice_size: usize,
    ref_list_0: &[&DpbEntry<VADecodedHandle<M>>],
    ref_list_1: &[&DpbEntry<VADecodedHandle<M>>],
    sps: &Sps,
    pps: &Pps,
) -> anyhow::Result<BufferType> {
    let ref_list_0 = fill_ref_pic_list(ref_list_0);
    let ref_list_1 = fill_ref_pic_list(ref_list_1);
    let pwt = &hdr.pred_weight_table;

    let mut luma_weight_l0_flag = false;
    let mut chroma_weight_l0_flag = false;
    let mut luma_weight_l0 = [0i16; 32];
    let mut luma_offset_l0 = [0i16; 32];
    let mut chroma_weight_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];
    let mut chroma_offset_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];

    let mut luma_weight_l1_flag = false;
    let mut chroma_weight_l1_flag = false;
    let mut luma_weight_l1 = [0i16; 32];
    let mut luma_offset_l1 = [0i16; 32];
    let mut chroma_weight_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];
    let mut chroma_offset_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];

    let mut fill_l0 = false;
    let mut fill_l1 = false;

    if pps.weighted_pred_flag && (hdr.slice_type.is_p() || hdr.slice_type.is_sp()) {
        fill_l0 = true;
    } else if pps.weighted_bipred_idc == 1 && hdr.slice_type.is_b() {
        fill_l0 = true;
        fill_l1 = true;
    }

    if fill_l0 {
        luma_weight_l0_flag = true;

        for i in 0..=hdr.num_ref_idx_l0_active_minus1 as usize {
            luma_weight_l0[i] = pwt.luma_weight_l0[i];
            luma_offset_l0[i] = i16::from(pwt.luma_offset_l0[i]);
        }

        chroma_weight_l0_flag = sps.chroma_array_type() != 0;
        if chroma_weight_l0_flag {
            for i in 0..=hdr.num_ref_idx_l0_active_minus1 as usize {
                for j in 0..2 {
                    chroma_weight_l0[i][j] = pwt.chroma_weight_l0[i][j];
                    chroma_offset_l0[i][j] = i16::from(pwt.chroma_offset_l0[i][j]);
                }
            }
        }
    }

    if fill_l1 {
        luma_weight_l1_flag = true;

        luma_weight_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)].clone_from_slice(
            &pwt.luma_weight_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)],
        );
        luma_offset_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)].clone_from_slice(
            &pwt.luma_offset_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)],
        );

        chroma_weight_l1_flag = sps.chroma_array_type() != 0;
        if chroma_weight_l1_flag {
            for i in 0..=hdr.num_ref_idx_l1_active_minus1 as usize {
                for j in 0..2 {
                    chroma_weight_l1[i][j] = pwt.chroma_weight_l1[i][j];
                    chroma_offset_l1[i][j] = i16::from(pwt.chroma_offset_l1[i][j]);
                }
            }
        }
    }

    let slice_param = libva::SliceParameterBufferH264::new(
        slice_size as u32,
        0,
        libva::constants::VA_SLICE_DATA_FLAG_ALL,
        hdr.header_bit_size as u16,
        hdr.first_mb_in_slice as u16,
        hdr.slice_type as u8,
        hdr.direct_spatial_mv_pred_flag as u8,
        hdr.num_ref_idx_l0_active_minus1,
        hdr.num_ref_idx_l1_active_minus1,
        hdr.cabac_init_idc,
        hdr.slice_qp_delta,
        hdr.disable_deblocking_filter_idc,
        hdr.slice_alpha_c0_offset_div2,
        hdr.slice_beta_offset_div2,
        ref_list_0,
        ref_list_1,
        pwt.luma_log2_weight_denom,
        pwt.chroma_log2_weight_denom,
        luma_weight_l0_flag as u8,
        luma_weight_l0,
        luma_offset_l0,
        chroma_weight_l0_flag as u8,
        chroma_weight_l0,
        chroma_offset_l0,
        luma_weight_l1_flag as u8,
        luma_weight_l1,
        luma_offset_l1,
        chroma_weight_l1_flag as u8,
        chroma_weight_l1,
        chroma_offset_l1,
    );

    Ok(BufferType::SliceParameter(SliceParameter::H264(
        slice_param,
    )))
}

impl<M: SurfaceMemoryDescriptor + 'static> StatelessDecoderBackendPicture<H264>
    for VaapiBackend<M>
{
    type Picture = VaapiPicture<M>;
}

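// Per-picture decoding flow, as driven by the stateless decoder: `new_picture`
// (or `new_field_picture`) obtains a surface, `start_picture` attaches the
// picture parameter and IQ matrix buffers, `decode_slice` adds one slice
// parameter/data pair per slice, and `submit_picture` hands the whole batch to
// VA-API for decoding.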
impl<M: SurfaceMemoryDescriptor + 'static> StatelessH264DecoderBackend for VaapiBackend<M> {
    fn new_sequence(&mut self, sps: &Rc<Sps>) -> StatelessBackendResult<()> {
        self.new_sequence(sps, PoolCreationMode::Highest)
    }

    fn start_picture(
        &mut self,
        picture: &mut Self::Picture,
        picture_data: &PictureData,
        sps: &Sps,
        pps: &Pps,
        dpb: &Dpb<Self::Handle>,
        hdr: &SliceHeader,
    ) -> StatelessBackendResult<()> {
        let metadata = self.metadata_state.get_parsed()?;
        let context = &metadata.context;

        let surface_id = picture.surface().id();

        let pic_param = build_pic_param(hdr, picture_data, surface_id, dpb, sps, pps)?;
        let pic_param = context
            .create_buffer(pic_param)
            .context("while creating picture parameter buffer")?;

        let iq_matrix = build_iq_matrix(pps);
        let iq_matrix = context
            .create_buffer(iq_matrix)
            .context("while creating IQ matrix buffer")?;

        picture.add_buffer(pic_param);
        picture.add_buffer(iq_matrix);

        Ok(())
    }

    fn decode_slice(
        &mut self,
        picture: &mut Self::Picture,
        slice: &Slice,
        sps: &Sps,
        pps: &Pps,
        ref_pic_list0: &[&DpbEntry<Self::Handle>],
        ref_pic_list1: &[&DpbEntry<Self::Handle>],
    ) -> StatelessBackendResult<()> {
        let metadata = self.metadata_state.get_parsed()?;
        let context = &metadata.context;

        let slice_param = context
            .create_buffer(build_slice_param(
                &slice.header,
                slice.nalu.size,
                ref_pic_list0,
                ref_pic_list1,
                sps,
                pps,
            )?)
            .context("while creating slice params buffer")?;

        picture.add_buffer(slice_param);

        let slice_data = context
            .create_buffer(BufferType::SliceData(Vec::from(slice.nalu.as_ref())))
            .context("while creating slice data buffer")?;

        picture.add_buffer(slice_data);

        Ok(())
    }

    fn submit_picture(&mut self, picture: Self::Picture) -> StatelessBackendResult<Self::Handle> {
        self.process_picture::<H264>(picture)
    }

    fn new_picture(&mut self, timestamp: u64) -> NewPictureResult<Self::Picture> {
        let highest_pool = self.highest_pool();
        let surface = highest_pool
            .get_surface()
            .ok_or(NewPictureError::OutOfOutputBuffers)?;

        let metadata = self.metadata_state.get_parsed()?;

        Ok(VaPicture::new(
            timestamp,
            Rc::clone(&metadata.context),
            surface,
        ))
    }

    fn new_field_picture(
        &mut self,
        timestamp: u64,
        first_field: &Self::Handle,
    ) -> NewPictureResult<Self::Picture> {
        // Decode to the same surface as the first field picture.
        Ok(first_field
            .borrow()
            .new_picture_from_same_surface(timestamp))
    }
}

impl<M: SurfaceMemoryDescriptor + 'static> StatelessDecoder<H264, VaapiBackend<M>> {
    /// Creates a new instance of the decoder using the VAAPI backend.
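    ///
    /// A minimal construction sketch, assuming a libva-capable device and that
    /// this crate is consumed as `cros_codecs` (it mirrors the test setup at
    /// the bottom of this file):
    ///
    /// ```ignore
    /// use libva::Display;
    ///
    /// use cros_codecs::decoder::stateless::h264::H264;
    /// use cros_codecs::decoder::stateless::StatelessDecoder;
    /// use cros_codecs::decoder::BlockingMode;
    ///
    /// // Open the default VA display and build a blocking H.264 decoder.
    /// let display = Display::open().expect("failed to open the VA display");
    /// let decoder =
    ///     StatelessDecoder::<H264, _>::new_vaapi::<()>(display, BlockingMode::Blocking)
    ///         .expect("failed to create the VAAPI H.264 decoder");
    /// ```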
    pub fn new_vaapi<S>(
        display: Rc<Display>,
        blocking_mode: BlockingMode,
    ) -> Result<Self, NewStatelessDecoderError>
    where
        M: From<S>,
        S: From<M>,
    {
        Self::new(VaapiBackend::new(display, false), blocking_mode)
    }
}

#[cfg(test)]
mod tests {
    use libva::Display;

    use crate::codec::h264::parser::Nalu;
    use crate::decoder::stateless::h264::H264;
    use crate::decoder::stateless::tests::test_decode_stream;
    use crate::decoder::stateless::tests::TestStream;
    use crate::decoder::stateless::StatelessDecoder;
    use crate::decoder::BlockingMode;
    use crate::utils::simple_playback_loop;
    use crate::utils::simple_playback_loop_owned_frames;
    use crate::utils::NalIterator;
    use crate::DecodedFormat;

    /// Run `test` using the VAAPI decoder with the given output format and blocking mode.
    fn test_decoder_vaapi(
        test: &TestStream,
        output_format: DecodedFormat,
        blocking_mode: BlockingMode,
    ) {
        let display = Display::open().unwrap();
        let decoder = StatelessDecoder::<H264, _>::new_vaapi::<()>(display, blocking_mode).unwrap();

        test_decode_stream(
            |d, s, f| {
                simple_playback_loop(
                    d,
                    NalIterator::<Nalu>::new(s),
                    f,
                    &mut simple_playback_loop_owned_frames,
                    output_format,
                    blocking_mode,
                )
            },
            decoder,
            test,
            true,
            false,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_block() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_block() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_block() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_high_block() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_high_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH;
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_block() {
        use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS;
        test_decoder_vaapi(
            &DECODE_TEST_25FPS,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS;
        test_decoder_vaapi(
            &DECODE_TEST_25FPS,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_interlaced_block() {
        use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS_INTERLACED;
        test_decoder_vaapi(
            &DECODE_TEST_25FPS_INTERLACED,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_interlaced_nonblock() {
        use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS_INTERLACED;
        test_decoder_vaapi(
            &DECODE_TEST_25FPS_INTERLACED,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }
}