cros_codecs/decoder/stateless/
h264.rs

1// Copyright 2023 The ChromiumOS Authors
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#[cfg(any(test, fuzzing))]
6mod dummy;
7#[cfg(feature = "vaapi")]
8mod vaapi;
9
10use std::collections::btree_map::Entry;
11use std::io::Cursor;
12use std::os::fd::AsFd;
13use std::os::fd::BorrowedFd;
14use std::rc::Rc;
15
16use anyhow::anyhow;
17use anyhow::Context;
18use log::debug;
19
20use crate::codec::h264::dpb::Dpb;
21use crate::codec::h264::dpb::DpbEntry;
22use crate::codec::h264::dpb::DpbPicRefList;
23use crate::codec::h264::dpb::MmcoError;
24use crate::codec::h264::dpb::ReferencePicLists;
25use crate::codec::h264::parser::MaxLongTermFrameIdx;
26use crate::codec::h264::parser::Nalu;
27use crate::codec::h264::parser::NaluType;
28use crate::codec::h264::parser::Parser;
29use crate::codec::h264::parser::Pps;
30use crate::codec::h264::parser::RefPicListModification;
31use crate::codec::h264::parser::Slice;
32use crate::codec::h264::parser::SliceHeader;
33use crate::codec::h264::parser::SliceType;
34use crate::codec::h264::parser::Sps;
35use crate::codec::h264::picture::Field;
36use crate::codec::h264::picture::FieldRank;
37use crate::codec::h264::picture::IsIdr;
38use crate::codec::h264::picture::PictureData;
39use crate::codec::h264::picture::RcPictureData;
40use crate::codec::h264::picture::Reference;
41use crate::decoder::stateless::DecodeError;
42use crate::decoder::stateless::DecodingState;
43use crate::decoder::stateless::NewPictureResult;
44use crate::decoder::stateless::PoolLayer;
45use crate::decoder::stateless::StatelessBackendResult;
46use crate::decoder::stateless::StatelessCodec;
47use crate::decoder::stateless::StatelessDecoder;
48use crate::decoder::stateless::StatelessDecoderBackend;
49use crate::decoder::stateless::StatelessDecoderBackendPicture;
50use crate::decoder::stateless::StatelessVideoDecoder;
51use crate::decoder::stateless::TryFormat;
52use crate::decoder::BlockingMode;
53use crate::decoder::DecodedHandle;
54use crate::decoder::DecoderEvent;
55use crate::decoder::StreamInfo;
56use crate::Resolution;
57
/// Scatter the zigzag-ordered 8x8 scaling-list coefficients in `src` into
/// raster order in `dst`, i.e. `dst[ZIGZAG_8X8[i]] = src[i]`.
fn get_raster_from_zigzag_8x8(src: [u8; 64], dst: &mut [u8; 64]) {
    // Raster index corresponding to each position of the 8x8 zigzag scan.
    const ZIGZAG_8X8: [usize; 64] = [
        0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, 12, 19, 26, 33, 40, 48, 41, 34, 27,
        20, 13, 6, 7, 14, 21, 28, 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
        58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63,
    ];

    for (&raster_idx, value) in ZIGZAG_8X8.iter().zip(src) {
        dst[raster_idx] = value;
    }
}
69
/// Scatter the zigzag-ordered 4x4 scaling-list coefficients in `src` into
/// raster order in `dst`, i.e. `dst[ZIGZAG_4X4[i]] = src[i]`.
fn get_raster_from_zigzag_4x4(src: [u8; 16], dst: &mut [u8; 16]) {
    // Raster index corresponding to each position of the 4x4 zigzag scan.
    const ZIGZAG_4X4: [usize; 16] = [0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15];

    for (&raster_idx, value) in ZIGZAG_4X4.iter().zip(src) {
        dst[raster_idx] = value;
    }
}
77
/// Stateless backend methods specific to H.264.
pub trait StatelessH264DecoderBackend:
    StatelessDecoderBackend + StatelessDecoderBackendPicture<H264>
{
    /// Called when a new SPS is parsed, i.e. when the stream parameters
    /// (resolution, profile, bit depth, ...) may have changed.
    fn new_sequence(&mut self, sps: &Rc<Sps>) -> StatelessBackendResult<()>;

    /// Called when the decoder determines that a frame or field was found.
    /// `timestamp` is the timestamp of the input unit this picture comes from.
    fn new_picture(&mut self, timestamp: u64) -> NewPictureResult<Self::Picture>;

    /// Called when the decoder determines that a second field was found.
    /// Indicates that the underlying BackendHandle is to be shared between the
    /// two pictures. This is so both fields decode to the same underlying
    /// resource and can thus be presented together as a single frame.
    fn new_field_picture(
        &mut self,
        timestamp: u64,
        first_field: &Self::Handle,
    ) -> NewPictureResult<Self::Picture>;

    /// Called by the decoder when starting a new frame or field, before any
    /// slice of that picture is submitted. `dpb` contains the reference
    /// pictures available at that point.
    fn start_picture(
        &mut self,
        picture: &mut Self::Picture,
        picture_data: &PictureData,
        sps: &Sps,
        pps: &Pps,
        dpb: &Dpb<Self::Handle>,
        hdr: &SliceHeader,
    ) -> StatelessBackendResult<()>;

    /// Called to dispatch a decode operation to the backend. `ref_pic_list0`
    /// and `ref_pic_list1` are the per-slice reference lists computed per
    /// section 8.2.4 of the specification.
    #[allow(clippy::too_many_arguments)]
    fn decode_slice(
        &mut self,
        picture: &mut Self::Picture,
        slice: &Slice,
        sps: &Sps,
        pps: &Pps,
        ref_pic_list0: &[&DpbEntry<Self::Handle>],
        ref_pic_list1: &[&DpbEntry<Self::Handle>],
    ) -> StatelessBackendResult<()>;

    /// Called when the decoder wants the backend to finish the decoding
    /// operations for `picture`. At this point, `decode_slice` has been called
    /// for all slices.
    ///
    /// This call will assign the ownership of the BackendHandle to the Picture
    /// and then assign the ownership of the Picture to the Handle.
    fn submit_picture(&mut self, picture: Self::Picture) -> StatelessBackendResult<Self::Handle>;
}
129
/// Keeps track of the last values seen for negotiation purposes.
///
/// A change in any of these fields triggers a format renegotiation with the
/// client (see `negotiation_possible`).
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct NegotiationInfo {
    /// The current coded resolution.
    coded_resolution: Resolution,
    /// Same meaning as the specification.
    profile_idc: u8,
    /// Same meaning as the specification.
    bit_depth_luma_minus8: u8,
    /// Same meaning as the specification.
    bit_depth_chroma_minus8: u8,
    /// Same meaning as the specification.
    chroma_format_idc: u8,
    /// The maximum size of the DPB in frames.
    max_dpb_frames: usize,
    /// Whether this is an interlaced stream, i.e. `!frame_mbs_only_flag`.
    interlaced: bool,
}
148
149impl From<&Sps> for NegotiationInfo {
150    fn from(sps: &Sps) -> Self {
151        NegotiationInfo {
152            coded_resolution: Resolution::from((sps.width(), sps.height())),
153            profile_idc: sps.profile_idc,
154            bit_depth_luma_minus8: sps.bit_depth_luma_minus8,
155            bit_depth_chroma_minus8: sps.bit_depth_chroma_minus8,
156            chroma_format_idc: sps.chroma_format_idc,
157            max_dpb_frames: sps.max_dpb_frames(),
158            interlaced: !sps.frame_mbs_only_flag,
159        }
160    }
161}
162
/// Selects which of the two per-slice reference picture lists (RefPicList0 or
/// RefPicList1 in the specification) an operation applies to.
#[derive(Copy, Clone, Debug)]
enum RefPicList {
    RefPicList0,
    RefPicList1,
}
168
/// Cached state from the previous *reference* picture, as required by the
/// picture order count computation (spec 8.2.1) for subsequent pictures.
pub struct PrevReferencePicInfo {
    // `frame_num` of the previous reference picture.
    frame_num: u32,
    // Whether the previous reference picture carried a MMCO equal to 5.
    has_mmco_5: bool,
    // TopFieldOrderCnt of the previous reference picture.
    top_field_order_cnt: i32,
    // PicOrderCntMsb of the previous reference picture.
    pic_order_cnt_msb: i32,
    // pic_order_cnt_lsb of the previous reference picture.
    pic_order_cnt_lsb: i32,
    // Field (top/bottom/frame) of the previous reference picture.
    field: Field,
}
177
178impl Default for PrevReferencePicInfo {
179    fn default() -> Self {
180        Self {
181            frame_num: Default::default(),
182            has_mmco_5: Default::default(),
183            top_field_order_cnt: Default::default(),
184            pic_order_cnt_msb: Default::default(),
185            pic_order_cnt_lsb: Default::default(),
186            field: Field::Frame,
187        }
188    }
189}
190
191impl PrevReferencePicInfo {
192    /// Store some variables related to the previous reference picture. These
193    /// will be used in the decoding of future pictures.
194    fn fill(&mut self, pic: &PictureData) {
195        self.has_mmco_5 = pic.has_mmco_5;
196        self.top_field_order_cnt = pic.top_field_order_cnt;
197        self.pic_order_cnt_msb = pic.pic_order_cnt_msb;
198        self.pic_order_cnt_lsb = pic.pic_order_cnt_lsb;
199        self.field = pic.field;
200        self.frame_num = pic.frame_num;
201    }
202}
203
/// Cached state from the previous picture (reference or not), as required by
/// the frame_num gap and POC computations for subsequent pictures.
#[derive(Default)]
pub struct PrevPicInfo {
    // `frame_num` of the previous picture.
    frame_num: u32,
    // FrameNumOffset of the previous picture (spec 8.2.1.2/8.2.1.3).
    frame_num_offset: u32,
    // Whether the previous picture carried a MMCO equal to 5.
    has_mmco_5: bool,
}
210
211impl PrevPicInfo {
212    /// Store some variables related to the previous picture. These will be used
213    /// in the decoding of future pictures.
214    fn fill(&mut self, pic: &PictureData) {
215        self.frame_num = pic.frame_num;
216        self.has_mmco_5 = pic.has_mmco_5;
217        self.frame_num_offset = pic.frame_num_offset;
218    }
219}
220
/// Corresponds to RefPicList0 and RefPicList1 in the specification. Computed for every slice,
/// points to the pictures in the DPB.
struct RefPicLists<'a, T> {
    /// RefPicList0, used by P/SP and B slices.
    ref_pic_list0: DpbPicRefList<'a, T>,
    /// RefPicList1, used by B slices only.
    ref_pic_list1: DpbPicRefList<'a, T>,
}
227
/// Used to track that first_mb_in_slice increases monotonically.
enum CurrentMacroblockTracking {
    /// When separate color planes are in use, track one macroblock position
    /// per color plane id.
    SeparateColorPlane(std::collections::BTreeMap<u8, u32>),
    /// Single macroblock position for the whole picture.
    NonSeparateColorPlane(u32),
}
233
/// State of the picture being currently decoded.
///
/// Stored between calls to [`StatelessDecoder::handle_slice`] that belong to the same picture.
struct CurrentPicState<P> {
    /// Data for the current picture as extracted from the stream.
    pic: PictureData,
    /// PPS at the time of the current picture.
    pps: Rc<Pps>,
    /// Backend-specific data for that picture.
    backend_pic: P,
    /// List of reference pictures, used once per slice.
    ref_pic_lists: ReferencePicLists,
    /// The current macroblock we are processing, used to check that
    /// first_mb_in_slice increases monotonically across slices.
    current_macroblock: CurrentMacroblockTracking,
}
249
/// State of the H.264 decoder.
///
/// `H` is the type of decoded handles, `P` the backend's per-picture type.
pub struct H264DecoderState<H: DecodedHandle, P> {
    /// H.264 bitstream parser.
    parser: Parser,
    /// Keeps track of the last stream parameters seen for negotiation purposes.
    negotiation_info: NegotiationInfo,

    /// The decoded picture buffer.
    dpb: Dpb<H>,

    /// Cached variables from the previous reference picture.
    prev_ref_pic_info: PrevReferencePicInfo,
    /// Cached variables from the previous picture.
    prev_pic_info: PrevPicInfo,
    /// Maximum index of the long-term frame, as set by MMCO operations 4/5.
    max_long_term_frame_idx: MaxLongTermFrameIdx,

    /// A cached, non-reference first field that did not make it into the DPB
    /// because it was full even after bumping the smaller POC. This field will
    /// be cached until the second field is processed so they can be output
    /// together.
    ///
    /// We are not using `DbpEntry<T>` as the type because contrary to a DPB entry,
    /// the handle of this member is always valid.
    last_field: Option<(RcPictureData, H)>,

    /// The picture currently being decoded. We need to preserve it between calls to `decode`
    /// because multiple slices will be processed in different calls to `decode`.
    current_pic: Option<CurrentPicState<P>>,
}
282
283impl<H, P> Default for H264DecoderState<H, P>
284where
285    H: DecodedHandle,
286{
287    fn default() -> Self {
288        H264DecoderState {
289            parser: Default::default(),
290            negotiation_info: Default::default(),
291            dpb: Default::default(),
292            prev_ref_pic_info: Default::default(),
293            prev_pic_info: Default::default(),
294            max_long_term_frame_idx: Default::default(),
295            last_field: Default::default(),
296            current_pic: None,
297        }
298    }
299}
300
/// [`StatelessCodec`] structure to use in order to create a H.264 stateless decoder.
///
/// # Accepted input
///
/// A decoder using this codec processes exactly one NAL unit of input per call to
/// [`StatelessDecoder::decode`], and returns the number of bytes until the end of this NAL unit.
/// This makes it possible to call [`Decode`](StatelessDecoder::decode) repeatedly on some unsplit
/// Annex B stream, shrinking it by the number of bytes processed after each call, until the
/// stream ends up being empty.
pub struct H264;
311
impl StatelessCodec for H264 {
    // Format negotiation for H.264 is driven by the SPS.
    type FormatInfo = Rc<Sps>;
    type DecoderState<H: DecodedHandle, P> = H264DecoderState<H, P>;
}
316
317impl<H, P> H264DecoderState<H, P>
318where
319    H: DecodedHandle + Clone,
320{
321    fn compute_pic_order_count(&mut self, pic: &mut PictureData, sps: &Sps) -> anyhow::Result<()> {
322        match pic.pic_order_cnt_type {
323            // Spec 8.2.1.1
324            0 => {
325                let prev_pic_order_cnt_msb;
326                let prev_pic_order_cnt_lsb;
327
328                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
329                    prev_pic_order_cnt_lsb = 0;
330                    prev_pic_order_cnt_msb = 0;
331                } else if self.prev_ref_pic_info.has_mmco_5 {
332                    if !matches!(self.prev_ref_pic_info.field, Field::Bottom) {
333                        prev_pic_order_cnt_msb = 0;
334                        prev_pic_order_cnt_lsb = self.prev_ref_pic_info.top_field_order_cnt;
335                    } else {
336                        prev_pic_order_cnt_msb = 0;
337                        prev_pic_order_cnt_lsb = 0;
338                    }
339                } else {
340                    prev_pic_order_cnt_msb = self.prev_ref_pic_info.pic_order_cnt_msb;
341                    prev_pic_order_cnt_lsb = self.prev_ref_pic_info.pic_order_cnt_lsb;
342                }
343
344                let max_pic_order_cnt_lsb = 1 << (sps.log2_max_pic_order_cnt_lsb_minus4 + 4);
345
346                pic.pic_order_cnt_msb = if (pic.pic_order_cnt_lsb
347                    < self.prev_ref_pic_info.pic_order_cnt_lsb)
348                    && (prev_pic_order_cnt_lsb - pic.pic_order_cnt_lsb >= max_pic_order_cnt_lsb / 2)
349                {
350                    prev_pic_order_cnt_msb + max_pic_order_cnt_lsb
351                } else if (pic.pic_order_cnt_lsb > prev_pic_order_cnt_lsb)
352                    && (pic.pic_order_cnt_lsb - prev_pic_order_cnt_lsb > max_pic_order_cnt_lsb / 2)
353                {
354                    prev_pic_order_cnt_msb - max_pic_order_cnt_lsb
355                } else {
356                    prev_pic_order_cnt_msb
357                };
358
359                if !matches!(pic.field, Field::Bottom) {
360                    pic.top_field_order_cnt = pic.pic_order_cnt_msb + pic.pic_order_cnt_lsb;
361                }
362
363                if !matches!(pic.field, Field::Top) {
364                    if matches!(pic.field, Field::Frame) {
365                        pic.bottom_field_order_cnt =
366                            pic.top_field_order_cnt + pic.delta_pic_order_cnt_bottom;
367                    } else {
368                        pic.bottom_field_order_cnt = pic.pic_order_cnt_msb + pic.pic_order_cnt_lsb;
369                    }
370                }
371            }
372
373            1 => {
374                if self.prev_pic_info.has_mmco_5 {
375                    self.prev_pic_info.frame_num_offset = 0;
376                }
377
378                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
379                    pic.frame_num_offset = 0;
380                } else if self.prev_pic_info.frame_num > pic.frame_num {
381                    pic.frame_num_offset =
382                        self.prev_pic_info.frame_num_offset + sps.max_frame_num();
383                } else {
384                    pic.frame_num_offset = self.prev_pic_info.frame_num_offset;
385                }
386
387                let mut abs_frame_num = if sps.num_ref_frames_in_pic_order_cnt_cycle != 0 {
388                    pic.frame_num_offset + pic.frame_num
389                } else {
390                    0
391                };
392
393                if pic.nal_ref_idc == 0 && abs_frame_num > 0 {
394                    abs_frame_num -= 1;
395                }
396
397                let mut expected_pic_order_cnt = 0;
398
399                if abs_frame_num > 0 {
400                    if sps.num_ref_frames_in_pic_order_cnt_cycle == 0 {
401                        return Err(anyhow!("Invalid num_ref_frames_in_pic_order_cnt_cycle"));
402                    }
403
404                    let pic_order_cnt_cycle_cnt =
405                        (abs_frame_num - 1) / sps.num_ref_frames_in_pic_order_cnt_cycle as u32;
406                    let frame_num_in_pic_order_cnt_cycle =
407                        (abs_frame_num - 1) % sps.num_ref_frames_in_pic_order_cnt_cycle as u32;
408                    expected_pic_order_cnt =
409                        pic_order_cnt_cycle_cnt as i32 * sps.expected_delta_per_pic_order_cnt_cycle;
410
411                    assert!(frame_num_in_pic_order_cnt_cycle < 255);
412
413                    for i in 0..sps.num_ref_frames_in_pic_order_cnt_cycle {
414                        expected_pic_order_cnt += sps.offset_for_ref_frame[i as usize];
415                    }
416                }
417
418                if pic.nal_ref_idc == 0 {
419                    expected_pic_order_cnt += sps.offset_for_non_ref_pic;
420                }
421
422                if matches!(pic.field, Field::Frame) {
423                    pic.top_field_order_cnt = expected_pic_order_cnt + pic.delta_pic_order_cnt0;
424
425                    pic.bottom_field_order_cnt = pic.top_field_order_cnt
426                        + sps.offset_for_top_to_bottom_field
427                        + pic.delta_pic_order_cnt1;
428                } else if !matches!(pic.field, Field::Bottom) {
429                    pic.top_field_order_cnt = expected_pic_order_cnt + pic.delta_pic_order_cnt0;
430                } else {
431                    pic.bottom_field_order_cnt = expected_pic_order_cnt
432                        + sps.offset_for_top_to_bottom_field
433                        + pic.delta_pic_order_cnt0;
434                }
435            }
436
437            2 => {
438                // Spec 8.2.1.3
439                if self.prev_pic_info.has_mmco_5 {
440                    self.prev_pic_info.frame_num_offset = 0;
441                }
442
443                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
444                    pic.frame_num_offset = 0;
445                } else if self.prev_pic_info.frame_num > pic.frame_num {
446                    pic.frame_num_offset =
447                        self.prev_pic_info.frame_num_offset + sps.max_frame_num();
448                } else {
449                    pic.frame_num_offset = self.prev_pic_info.frame_num_offset;
450                }
451
452                let pic_order_cnt = if matches!(pic.is_idr, IsIdr::Yes { .. }) {
453                    0
454                } else if pic.nal_ref_idc == 0 {
455                    2 * (pic.frame_num_offset + pic.frame_num) as i32 - 1
456                } else {
457                    2 * (pic.frame_num_offset + pic.frame_num) as i32
458                };
459
460                if matches!(pic.field, Field::Frame | Field::Top) {
461                    pic.top_field_order_cnt = pic_order_cnt;
462                }
463                if matches!(pic.field, Field::Frame | Field::Bottom) {
464                    pic.bottom_field_order_cnt = pic_order_cnt;
465                }
466            }
467
468            _ => {
469                return Err(anyhow!(
470                    "Invalid pic_order_cnt_type: {}",
471                    sps.pic_order_cnt_type
472                ))
473            }
474        }
475
476        match pic.field {
477            Field::Frame => {
478                pic.pic_order_cnt =
479                    std::cmp::min(pic.top_field_order_cnt, pic.bottom_field_order_cnt);
480            }
481            Field::Top => {
482                pic.pic_order_cnt = pic.top_field_order_cnt;
483            }
484            Field::Bottom => {
485                pic.pic_order_cnt = pic.bottom_field_order_cnt;
486            }
487        }
488
489        Ok(())
490    }
491
492    /// Returns an iterator of the handles of the frames that need to be bumped into the ready
493    /// queue.
494    fn bump_as_needed(&mut self, current_pic: &PictureData) -> impl Iterator<Item = H> {
495        self.dpb.bump_as_needed(current_pic).into_iter().flatten()
496    }
497
498    /// Returns an iterator of the handles of all the frames still present in the DPB.
499    fn drain(&mut self) -> impl Iterator<Item = H> {
500        let pics = self.dpb.drain();
501
502        self.last_field = None;
503
504        pics.into_iter().flatten()
505    }
506
507    /// Find the first field for the picture started by `slice`, if any.
508    #[allow(clippy::type_complexity)]
509    fn find_first_field(&self, hdr: &SliceHeader) -> anyhow::Result<Option<(RcPictureData, H)>> {
510        let mut prev_field = None;
511
512        if self.dpb.interlaced() {
513            if let Some(last_field) = &self.last_field {
514                prev_field = Some((&last_field.0, &last_field.1));
515            } else if let Some(last_dpb_entry) = self.dpb.entries().last() {
516                // Use the last entry in the DPB
517                let last_pic = last_dpb_entry.pic.borrow();
518
519                if !matches!(last_pic.field, Field::Frame) && last_pic.other_field().is_none() {
520                    if let Some(handle) = &last_dpb_entry.handle {
521                        // Still waiting for the second field
522                        prev_field = Some((&last_dpb_entry.pic, handle));
523                    }
524                }
525            }
526        }
527
528        if !hdr.field_pic_flag {
529            if let Some(prev_field) = prev_field {
530                let field = prev_field.0.borrow().field;
531                return Err(anyhow!(
532                    "Expecting complementary field {:?}, got {:?}",
533                    field.opposite(),
534                    field
535                ));
536            }
537        }
538
539        match prev_field {
540            None => Ok(None),
541            Some(prev_field) => {
542                let prev_field_pic = prev_field.0.borrow();
543
544                if prev_field_pic.frame_num != u32::from(hdr.frame_num) {
545                    return Err(anyhow!(
546                "The previous field differs in frame_num value wrt. the current field. {:?} vs {:?}",
547                prev_field_pic.frame_num,
548                hdr.frame_num
549            ));
550                } else {
551                    let cur_field = if hdr.bottom_field_flag {
552                        Field::Bottom
553                    } else {
554                        Field::Top
555                    };
556
557                    if cur_field == prev_field_pic.field {
558                        let field = prev_field_pic.field;
559                        return Err(anyhow!(
560                            "Expecting complementary field {:?}, got {:?}",
561                            field.opposite(),
562                            field
563                        ));
564                    }
565                }
566
567                drop(prev_field_pic);
568                Ok(Some((prev_field.0.clone(), prev_field.1.clone())))
569            }
570        }
571    }
572
573    // 8.2.4.3.1 Modification process of reference picture lists for short-term
574    // reference pictures
575    #[allow(clippy::too_many_arguments)]
576    fn short_term_pic_list_modification<'a>(
577        cur_pic: &PictureData,
578        dpb: &'a Dpb<H>,
579        ref_pic_list_x: &mut DpbPicRefList<'a, H>,
580        num_ref_idx_lx_active_minus1: u8,
581        max_pic_num: i32,
582        rplm: &RefPicListModification,
583        pic_num_lx_pred: &mut i32,
584        ref_idx_lx: &mut usize,
585    ) -> anyhow::Result<()> {
586        let pic_num_lx_no_wrap;
587        let abs_diff_pic_num = rplm.abs_diff_pic_num_minus1 as i32 + 1;
588        let modification_of_pic_nums_idc = rplm.modification_of_pic_nums_idc;
589
590        if modification_of_pic_nums_idc == 0 {
591            if *pic_num_lx_pred - abs_diff_pic_num < 0 {
592                pic_num_lx_no_wrap = *pic_num_lx_pred - abs_diff_pic_num + max_pic_num;
593            } else {
594                pic_num_lx_no_wrap = *pic_num_lx_pred - abs_diff_pic_num;
595            }
596        } else if modification_of_pic_nums_idc == 1 {
597            if *pic_num_lx_pred + abs_diff_pic_num >= max_pic_num {
598                pic_num_lx_no_wrap = *pic_num_lx_pred + abs_diff_pic_num - max_pic_num;
599            } else {
600                pic_num_lx_no_wrap = *pic_num_lx_pred + abs_diff_pic_num;
601            }
602        } else {
603            anyhow::bail!(
604                "unexpected value for modification_of_pic_nums_idc {:?}",
605                rplm.modification_of_pic_nums_idc
606            );
607        }
608
609        *pic_num_lx_pred = pic_num_lx_no_wrap;
610
611        let pic_num_lx = if pic_num_lx_no_wrap > cur_pic.pic_num {
612            pic_num_lx_no_wrap - max_pic_num
613        } else {
614            pic_num_lx_no_wrap
615        };
616
617        let handle = dpb
618            .find_short_term_with_pic_num(pic_num_lx)
619            .with_context(|| format!("No ShortTerm reference found with pic_num {}", pic_num_lx))?;
620
621        ref_pic_list_x.insert(*ref_idx_lx, handle);
622        *ref_idx_lx += 1;
623
624        let mut nidx = *ref_idx_lx;
625
626        for cidx in *ref_idx_lx..=usize::from(num_ref_idx_lx_active_minus1) + 1 {
627            if cidx == ref_pic_list_x.len() {
628                break;
629            }
630
631            let target = &ref_pic_list_x[cidx].pic;
632
633            if target.borrow().pic_num_f(max_pic_num) != pic_num_lx {
634                ref_pic_list_x[nidx] = ref_pic_list_x[cidx];
635                nidx += 1;
636            }
637        }
638
639        while ref_pic_list_x.len() > (usize::from(num_ref_idx_lx_active_minus1) + 1) {
640            ref_pic_list_x.pop();
641        }
642
643        Ok(())
644    }
645
646    fn long_term_pic_list_modification<'a>(
647        dpb: &'a Dpb<H>,
648        ref_pic_list_x: &mut DpbPicRefList<'a, H>,
649        num_ref_idx_lx_active_minus1: u8,
650        max_long_term_frame_idx: MaxLongTermFrameIdx,
651        rplm: &RefPicListModification,
652        ref_idx_lx: &mut usize,
653    ) -> anyhow::Result<()> {
654        let long_term_pic_num = rplm.long_term_pic_num;
655
656        let handle = dpb
657            .find_long_term_with_long_term_pic_num(long_term_pic_num)
658            .with_context(|| {
659                format!(
660                    "No LongTerm reference found with long_term_pic_num {}",
661                    long_term_pic_num
662                )
663            })?;
664
665        ref_pic_list_x.insert(*ref_idx_lx, handle);
666        *ref_idx_lx += 1;
667
668        let mut nidx = *ref_idx_lx;
669
670        for cidx in *ref_idx_lx..=usize::from(num_ref_idx_lx_active_minus1) + 1 {
671            if cidx == ref_pic_list_x.len() {
672                break;
673            }
674
675            let target = &ref_pic_list_x[cidx].pic;
676            if target.borrow().long_term_pic_num_f(max_long_term_frame_idx) != long_term_pic_num {
677                ref_pic_list_x[nidx] = ref_pic_list_x[cidx];
678                nidx += 1;
679            }
680        }
681
682        while ref_pic_list_x.len() > (usize::from(num_ref_idx_lx_active_minus1) + 1) {
683            ref_pic_list_x.pop();
684        }
685
686        Ok(())
687    }
688
689    fn modify_ref_pic_list(
690        &self,
691        cur_pic: &PictureData,
692        hdr: &SliceHeader,
693        ref_pic_list_type: RefPicList,
694        ref_pic_list_indices: &[usize],
695    ) -> anyhow::Result<DpbPicRefList<H>> {
696        let (ref_pic_list_modification_flag_lx, num_ref_idx_lx_active_minus1, rplm) =
697            match ref_pic_list_type {
698                RefPicList::RefPicList0 => (
699                    hdr.ref_pic_list_modification_flag_l0,
700                    hdr.num_ref_idx_l0_active_minus1,
701                    &hdr.ref_pic_list_modification_l0,
702                ),
703                RefPicList::RefPicList1 => (
704                    hdr.ref_pic_list_modification_flag_l1,
705                    hdr.num_ref_idx_l1_active_minus1,
706                    &hdr.ref_pic_list_modification_l1,
707                ),
708            };
709
710        let mut ref_pic_list: Vec<_> = ref_pic_list_indices
711            .iter()
712            .map(|&i| &self.dpb.entries()[i])
713            .take(usize::from(num_ref_idx_lx_active_minus1) + 1)
714            .collect();
715
716        if !ref_pic_list_modification_flag_lx {
717            return Ok(ref_pic_list);
718        }
719
720        let mut pic_num_lx_pred = cur_pic.pic_num;
721        let mut ref_idx_lx = 0;
722
723        for modification in rplm {
724            let idc = modification.modification_of_pic_nums_idc;
725
726            match idc {
727                0 | 1 => {
728                    Self::short_term_pic_list_modification(
729                        cur_pic,
730                        &self.dpb,
731                        &mut ref_pic_list,
732                        num_ref_idx_lx_active_minus1,
733                        hdr.max_pic_num as i32,
734                        modification,
735                        &mut pic_num_lx_pred,
736                        &mut ref_idx_lx,
737                    )?;
738                }
739                2 => Self::long_term_pic_list_modification(
740                    &self.dpb,
741                    &mut ref_pic_list,
742                    num_ref_idx_lx_active_minus1,
743                    self.max_long_term_frame_idx,
744                    modification,
745                    &mut ref_idx_lx,
746                )?,
747                3 => break,
748                _ => anyhow::bail!("unexpected modification_of_pic_nums_idc {:?}", idc),
749            }
750        }
751
752        Ok(ref_pic_list)
753    }
754
755    /// Generate RefPicList0 and RefPicList1 in the specification. Computed for every slice, points
756    /// to the pictures in the DPB.
757    fn create_ref_pic_lists(
758        &mut self,
759        cur_pic: &PictureData,
760        hdr: &SliceHeader,
761        ref_pic_lists: &ReferencePicLists,
762    ) -> anyhow::Result<RefPicLists<H>> {
763        let ref_pic_list0 = match hdr.slice_type {
764            SliceType::P | SliceType::Sp => self.modify_ref_pic_list(
765                cur_pic,
766                hdr,
767                RefPicList::RefPicList0,
768                &ref_pic_lists.ref_pic_list_p0,
769            )?,
770            SliceType::B => self.modify_ref_pic_list(
771                cur_pic,
772                hdr,
773                RefPicList::RefPicList0,
774                &ref_pic_lists.ref_pic_list_b0,
775            )?,
776            _ => Vec::new(),
777        };
778
779        let ref_pic_list1 = match hdr.slice_type {
780            SliceType::B => self.modify_ref_pic_list(
781                cur_pic,
782                hdr,
783                RefPicList::RefPicList1,
784                &ref_pic_lists.ref_pic_list_b1,
785            )?,
786            _ => Vec::new(),
787        };
788
789        Ok(RefPicLists {
790            ref_pic_list0,
791            ref_pic_list1,
792        })
793    }
794
795    fn handle_memory_management_ops(&mut self, pic: &mut PictureData) -> Result<(), MmcoError> {
796        let markings = pic.ref_pic_marking.clone();
797
798        for marking in &markings.inner {
799            match marking.memory_management_control_operation {
800                0 => break,
801                1 => self.dpb.mmco_op_1(pic, marking)?,
802                2 => self.dpb.mmco_op_2(pic, marking)?,
803                3 => self.dpb.mmco_op_3(pic, marking)?,
804                4 => self.max_long_term_frame_idx = self.dpb.mmco_op_4(marking),
805                5 => self.max_long_term_frame_idx = self.dpb.mmco_op_5(pic),
806                6 => self.dpb.mmco_op_6(pic, marking),
807                other => return Err(MmcoError::UnknownMmco(other)),
808            }
809        }
810
811        Ok(())
812    }
813
814    fn reference_pic_marking(&mut self, pic: &mut PictureData, sps: &Sps) -> anyhow::Result<()> {
815        /* 8.2.5.1 */
816        if matches!(pic.is_idr, IsIdr::Yes { .. }) {
817            self.dpb.mark_all_as_unused_for_ref();
818
819            if pic.ref_pic_marking.long_term_reference_flag {
820                pic.set_reference(Reference::LongTerm, false);
821                pic.long_term_frame_idx = 0;
822                self.max_long_term_frame_idx = MaxLongTermFrameIdx::Idx(0);
823            } else {
824                pic.set_reference(Reference::ShortTerm, false);
825                self.max_long_term_frame_idx = MaxLongTermFrameIdx::NoLongTermFrameIndices;
826            }
827
828            return Ok(());
829        }
830
831        if pic.ref_pic_marking.adaptive_ref_pic_marking_mode_flag {
832            self.handle_memory_management_ops(pic)?;
833        } else {
834            self.dpb.sliding_window_marking(pic, sps);
835        }
836
837        Ok(())
838    }
839
840    // Apply the parameters of `sps` to the decoding state.
841    fn apply_sps(&mut self, sps: &Sps) {
842        self.negotiation_info = NegotiationInfo::from(sps);
843
844        let max_dpb_frames = sps.max_dpb_frames();
845        let interlaced = !sps.frame_mbs_only_flag;
846        let max_num_order_frames = sps.max_num_order_frames() as usize;
847        let max_num_reorder_frames = if max_num_order_frames > max_dpb_frames {
848            0
849        } else {
850            max_num_order_frames
851        };
852
853        self.dpb.set_limits(max_dpb_frames, max_num_reorder_frames);
854        self.dpb.set_interlaced(interlaced);
855    }
856}
857
858impl<B> StatelessDecoder<H264, B>
859where
860    B: StatelessH264DecoderBackend + TryFormat<H264>,
861    B::Handle: Clone,
862{
863    fn negotiation_possible(sps: &Sps, old_negotiation_info: &NegotiationInfo) -> bool {
864        let negotiation_info = NegotiationInfo::from(sps);
865        *old_negotiation_info != negotiation_info
866    }
867
868    fn renegotiate_if_needed(&mut self, sps: &Rc<Sps>) -> anyhow::Result<()> {
869        if Self::negotiation_possible(sps, &self.codec.negotiation_info) {
870            // Make sure all the frames we decoded so far are in the ready queue.
871            self.drain()?;
872            self.backend.new_sequence(sps)?;
873            self.await_format_change(sps.clone());
874        }
875
876        Ok(())
877    }
878
879    // Apply the parameters of `sps` to the decoder.
880    fn apply_sps(&mut self, sps: &Sps) {
881        self.codec.apply_sps(sps);
882
883        self.coded_resolution = Resolution::from((sps.width(), sps.height()));
884    }
885
886    fn drain(&mut self) -> anyhow::Result<()> {
887        // Finish the current picture if there is one pending.
888        if let Some(cur_pic) = self.codec.current_pic.take() {
889            self.finish_picture(cur_pic)?;
890        }
891
892        self.ready_queue.extend(self.codec.drain());
893
894        Ok(())
895    }
896
897    /// Adds picture to the ready queue if it could not be added to the DPB.
898    fn add_to_ready_queue(&mut self, pic: PictureData, handle: B::Handle) {
899        if matches!(pic.field, Field::Frame) {
900            assert!(self.codec.last_field.is_none());
901
902            self.ready_queue.push(handle);
903        } else {
904            match self.codec.last_field.take() {
905                None => {
906                    assert!(!pic.is_second_field());
907
908                    // Cache the field, wait for its pair.
909                    self.codec.last_field = Some((pic.into_rc(), handle));
910                }
911                Some((field_pic, field_handle)) if matches!(pic.field_rank(), FieldRank::Second(first_field) if Rc::ptr_eq(first_field, &field_pic)) =>
912                {
913                    self.ready_queue.push(field_handle);
914                }
915                // Somehow, the last field is not paired with the current field.
916                _ => log::warn!("unmatched field dropped"),
917            }
918        }
919    }
920
921    fn finish_picture(&mut self, pic: CurrentPicState<B::Picture>) -> anyhow::Result<()> {
922        debug!("Finishing picture POC {:?}", pic.pic.pic_order_cnt);
923
924        // Submit the picture to the backend.
925        let handle = self.submit_picture(pic.backend_pic)?;
926        let pps = pic.pps;
927        let mut pic = pic.pic;
928
929        if matches!(pic.reference(), Reference::ShortTerm | Reference::LongTerm) {
930            self.codec.reference_pic_marking(&mut pic, &pps.sps)?;
931            self.codec.prev_ref_pic_info.fill(&pic);
932        }
933
934        self.codec.prev_pic_info.fill(&pic);
935
936        if pic.has_mmco_5 {
937            // C.4.5.3 "Bumping process"
938            // The bumping process is invoked in the following cases:
939            // Clause 3:
940            // The current picture has memory_management_control_operation equal
941            // to 5, as specified in clause C.4.4.
942            self.drain()?;
943        }
944
945        // Bump the DPB as per C.4.5.3 to cover clauses 1, 4, 5 and 6.
946        self.ready_queue.extend(self.codec.bump_as_needed(&pic));
947
948        // C.4.5.1, C.4.5.2
949        // If the current decoded picture is the second field of a complementary
950        // reference field pair, add to DPB.
951        // C.4.5.1
952        // For a reference decoded picture, the "bumping" process is invoked
953        // repeatedly until there is an empty frame buffer, by which point it is
954        // added to the DPB. Notice that Dpb::needs_bumping already accounts for
955        // this.
956        // C.4.5.2
957        // For a non-reference decoded picture, if there is empty frame buffer
958        // after bumping the smaller POC, add to DPB. Otherwise, add it to the
959        // ready queue.
960        if pic.is_second_field_of_complementary_ref_pair()
961            || pic.is_ref()
962            || self.codec.dpb.has_empty_frame_buffer()
963        {
964            if self.codec.dpb.interlaced() && matches!(pic.field, Field::Frame) {
965                // Split the Frame into two complementary fields so reference
966                // marking is easier. This is inspired by the GStreamer implementation.
967                let (first_field, second_field) = PictureData::split_frame(pic);
968
969                self.codec.dpb.add_picture(
970                    first_field,
971                    Some(handle.clone()),
972                    &mut self.codec.last_field,
973                )?;
974                self.codec.dpb.add_picture(
975                    second_field,
976                    Some(handle),
977                    &mut self.codec.last_field,
978                )?;
979            } else {
980                self.codec.dpb.add_picture(
981                    pic.into_rc(),
982                    Some(handle),
983                    &mut self.codec.last_field,
984                )?;
985            }
986        } else {
987            self.add_to_ready_queue(pic, handle);
988        }
989
990        Ok(())
991    }
992
993    fn handle_frame_num_gap(
994        &mut self,
995        sps: &Sps,
996        frame_num: u32,
997        timestamp: u64,
998    ) -> anyhow::Result<()> {
999        if self.codec.dpb.is_empty() {
1000            return Ok(());
1001        }
1002
1003        debug!("frame_num gap detected.");
1004
1005        if !sps.gaps_in_frame_num_value_allowed_flag {
1006            return Err(anyhow!(
1007                "Invalid frame_num: {}. Assuming unintentional loss of pictures",
1008                frame_num
1009            ));
1010        }
1011
1012        let mut unused_short_term_frame_num =
1013            (self.codec.prev_ref_pic_info.frame_num + 1) % sps.max_frame_num();
1014        while unused_short_term_frame_num != frame_num {
1015            let max_frame_num = sps.max_frame_num();
1016
1017            let mut pic = PictureData::new_non_existing(unused_short_term_frame_num, timestamp);
1018            self.codec.compute_pic_order_count(&mut pic, sps)?;
1019
1020            self.codec
1021                .dpb
1022                .update_pic_nums(unused_short_term_frame_num, max_frame_num, &pic);
1023
1024            self.codec.dpb.sliding_window_marking(&mut pic, sps);
1025
1026            self.ready_queue.extend(self.codec.bump_as_needed(&pic));
1027
1028            if self.codec.dpb.interlaced() {
1029                let (first_field, second_field) = PictureData::split_frame(pic);
1030
1031                self.codec
1032                    .dpb
1033                    .add_picture(first_field, None, &mut self.codec.last_field)?;
1034                self.codec
1035                    .dpb
1036                    .add_picture(second_field, None, &mut self.codec.last_field)?;
1037            } else {
1038                self.codec
1039                    .dpb
1040                    .add_picture(pic.into_rc(), None, &mut self.codec.last_field)?;
1041            }
1042
1043            unused_short_term_frame_num += 1;
1044            unused_short_term_frame_num %= max_frame_num;
1045        }
1046
1047        Ok(())
1048    }
1049
1050    /// Init the current picture being decoded.
1051    fn init_current_pic(
1052        &mut self,
1053        slice: &Slice,
1054        sps: &Sps,
1055        first_field: Option<&RcPictureData>,
1056        timestamp: u64,
1057    ) -> anyhow::Result<PictureData> {
1058        let mut pic = PictureData::new_from_slice(slice, sps, timestamp, first_field);
1059        self.codec.compute_pic_order_count(&mut pic, sps)?;
1060
1061        if matches!(pic.is_idr, IsIdr::Yes { .. }) {
1062            // C.4.5.3 "Bumping process"
1063            // The bumping process is invoked in the following cases:
1064            // Clause 2:
1065            // The current picture is an IDR picture and
1066            // no_output_of_prior_pics_flag is not equal to 1 and is not
1067            // inferred to be equal to 1, as specified in clause C.4.4.
1068            if !pic.ref_pic_marking.no_output_of_prior_pics_flag {
1069                self.drain()?;
1070            } else {
1071                // C.4.4 When no_output_of_prior_pics_flag is equal to 1 or is
1072                // inferred to be equal to 1, all frame buffers in the DPB are
1073                // emptied without output of the pictures they contain, and DPB
1074                // fullness is set to 0.
1075                self.codec.dpb.clear();
1076                self.codec.last_field = None;
1077            }
1078        }
1079
1080        self.codec.dpb.update_pic_nums(
1081            u32::from(slice.header.frame_num),
1082            sps.max_frame_num(),
1083            &pic,
1084        );
1085
1086        Ok(pic)
1087    }
1088
1089    /// Called once per picture to start it.
1090    fn begin_picture(
1091        &mut self,
1092        timestamp: u64,
1093        slice: &Slice,
1094    ) -> Result<CurrentPicState<B::Picture>, DecodeError> {
1095        // Start by securing the backend picture before modifying our state.
1096        let first_field = self.codec.find_first_field(&slice.header)?;
1097        let mut backend_pic = if let Some(first_field) = &first_field {
1098            self.backend.new_field_picture(timestamp, &first_field.1)
1099        } else {
1100            self.backend.new_picture(timestamp)
1101        }?;
1102
1103        let nalu_hdr = &slice.nalu.header;
1104
1105        if nalu_hdr.idr_pic_flag {
1106            self.codec.prev_ref_pic_info.frame_num = 0;
1107        }
1108
1109        let hdr = &slice.header;
1110        let frame_num = u32::from(hdr.frame_num);
1111
1112        let pps = Rc::clone(
1113            self.codec
1114                .parser
1115                .get_pps(hdr.pic_parameter_set_id)
1116                .context("Invalid PPS in handle_picture")?,
1117        );
1118
1119        // A picture's SPS may require negociation.
1120        self.renegotiate_if_needed(&pps.sps)?;
1121        if let DecodingState::AwaitingFormat(_) = &self.decoding_state {
1122            return Err(DecodeError::CheckEvents);
1123        }
1124
1125        let current_macroblock = match pps.sps.separate_colour_plane_flag {
1126            true => CurrentMacroblockTracking::SeparateColorPlane(Default::default()),
1127            false => CurrentMacroblockTracking::NonSeparateColorPlane(0),
1128        };
1129
1130        if frame_num != self.codec.prev_ref_pic_info.frame_num
1131            && frame_num != (self.codec.prev_ref_pic_info.frame_num + 1) % pps.sps.max_frame_num()
1132        {
1133            self.handle_frame_num_gap(&pps.sps, frame_num, timestamp)?;
1134        }
1135
1136        let pic = self.init_current_pic(
1137            slice,
1138            &pps.sps,
1139            first_field.as_ref().map(|f| &f.0),
1140            timestamp,
1141        )?;
1142        let ref_pic_lists = self.codec.dpb.build_ref_pic_lists(&pic);
1143
1144        debug!("Decode picture POC {:?}", pic.pic_order_cnt);
1145
1146        self.backend.start_picture(
1147            &mut backend_pic,
1148            &pic,
1149            pps.sps.as_ref(),
1150            pps.as_ref(),
1151            &self.codec.dpb,
1152            &slice.header,
1153        )?;
1154
1155        Ok(CurrentPicState {
1156            pic,
1157            pps,
1158            backend_pic,
1159            ref_pic_lists,
1160            current_macroblock,
1161        })
1162    }
1163
1164    // Check whether first_mb_in_slice increases monotonically for the current
1165    // picture as required by the specification.
1166    fn check_first_mb_in_slice(
1167        &mut self,
1168        current_macroblock: &mut CurrentMacroblockTracking,
1169        slice: &Slice,
1170    ) {
1171        match current_macroblock {
1172            CurrentMacroblockTracking::SeparateColorPlane(current_macroblock) => {
1173                match current_macroblock.entry(slice.header.colour_plane_id) {
1174                    Entry::Vacant(current_macroblock) => {
1175                        current_macroblock.insert(slice.header.first_mb_in_slice);
1176                    }
1177                    Entry::Occupied(mut current_macroblock) => {
1178                        let current_macroblock = current_macroblock.get_mut();
1179                        if slice.header.first_mb_in_slice >= *current_macroblock {
1180                            log::trace!("first_mb_in_slice does not increase monotically, expect corrupted output");
1181                        }
1182                        *current_macroblock = slice.header.first_mb_in_slice;
1183                    }
1184                }
1185            }
1186            CurrentMacroblockTracking::NonSeparateColorPlane(current_macroblock) => {
1187                if slice.header.first_mb_in_slice >= *current_macroblock {
1188                    log::trace!(
1189                        "first_mb_in_slice does not increase monotically, expect corrupted output"
1190                    );
1191                }
1192                *current_macroblock = slice.header.first_mb_in_slice;
1193            }
1194        }
1195    }
1196
1197    /// Handle a slice. Called once per slice NALU.
1198    fn handle_slice(
1199        &mut self,
1200        cur_pic: &mut CurrentPicState<B::Picture>,
1201        slice: &Slice,
1202    ) -> anyhow::Result<()> {
1203        self.check_first_mb_in_slice(&mut cur_pic.current_macroblock, slice);
1204
1205        // A slice can technically refer to another PPS.
1206        let pps = self
1207            .codec
1208            .parser
1209            .get_pps(slice.header.pic_parameter_set_id)
1210            .context("Invalid PPS")?;
1211        cur_pic.pps = Rc::clone(pps);
1212
1213        // Make sure that no negotiation is possible mid-picture. How could it?
1214        // We'd lose the context with the previous slices on it.
1215        if Self::negotiation_possible(&cur_pic.pps.sps, &self.codec.negotiation_info) {
1216            anyhow::bail!("invalid stream: inter-frame renegotiation requested");
1217        }
1218
1219        let RefPicLists {
1220            ref_pic_list0,
1221            ref_pic_list1,
1222        } = self
1223            .codec
1224            .create_ref_pic_lists(&cur_pic.pic, &slice.header, &cur_pic.ref_pic_lists)?;
1225
1226        self.backend.decode_slice(
1227            &mut cur_pic.backend_pic,
1228            slice,
1229            cur_pic.pps.sps.as_ref(),
1230            cur_pic.pps.as_ref(),
1231            &ref_pic_list0,
1232            &ref_pic_list1,
1233        )?;
1234
1235        Ok(())
1236    }
1237
1238    /// Submits the picture to the accelerator.
1239    fn submit_picture(&mut self, backend_pic: B::Picture) -> Result<B::Handle, DecodeError> {
1240        let handle = self.backend.submit_picture(backend_pic)?;
1241
1242        if self.blocking_mode == BlockingMode::Blocking {
1243            handle.sync()?;
1244        }
1245
1246        Ok(handle)
1247    }
1248
1249    fn process_nalu(&mut self, timestamp: u64, nalu: Nalu) -> Result<(), DecodeError> {
1250        match nalu.header.type_ {
1251            NaluType::Sps => {
1252                self.codec.parser.parse_sps(&nalu)?;
1253            }
1254            NaluType::Pps => {
1255                self.codec.parser.parse_pps(&nalu)?;
1256            }
1257            NaluType::Slice
1258            | NaluType::SliceDpa
1259            | NaluType::SliceDpb
1260            | NaluType::SliceDpc
1261            | NaluType::SliceIdr
1262            | NaluType::SliceExt => {
1263                let slice = self.codec.parser.parse_slice_header(nalu)?;
1264                let mut cur_pic = match self.codec.current_pic.take() {
1265                    // No current picture, start a new one.
1266                    None => self.begin_picture(timestamp, &slice)?,
1267                    // We have a current picture but are starting a new field, or first_mb_in_slice
1268                    // indicates that a new picture is starting: finish the current picture and
1269                    // start a new one.
1270                    Some(cur_pic)
1271                        if (self.codec.dpb.interlaced()
1272                            && matches!(cur_pic.pic.field, Field::Frame)
1273                            && !cur_pic.pic.is_second_field()
1274                            && cur_pic.pic.field != slice.header.field())
1275                            || (slice.header.first_mb_in_slice == 0) =>
1276                    {
1277                        self.finish_picture(cur_pic)?;
1278                        self.begin_picture(timestamp, &slice)?
1279                    }
1280                    // This slice is part of the current picture.
1281                    Some(cur_pic) => cur_pic,
1282                };
1283
1284                self.handle_slice(&mut cur_pic, &slice)?;
1285                self.codec.current_pic = Some(cur_pic);
1286            }
1287            other => {
1288                debug!("Unsupported NAL unit type {:?}", other,);
1289            }
1290        }
1291
1292        Ok(())
1293    }
1294}
1295
impl<B> StatelessVideoDecoder for StatelessDecoder<H264, B>
where
    B: StatelessH264DecoderBackend + TryFormat<H264>,
    B::Handle: Clone + 'static,
{
    type Handle = B::Handle;
    type FramePool = B::FramePool;

    /// Decodes the first NALU found in `bitstream` and returns the number of
    /// bytes consumed (start code included), so the caller can resubmit the
    /// remainder.
    fn decode(&mut self, timestamp: u64, bitstream: &[u8]) -> Result<usize, DecodeError> {
        let mut cursor = Cursor::new(bitstream);
        let nalu = Nalu::next(&mut cursor)?;

        if nalu.header.type_ == NaluType::Sps {
            // Parse the SPS up front so we can decide on (re)negotiation or
            // resumption before the state match below.
            let sps = self.codec.parser.parse_sps(&nalu)?.clone();
            if matches!(self.decoding_state, DecodingState::AwaitingStreamInfo) {
                // If more SPS come along we will renegotiate in begin_picture().
                self.renegotiate_if_needed(&sps)?;
            } else if matches!(self.decoding_state, DecodingState::Reset) {
                // We can resume decoding since the decoding parameters have not changed.
                self.decoding_state = DecodingState::Decoding;
            }
        } else if matches!(self.decoding_state, DecodingState::Reset) {
            let mut cursor = Cursor::new(bitstream);

            while let Ok(nalu) = Nalu::next(&mut cursor) {
                // In the Reset state we can resume decoding from any key frame.
                if matches!(nalu.header.type_, NaluType::SliceIdr) {
                    self.decoding_state = DecodingState::Decoding;
                    break;
                }
            }
        }

        // Total length of the NALU, including its start code.
        let nalu_len = nalu.offset + nalu.size;

        match &mut self.decoding_state {
            // Process parameter sets, but skip input until we get information
            // from the stream.
            DecodingState::AwaitingStreamInfo | DecodingState::Reset => {
                if matches!(nalu.header.type_, NaluType::Pps) {
                    self.process_nalu(timestamp, nalu)?;
                }
            }
            // Ask the client to confirm the format before we can process this.
            DecodingState::AwaitingFormat(_) => return Err(DecodeError::CheckEvents),
            DecodingState::Decoding => {
                self.process_nalu(timestamp, nalu)?;
            }
        }

        Ok(nalu_len)
    }

    /// Flushes all pending output and puts the decoder into the `Reset` state,
    /// from which it can resume at the next SPS or IDR slice.
    fn flush(&mut self) -> Result<(), DecodeError> {
        self.drain()?;
        self.decoding_state = DecodingState::Reset;

        Ok(())
    }

    fn next_event(&mut self) -> Option<DecoderEvent<B::Handle>> {
        self.query_next_event(|decoder, sps| {
            // Apply the SPS settings to the decoder so we don't enter the AwaitingFormat state
            // on the next decode() call.
            decoder.apply_sps(sps);
        })
    }

    /// Returns the backend's frame pool(s) for the requested layer.
    fn frame_pool(&mut self, layer: PoolLayer) -> Vec<&mut B::FramePool> {
        self.backend.frame_pool(layer)
    }

    fn stream_info(&self) -> Option<&StreamInfo> {
        self.backend.stream_info()
    }

    /// File descriptor the client can poll to know when events are pending.
    fn poll_fd(&self) -> BorrowedFd {
        self.epoll_fd.0.as_fd()
    }
}
1376
#[cfg(test)]
pub mod tests {
    use crate::codec::h264::parser::Nalu;
    use crate::decoder::stateless::h264::H264;
    use crate::decoder::stateless::tests::test_decode_stream;
    use crate::decoder::stateless::tests::TestStream;
    use crate::decoder::stateless::StatelessDecoder;
    use crate::decoder::BlockingMode;
    use crate::utils::simple_playback_loop;
    use crate::utils::simple_playback_loop_owned_frames;
    use crate::utils::NalIterator;
    use crate::DecodedFormat;

    /// Run `test` using the dummy decoder, in both blocking and non-blocking modes.
    fn test_decoder_dummy(test: &TestStream, blocking_mode: BlockingMode) {
        let decoder = StatelessDecoder::<H264, _>::new_dummy(blocking_mode).unwrap();

        test_decode_stream(
            |d, s, f| {
                simple_playback_loop(
                    d,
                    NalIterator::<Nalu>::new(s),
                    f,
                    &mut simple_playback_loop_owned_frames,
                    DecodedFormat::NV12,
                    blocking_mode,
                )
            },
            decoder,
            test,
            false,
            false,
        );
    }

    /// A 64x64 progressive byte-stream encoded I-frame to make it easier to
    /// spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    ///
    /// gst-launch-1.0 videotestsrc num-buffers=1 ! video/x-raw,format=I420,width=64,height=64 ! x264enc ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I.h264"
    pub const DECODE_64X64_PROGRESSIVE_I: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I, BlockingMode::NonBlocking);
    }

    /// A 64x64 progressive byte-stream encoded I-frame and P-frame to make
    /// it easier to spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=2 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I-P.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_p_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P, BlockingMode::NonBlocking);
    }

    /// A 64x64 progressive byte-stream encoded I-P-B-P sequence to make it
    /// easier to spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=3 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false bframes=1 ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I-P-B-P.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P_B_P: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P-B-P.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P-B-P.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_b_p_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_p_b_p_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P, BlockingMode::NonBlocking);
    }

    /// A 64x64 progressive byte-stream encoded I-P-B-P sequence to make it
    /// easier to spot errors on the libva trace.
    /// Also tests whether the decoder supports the high profile.
    ///
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=3 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false bframes=1 ! video/x-h264,profile=high,stream-format=byte-stream ! filesink location="64x64-I-P-B-P-high.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P-B-P-high.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P-B-P-high.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_b_p_high_block() {
        test_decoder_dummy(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            BlockingMode::Blocking,
        );
    }

    #[test]
    fn test_64x64_progressive_i_p_b_p_high_nonblock() {
        test_decoder_dummy(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            BlockingMode::NonBlocking,
        );
    }

    /// Same as Chromium's test-25fps.h264
    pub const DECODE_TEST_25FPS: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/test-25fps.h264"),
        crcs: include_str!("../../codec/h264/test_data/test-25fps.h264.crc"),
    };

    #[test]
    fn test_25fps_block() {
        test_decoder_dummy(&DECODE_TEST_25FPS, BlockingMode::Blocking);
    }

    #[test]
    fn test_25fps_nonblock() {
        test_decoder_dummy(&DECODE_TEST_25FPS, BlockingMode::NonBlocking);
    }

    // Adapted from Chromium's test-25fps.h264. Same file, but encoded as
    // interlaced instead using the following ffmpeg command:
    // ffmpeg -i
    // src/third_party/blink/web_tests/media/content/test-25fps.mp4
    // -flags +ilme+ildct  -vbsf h264_mp4toannexb -an test-25fps.h264
    //
    // This test makes sure that the interlaced logic in the decoder
    // actually works, specially that "frame splitting" works, as the fields
    // here were encoded as frames.
    pub const DECODE_TEST_25FPS_INTERLACED: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/test-25fps-interlaced.h264"),
        crcs: include_str!("../../codec/h264/test_data/test-25fps-interlaced.h264.crc"),
    };

    #[test]
    fn test_25fps_interlaced_block() {
        test_decoder_dummy(&DECODE_TEST_25FPS_INTERLACED, BlockingMode::Blocking);
    }

    #[test]
    fn test_25fps_interlaced_nonblock() {
        test_decoder_dummy(&DECODE_TEST_25FPS_INTERLACED, BlockingMode::NonBlocking);
    }
}