// zenavif_parse/lib.rs

1#![deny(unsafe_code)]
2#![allow(clippy::missing_safety_doc)]
3//! AVIF container parser (ISOBMFF/MIAF demuxer).
4//!
5//! Extracts AV1 payloads, alpha channels, grid tiles, animation frames,
6//! and container metadata from AVIF files. Written in safe Rust with
7//! fallible allocations throughout.
8//!
9//! The primary API is [`AvifParser`], which performs zero-copy parsing by
10//! recording byte offsets and resolving data on demand.
11//!
12//! A legacy eager API ([`read_avif`]) is available behind the `eager` feature flag.
13
14// This Source Code Form is subject to the terms of the Mozilla Public
15// License, v. 2.0. If a copy of the MPL was not distributed with this
16// file, You can obtain one at https://mozilla.org/MPL/2.0/.
17
18use arrayvec::ArrayVec;
19use log::{debug, warn};
20
21use bitreader::BitReader;
22use byteorder::ReadBytesExt;
23use fallible_collections::{TryClone, TryReserveError};
24use std::borrow::Cow;
25use std::convert::{TryFrom, TryInto as _};
26
27use std::io::{Read, Take};
28use std::num::NonZeroU32;
29use std::ops::{Range, RangeFrom};
30
31mod obu;
32
33mod boxes;
34use crate::boxes::{BoxType, FourCC};
35
36/// This crate can be used from C.
37#[cfg(feature = "c_api")]
38pub mod c_api;
39
40pub use enough::{Stop, StopReason, Unstoppable};
41
42// Arbitrary buffer size limit used for raw read_bufs on a box.
43// const BUF_SIZE_LIMIT: u64 = 10 * 1024 * 1024;
44
/// A trait to indicate a type can be infallibly converted to `u64`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
trait ToU64 {
    /// Widen `self` to `u64` without any possibility of truncation.
    fn to_u64(self) -> u64;
}
50
51/// Infallible: usize always fits in u64.
52impl ToU64 for usize {
53    fn to_u64(self) -> u64 {
54        const _: () = assert!(std::mem::size_of::<usize>() <= std::mem::size_of::<u64>());
55        self as u64
56    }
57}
58
/// A trait to indicate a type can be infallibly converted to `usize`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
pub(crate) trait ToUsize {
    /// Widen `self` to `usize` without any possibility of truncation.
    fn to_usize(self) -> usize;
}
64
/// Infallible widening cast: `$from_type` always fits in `usize`.
macro_rules! impl_to_usize_from {
    ( $from_type:ty ) => {
        impl ToUsize for $from_type {
            fn to_usize(self) -> usize {
                // Compile-time check that this widening cast cannot truncate
                // on the target platform.
                const _: () = assert!(std::mem::size_of::<$from_type>() <= std::mem::size_of::<usize>());
                self as usize
            }
        }
    };
}

// u64 is deliberately omitted: the const assertion above would fail on
// 32-bit targets, where u64 does not fit in usize.
impl_to_usize_from!(u8);
impl_to_usize_from!(u16);
impl_to_usize_from!(u32);
80
/// Indicate the current offset (i.e., bytes already read) in a reader
trait Offset {
    /// Number of bytes consumed from the underlying reader so far.
    fn offset(&self) -> u64;
}
85
/// Wraps a reader to track the current offset
struct OffsetReader<'a, T> {
    // The wrapped reader; all reads are delegated to it.
    reader: &'a mut T,
    // Running count of bytes read through this wrapper.
    offset: u64,
}

impl<'a, T> OffsetReader<'a, T> {
    /// Wraps `reader`, starting the byte count at zero.
    fn new(reader: &'a mut T) -> Self {
        Self { reader, offset: 0 }
    }
}
97
impl<T> Offset for OffsetReader<'_, T> {
    /// Bytes read through this wrapper so far.
    fn offset(&self) -> u64 {
        self.offset
    }
}
103
104impl<T: Read> Read for OffsetReader<'_, T> {
105    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
106        let bytes_read = self.reader.read(buf)?;
107        self.offset = self
108            .offset
109            .checked_add(bytes_read.to_u64())
110            .ok_or(Error::Unsupported("total bytes read too large for offset type"))?;
111        Ok(bytes_read)
112    }
113}
114
// Fallible-allocation aliases used throughout the parser.
pub(crate) type TryVec<T> = fallible_collections::TryVec<T>;
pub(crate) type TryString = fallible_collections::TryVec<u8>;

// To ensure we don't use stdlib allocating types by accident: these unit
// structs shadow the prelude names within this module, so any accidental use
// of the infallible std collections becomes a compile error here.
#[allow(dead_code)]
struct Vec;
#[allow(dead_code)]
struct Box;
#[allow(dead_code)]
struct HashMap;
#[allow(dead_code)]
struct String;
127
/// Describes parser failures.
///
/// This enum wraps the standard `io::Error` type, unified with
/// our own parser error states and those of crates we use.
#[derive(Debug)]
pub enum Error {
    /// Parse error caused by corrupt or malformed data.
    InvalidData(&'static str),
    /// Parse error caused by limited parser support rather than invalid data.
    Unsupported(&'static str),
    /// Reflect `std::io::ErrorKind::UnexpectedEof` for short data.
    UnexpectedEOF,
    /// Propagate underlying errors from `std::io`.
    Io(std::io::Error),
    /// `read_mp4` terminated without detecting a moov box.
    NoMoov,
    /// Out of memory (a fallible allocation failed; see `From<TryReserveError>`).
    OutOfMemory,
    /// Resource limit exceeded during parsing
    ResourceLimitExceeded(&'static str),
    /// Operation was stopped/cancelled cooperatively (carries the stop reason).
    Stopped(enough::StopReason),
}
151
152impl std::fmt::Display for Error {
153    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
154        let msg = match self {
155            Self::InvalidData(s) | Self::Unsupported(s) | Self::ResourceLimitExceeded(s) => s,
156            Self::UnexpectedEOF => "EOF",
157            Self::Io(err) => return err.fmt(f),
158            Self::NoMoov => "Missing Moov box",
159            Self::OutOfMemory => "OOM",
160            Self::Stopped(reason) => return write!(f, "Stopped: {}", reason),
161        };
162        f.write_str(msg)
163    }
164}
165
166impl std::error::Error for Error {}
167
168impl From<bitreader::BitReaderError> for Error {
169    #[cold]
170    #[cfg_attr(debug_assertions, track_caller)]
171    fn from(err: bitreader::BitReaderError) -> Self {
172        log::warn!("bitreader: {err}");
173        Self::InvalidData("truncated bits")
174    }
175}
176
177impl From<std::io::Error> for Error {
178    fn from(err: std::io::Error) -> Self {
179        match err.kind() {
180            std::io::ErrorKind::UnexpectedEof => Self::UnexpectedEOF,
181            _ => Self::Io(err),
182        }
183    }
184}
185
impl From<std::string::FromUtf8Error> for Error {
    /// UTF-8 decoding failures are treated as malformed input.
    fn from(_: std::string::FromUtf8Error) -> Self {
        Self::InvalidData("invalid utf8")
    }
}

impl From<std::num::TryFromIntError> for Error {
    /// A narrowing integer conversion failed. Classified as `Unsupported`
    /// rather than `InvalidData`: the data may be valid but beyond the
    /// ranges this parser handles.
    fn from(_: std::num::TryFromIntError) -> Self {
        Self::Unsupported("integer conversion failed")
    }
}
197
198impl From<Error> for std::io::Error {
199    fn from(err: Error) -> Self {
200        let kind = match err {
201            Error::InvalidData(_) => std::io::ErrorKind::InvalidData,
202            Error::UnexpectedEOF => std::io::ErrorKind::UnexpectedEof,
203            Error::Io(io_err) => return io_err,
204            _ => std::io::ErrorKind::Other,
205        };
206        Self::new(kind, err)
207    }
208}
209
impl From<TryReserveError> for Error {
    /// A fallible allocation failed; surface it as `OutOfMemory`.
    fn from(_: TryReserveError) -> Self {
        Self::OutOfMemory
    }
}

impl From<enough::StopReason> for Error {
    /// A cooperative cancellation request; the reason is preserved so
    /// callers can inspect why the operation was stopped.
    fn from(reason: enough::StopReason) -> Self {
        Self::Stopped(reason)
    }
}
221
/// Result shorthand using our Error enum.
///
/// The error type defaults to [`Error`] but can be overridden per use site.
pub type Result<T, E = Error> = std::result::Result<T, E>;
224
/// Basic ISO box structure.
///
/// mp4 files are a sequence of possibly-nested 'box' structures.  Each box
/// begins with a header describing the length of the box's data and a
/// four-byte box type which identifies the type of the box. Together these
/// are enough to interpret the contents of that section of the file.
///
/// See ISO 14496-12:2015 § 4.2
#[derive(Debug, Clone, Copy)]
struct BoxHeader {
    /// Box type.
    name: BoxType,
    /// Size of the box in bytes.
    size: u64,
    /// Offset to the start of the contained data (or header size).
    offset: u64,
    /// Uuid for extended type.
    /// Only populated for `uuid` boxes; unused by the AVIF paths so far.
    #[allow(unused)]
    uuid: Option<[u8; 16]>,
}
245
impl BoxHeader {
    /// 4-byte size + 4-byte type
    const MIN_SIZE: u64 = 8;
    /// 4-byte size + 4-byte type + 8-byte largesize
    /// (the previous comment said "16-byte size", which did not match the value 16 = 4 + 4 + 8)
    const MIN_LARGE_SIZE: u64 = 16;
}
252
/// File type box 'ftyp'.
#[derive(Debug)]
#[allow(unused)]
struct FileTypeBox {
    major_brand: FourCC,
    minor_version: u32,
    compatible_brands: TryVec<FourCC>,
}

/// Handler reference box 'hdlr'
#[derive(Debug)]
#[allow(unused)]
struct HandlerBox {
    /// Declares the media type of the track (e.g. picture vs. audio).
    handler_type: FourCC,
}
268
/// AV1 codec configuration from the `av1C` property box.
///
/// Contains the AV1 codec parameters as signaled in the container.
/// See AV1-ISOBMFF § 2.3.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AV1Config {
    /// AV1 seq_profile (0=Main, 1=High, 2=Professional)
    pub profile: u8,
    /// AV1 seq_level_idx for operating point 0
    pub level: u8,
    /// AV1 seq_tier for operating point 0 (0=Main tier, 1=High tier)
    pub tier: u8,
    /// Bit depth (8, 10, or 12)
    pub bit_depth: u8,
    /// True if monochrome (no chroma planes)
    pub monochrome: bool,
    /// Chroma subsampling X (1 = horizontally subsampled)
    pub chroma_subsampling_x: u8,
    /// Chroma subsampling Y (1 = vertically subsampled)
    pub chroma_subsampling_y: u8,
    /// Chroma sample position (0=unknown, 1=vertical, 2=colocated)
    pub chroma_sample_position: u8,
}
292
/// Colour information from the `colr` property box.
///
/// Can be either CICP-based (`nclx`) or an ICC profile (`rICC`/`prof`).
/// See ISOBMFF § 12.1.5.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ColorInformation {
    /// CICP-based color information (colour_type = 'nclx')
    Nclx {
        /// Colour primaries (ITU-T H.273 Table 2)
        color_primaries: u16,
        /// Transfer characteristics (ITU-T H.273 Table 3)
        transfer_characteristics: u16,
        /// Matrix coefficients (ITU-T H.273 Table 4)
        matrix_coefficients: u16,
        /// True if full range (0-255 for 8-bit), false if limited/studio range
        full_range: bool,
    },
    /// ICC profile (colour_type = 'rICC' or 'prof')
    ///
    /// NOTE(review): stores the profile in an infallibly-allocating std `Vec`,
    /// unlike the `TryVec` used elsewhere in this crate — confirm this is an
    /// intended exception to the fallible-allocation policy.
    IccProfile(std::vec::Vec<u8>),
}
313
/// Image rotation from the `irot` property box.
///
/// Specifies a counter-clockwise rotation to apply after decoding.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ImageRotation {
    /// Rotation angle in degrees counter-clockwise: 0, 90, 180, or 270.
    pub angle: u16,
}

/// Image mirror from the `imir` property box.
///
/// Specifies a mirror (flip) axis to apply after rotation.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ImageMirror {
    /// Mirror axis: 0 = top-to-bottom (vertical axis, left-right flip),
    /// 1 = left-to-right (horizontal axis, top-bottom flip).
    pub axis: u8,
}

/// Clean aperture from the `clap` property box.
///
/// Defines a crop rectangle as a centered region. All values are
/// stored as exact rationals (numerator/denominator).
/// See ISOBMFF § 12.1.4.
///
/// NOTE(review): denominators of zero would make the rationals undefined —
/// confirm the parse site rejects them before consumers divide.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CleanAperture {
    /// Width of the clean aperture (numerator)
    pub width_n: u32,
    /// Width of the clean aperture (denominator)
    pub width_d: u32,
    /// Height of the clean aperture (numerator)
    pub height_n: u32,
    /// Height of the clean aperture (denominator)
    pub height_d: u32,
    /// Horizontal offset of the clean aperture center (numerator, signed)
    pub horiz_off_n: i32,
    /// Horizontal offset of the clean aperture center (denominator)
    pub horiz_off_d: u32,
    /// Vertical offset of the clean aperture center (numerator, signed)
    pub vert_off_n: i32,
    /// Vertical offset of the clean aperture center (denominator)
    pub vert_off_d: u32,
}

/// Pixel aspect ratio from the `pasp` property box.
///
/// The ratio is `h_spacing : v_spacing`.
/// For AVIF, the spec requires this to be 1:1 if present.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PixelAspectRatio {
    /// Horizontal spacing
    pub h_spacing: u32,
    /// Vertical spacing
    pub v_spacing: u32,
}
371
/// Content light level info from the `clli` property box.
///
/// HDR metadata for display mapping.
/// See ISOBMFF § 12.1.5 / ITU-T H.274.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ContentLightLevel {
    /// Maximum content light level (cd/m²)
    pub max_content_light_level: u16,
    /// Maximum picture average light level (cd/m²)
    pub max_pic_average_light_level: u16,
}

/// Mastering display colour volume from the `mdcv` property box.
///
/// HDR metadata describing the mastering display's color volume.
/// See ISOBMFF § 12.1.5 / SMPTE ST 2086.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct MasteringDisplayColourVolume {
    /// Display primaries: [(x, y); 3] in 0.00002 units (CIE 1931)
    /// Order: green, blue, red (per SMPTE ST 2086)
    pub primaries: [(u16, u16); 3],
    /// White point (x, y) in 0.00002 units
    pub white_point: (u16, u16),
    /// Maximum display luminance in 0.0001 cd/m² units
    pub max_luminance: u32,
    /// Minimum display luminance in 0.0001 cd/m² units
    pub min_luminance: u32,
}

/// Content colour volume from the `cclv` property box.
///
/// Describes the colour volume of the content. Derived from H.265 D.2.40 /
/// ITU-T H.274. All fields are optional, controlled by presence flags
/// in the bitstream (`None` = the corresponding flag was not set).
/// See ISOBMFF § 12.1.5.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ContentColourVolume {
    /// Content colour primaries (x, y) for 3 primaries, as signed i32.
    /// Present only if `ccv_primaries_present_flag` was set.
    pub primaries: Option<[(i32, i32); 3]>,
    /// Minimum luminance value. Present only if flag was set.
    pub min_luminance: Option<u32>,
    /// Maximum luminance value. Present only if flag was set.
    pub max_luminance: Option<u32>,
    /// Average luminance value. Present only if flag was set.
    pub avg_luminance: Option<u32>,
}

/// Ambient viewing environment from the `amve` property box.
///
/// Describes the ambient viewing conditions under which the content
/// was authored. See ISOBMFF § 12.1.5 / H.265 D.2.39.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AmbientViewingEnvironment {
    /// Ambient illuminance in units of 1/10000 cd/m²
    pub ambient_illuminance: u32,
    /// Ambient light x chromaticity (CIE 1931), units of 1/50000
    pub ambient_light_x: u16,
    /// Ambient light y chromaticity (CIE 1931), units of 1/50000
    pub ambient_light_y: u16,
}
432
/// Per-channel gain map parameters from ISO 21496-1.
///
/// Each field is a rational number (numerator/denominator pair) describing
/// how to apply the gain map for this channel.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GainMapChannel {
    /// Minimum gain map value (numerator).
    pub gain_map_min_n: i32,
    /// Minimum gain map value (denominator).
    pub gain_map_min_d: u32,
    /// Maximum gain map value (numerator).
    pub gain_map_max_n: i32,
    /// Maximum gain map value (denominator).
    pub gain_map_max_d: u32,
    /// Gamma curve parameter (numerator).
    pub gamma_n: u32,
    /// Gamma curve parameter (denominator).
    pub gamma_d: u32,
    /// Base image offset (numerator).
    pub base_offset_n: i32,
    /// Base image offset (denominator).
    pub base_offset_d: u32,
    /// Alternate image offset (numerator).
    pub alternate_offset_n: i32,
    /// Alternate image offset (denominator).
    pub alternate_offset_d: u32,
}

/// Gain map metadata from a ToneMapImage (`tmap`) derived image item.
///
/// Describes how to apply a gain map to convert between SDR and HDR
/// renditions. The gain map is a separate AV1-encoded image that, combined
/// with this metadata, allows reconstructing an HDR image from the SDR base.
///
/// See ISO 21496-1:2025 for the full specification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GainMapMetadata {
    /// If true, each RGB channel has independent gain map parameters.
    /// If false, `channels[0]` applies to all three channels.
    pub is_multichannel: bool,
    /// If true, the gain map is encoded in the base image's colour space.
    /// If false, it's in the alternate image's colour space.
    pub use_base_colour_space: bool,
    /// Base HDR headroom (numerator).
    pub base_hdr_headroom_n: u32,
    /// Base HDR headroom (denominator).
    pub base_hdr_headroom_d: u32,
    /// Alternate HDR headroom (numerator).
    pub alternate_hdr_headroom_n: u32,
    /// Alternate HDR headroom (denominator).
    pub alternate_hdr_headroom_d: u32,
    /// Per-channel parameters. For single-channel mode, only index 0 is
    /// meaningful (indices 1 and 2 are copies of index 0).
    pub channels: [GainMapChannel; 3],
}
488
/// Gain map information extracted from an AVIF container.
///
/// Bundles the ISO 21496-1 metadata, the raw AV1-encoded gain map image data,
/// and the alternate rendition's color information into a single type.
///
/// The `gain_map_data` field contains an AV1 bitstream that can be decoded
/// with any AV1 decoder (e.g., rav1d) to obtain the gain map pixel values.
///
/// # Example
///
/// ```no_run
/// let bytes = std::fs::read("hdr.avif").unwrap();
/// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
/// if let Some(Ok(gm)) = parser.gain_map() {
///     println!("Gain map: {} bytes", gm.gain_map_data.len());
///     println!("Multichannel: {}", gm.metadata.is_multichannel);
/// }
/// ```
#[derive(Debug, Clone)]
pub struct AvifGainMap {
    /// ISO 21496-1 gain map metadata (parsed from the `tmap` item payload).
    pub metadata: GainMapMetadata,
    /// Raw AV1 bitstream of the gain map image. Decode with an AV1 decoder
    /// to obtain the gain map pixel values.
    ///
    /// NOTE(review): uses std `Vec` rather than the crate's `TryVec` —
    /// presumably an accepted exception to the fallible-allocation policy;
    /// confirm.
    pub gain_map_data: std::vec::Vec<u8>,
    /// Color information for the alternate (typically HDR) rendition,
    /// from the `tmap` item's `colr` property.
    pub alt_color_info: Option<ColorInformation>,
}

/// Depth auxiliary image extracted from an AVIF container.
///
/// AVIF supports auxiliary images via `auxl` item references with `auxC` type
/// properties, following the HEIF (ISO 23008-12) auxiliary image mechanism.
/// Depth maps use the auxiliary type URN
/// `urn:mpeg:mpegB:cicp:systems:auxiliary:depth` (MPEG-B Part 23) or the
/// legacy HEVC-style `urn:mpeg:hevc:2015:auxid:2`.
///
/// The `data` field contains a raw AV1 bitstream that can be decoded with
/// any AV1 decoder to obtain the depth image pixel values (typically
/// monochrome 8-bit or 10-bit).
///
/// # Example
///
/// ```no_run
/// let bytes = std::fs::read("portrait.avif").unwrap();
/// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
/// if let Some(Ok(dm)) = parser.depth_map() {
///     println!("Depth map: {}x{}, {} bytes AV1 data", dm.width, dm.height, dm.data.len());
/// }
/// ```
#[derive(Debug, Clone)]
pub struct AvifDepthMap {
    /// Raw AV1 bitstream of the depth auxiliary image. Decode with an AV1
    /// decoder to obtain grayscale depth pixel values.
    pub data: std::vec::Vec<u8>,
    /// Width of the depth image in pixels (from `ispe` property).
    pub width: u32,
    /// Height of the depth image in pixels (from `ispe` property).
    pub height: u32,
    /// AV1 codec configuration for the depth item (from `av1C` property).
    pub av1_config: Option<AV1Config>,
    /// Color information for the depth item (from `colr` property), if present.
    pub color_info: Option<ColorInformation>,
}
554
/// Operating point selector from the `a1op` property box.
///
/// Selects which AV1 operating point to decode for multi-operating-point images.
/// See AVIF § 4.3.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct OperatingPointSelector {
    /// Operating point index (0..31)
    pub op_index: u8,
}

/// Layer selector from the `lsel` property box.
///
/// Selects which spatial layer to render for layered/progressive images.
/// See HEIF (ISO 23008-12).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LayerSelector {
    /// Layer ID to render (0-3), or 0xFFFF for all layers (progressive)
    pub layer_id: u16,
}

/// AV1 layered image indexing from the `a1lx` property box.
///
/// Provides byte sizes for the first 3 layers so decoders can seek
/// to a specific layer without parsing the full bitstream.
/// See AVIF § 4.3.6.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AV1LayeredImageIndexing {
    /// Byte sizes of layers 0, 1, 2. The last layer's size is implicit
    /// (total item size minus the sum of these three).
    pub layer_sizes: [u32; 3],
}
586
/// Options for parsing AVIF files
///
/// Prefer using [`DecodeConfig::lenient()`] with [`AvifParser`] instead.
#[derive(Debug, Clone, Copy, Default)]
pub struct ParseOptions {
    /// Enable lenient parsing mode
    ///
    /// When true, non-critical validation errors (like non-zero flags in boxes
    /// that expect zero flags) will be ignored instead of returning errors.
    /// This allows parsing of slightly malformed but otherwise valid AVIF files.
    ///
    /// Default: false (strict validation)
    pub lenient: bool,
}
602
/// Configuration for parsing AVIF files with resource limits and validation options
///
/// Provides fine-grained control over resource consumption during AVIF parsing,
/// allowing defensive parsing against malicious or malformed files.
///
/// Resource limits are checked **before** allocations occur, preventing out-of-memory
/// conditions from malicious files that claim unrealistic dimensions or counts.
///
/// # Examples
///
/// ```rust
/// use zenavif_parse::DecodeConfig;
///
/// // Default limits (suitable for most apps)
/// let config = DecodeConfig::default();
///
/// // Strict limits for untrusted input
/// let config = DecodeConfig::default()
///     .with_peak_memory_limit(100_000_000)  // 100MB
///     .with_total_megapixels_limit(64)       // 64MP max
///     .with_max_animation_frames(100);       // 100 frames
///
/// // No limits (backwards compatible with read_avif)
/// let config = DecodeConfig::unlimited();
/// ```
#[derive(Debug, Clone)]
pub struct DecodeConfig {
    /// Maximum peak heap memory usage in bytes.
    /// Default: 1GB (1,000,000,000 bytes)
    pub peak_memory_limit: Option<u64>,

    /// Maximum total megapixels for grid images.
    /// Default: 512 megapixels
    pub total_megapixels_limit: Option<u32>,

    /// Maximum number of animation frames.
    /// Default: 10,000 frames
    pub max_animation_frames: Option<u32>,

    /// Maximum number of grid tiles.
    /// Default: 1,000 tiles
    pub max_grid_tiles: Option<u32>,

    /// Enable lenient parsing mode.
    /// Default: false (strict validation)
    pub lenient: bool,
}

impl Default for DecodeConfig {
    /// Defaults: 1 GB peak memory, 512 MP, 10,000 frames, 1,000 tiles, strict.
    fn default() -> Self {
        DecodeConfig {
            peak_memory_limit: Some(1_000_000_000),
            total_megapixels_limit: Some(512),
            max_animation_frames: Some(10_000),
            max_grid_tiles: Some(1_000),
            lenient: false,
        }
    }
}

impl DecodeConfig {
    /// Create a configuration with no resource limits.
    ///
    /// Equivalent to the behavior of `read_avif()` before resource limits were added.
    pub fn unlimited() -> Self {
        DecodeConfig {
            peak_memory_limit: None,
            total_megapixels_limit: None,
            max_animation_frames: None,
            max_grid_tiles: None,
            lenient: false,
        }
    }

    /// Set the peak memory limit in bytes
    pub fn with_peak_memory_limit(self, bytes: u64) -> Self {
        Self {
            peak_memory_limit: Some(bytes),
            ..self
        }
    }

    /// Set the total megapixels limit for grid images
    pub fn with_total_megapixels_limit(self, megapixels: u32) -> Self {
        Self {
            total_megapixels_limit: Some(megapixels),
            ..self
        }
    }

    /// Set the maximum animation frame count
    pub fn with_max_animation_frames(self, frames: u32) -> Self {
        Self {
            max_animation_frames: Some(frames),
            ..self
        }
    }

    /// Set the maximum grid tile count
    pub fn with_max_grid_tiles(self, tiles: u32) -> Self {
        Self {
            max_grid_tiles: Some(tiles),
            ..self
        }
    }

    /// Enable lenient parsing mode
    pub fn lenient(self, lenient: bool) -> Self {
        Self { lenient, ..self }
    }
}
707
/// Grid image configuration for tiled/grid-based AVIF images.
///
/// For tiled/grid AVIF images, this describes the grid layout.
/// Grid images are composed of multiple AV1 image items (tiles) arranged in a rectangular grid.
///
/// ## Grid Layout Determination
///
/// Grid layout can be specified in two ways:
/// 1. **Explicit ImageGrid property box** - contains rows, columns, and output dimensions
/// 2. **Calculated from ispe properties** - when no ImageGrid box exists, dimensions are
///    calculated by dividing the grid item's dimensions by a tile's dimensions
///
/// ## Output Dimensions
///
/// - `output_width` and `output_height` may be 0, indicating the decoder should calculate
///   them from the tile dimensions
/// - When non-zero, they specify the exact output dimensions of the composed image
#[derive(Debug, Clone, PartialEq)]
pub struct GridConfig {
    /// Number of tile rows (1-256)
    pub rows: u8,
    /// Number of tile columns (1-256)
    pub columns: u8,
    /// Output width in pixels (0 = calculate from tiles)
    pub output_width: u32,
    /// Output height in pixels (0 = calculate from tiles)
    pub output_height: u32,
}
737
/// Frame information for animated AVIF
///
/// Only available with the `eager` feature enabled.
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::frame()` which returns `FrameRef` instead")]
#[derive(Debug)]
pub struct AnimationFrame {
    /// AV1 bitstream data for this frame
    pub data: TryVec<u8>,
    /// Duration in milliseconds (0 if unknown)
    pub duration_ms: u32,
}

/// Animation configuration for animated AVIF (avis brand)
///
/// Only available with the `eager` feature enabled.
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::animation_info()` and `AvifParser::frames()` instead")]
#[derive(Debug)]
#[allow(deprecated)]
pub struct AnimationConfig {
    /// Number of times to loop (0 = infinite)
    pub loop_count: u32,
    /// All frames in the animation
    pub frames: TryVec<AnimationFrame>,
}
760
// Internal structures for animation parsing

/// Movie header fields (presumably parsed from the `mvhd` box — confirm at
/// the parse site, which is outside this view).
#[derive(Debug)]
struct MovieHeader {
    // Parsed but currently unused (underscore-prefixed).
    _timescale: u32,
    _duration: u64,
}

/// Media header fields (presumably parsed from the `mdhd` box — confirm at
/// the parse site).
#[derive(Debug)]
struct MediaHeader {
    /// Media timescale, used as a track's `media_timescale` (see `ParsedTrack`).
    timescale: u32,
    // Parsed but currently unused.
    _duration: u64,
}

/// One run of identically-timed samples (an `stts`-style entry —
/// confirm at the parse site).
#[derive(Debug)]
struct TimeToSampleEntry {
    /// Number of consecutive samples sharing this delta.
    sample_count: u32,
    /// Duration of each sample in media timescale units.
    sample_delta: u32,
}

/// One sample-to-chunk mapping entry (an `stsc`-style entry —
/// confirm at the parse site).
#[derive(Debug)]
struct SampleToChunkEntry {
    /// 1-based index of the first chunk this entry applies to.
    first_chunk: u32,
    /// Samples stored in each of those chunks.
    samples_per_chunk: u32,
    // Parsed but currently unused.
    _sample_description_index: u32,
}

/// Flattened sample timing/size/offset information for one track.
#[derive(Debug)]
struct SampleTable {
    time_to_sample: TryVec<TimeToSampleEntry>,
    sample_sizes: TryVec<u32>,
    /// Precomputed byte offset for each sample, derived from
    /// sample_to_chunk + chunk_offsets + sample_sizes during parsing.
    sample_offsets: TryVec<u64>,
}

/// A track reference entry (e.g., auxl, cdsc) parsed from a `tref` sub-box.
#[derive(Debug)]
struct TrackReference {
    /// Reference type four-character code (e.g. `auxl`, `cdsc`).
    reference_type: FourCC,
    /// IDs of the tracks being referenced.
    track_ids: TryVec<u32>,
}

/// Codec properties extracted from a `stsd` VisualSampleEntry.
#[derive(Debug, Clone, Default)]
struct TrackCodecConfig {
    av1_config: Option<AV1Config>,
    color_info: Option<ColorInformation>,
}

/// Parsed data from a single track box (`trak`).
#[derive(Debug)]
struct ParsedTrack {
    track_id: u32,
    handler_type: FourCC,
    media_timescale: u32,
    sample_table: SampleTable,
    references: TryVec<TrackReference>,
    loop_count: u32,
    codec_config: TrackCodecConfig,
}

/// Paired color + optional alpha animation data after track association.
struct ParsedAnimationData {
    color_timescale: u32,
    color_sample_table: SampleTable,
    alpha_timescale: Option<u32>,
    alpha_sample_table: Option<SampleTable>,
    loop_count: u32,
    color_codec_config: TrackCodecConfig,
}
832
833#[cfg(feature = "eager")]
834#[deprecated(since = "1.5.0", note = "Use `AvifParser` for zero-copy parsing instead")]
835#[derive(Debug, Default)]
836#[allow(deprecated)]
837pub struct AvifData {
838    /// AV1 data for the color channels.
839    ///
840    /// The collected data indicated by the `pitm` box, See ISO 14496-12:2015 § 8.11.4
841    pub primary_item: TryVec<u8>,
842    /// AV1 data for alpha channel.
843    ///
844    /// Associated alpha channel for the primary item, if any
845    pub alpha_item: Option<TryVec<u8>>,
846    /// If true, divide RGB values by the alpha value.
847    ///
848    /// See `prem` in MIAF § 7.3.5.2
849    pub premultiplied_alpha: bool,
850
851    /// Grid configuration for tiled images.
852    ///
853    /// If present, the image is a grid and `grid_tiles` contains the tile data.
854    /// Grid layout is determined either from an explicit ImageGrid property box or
855    /// calculated from ispe (Image Spatial Extents) properties.
856    ///
857    /// ## Example
858    ///
859    /// ```no_run
860    /// #[allow(deprecated)]
861    /// use std::fs::File;
862    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
863    /// #[allow(deprecated)]
864    /// let data = zenavif_parse::read_avif(&mut File::open("image.avif")?)?;
865    ///
866    /// if let Some(grid) = data.grid_config {
867    ///     println!("Grid: {}×{} tiles", grid.rows, grid.columns);
868    ///     println!("Output: {}×{}", grid.output_width, grid.output_height);
869    ///     println!("Tile count: {}", data.grid_tiles.len());
870    /// }
871    /// # Ok(())
872    /// # }
873    /// ```
874    pub grid_config: Option<GridConfig>,
875
876    /// AV1 payloads for grid image tiles.
877    ///
878    /// Empty for non-grid images. For grid images, contains one entry per tile.
879    ///
880    /// **Tile ordering:** Tiles are guaranteed to be in the correct order for grid assembly,
881    /// sorted by their dimgIdx (reference index). This is row-major order: tiles in the first
882    /// row from left to right, then the second row, etc.
883    pub grid_tiles: TryVec<TryVec<u8>>,
884
885    /// Animation configuration (for animated AVIF with avis brand)
886    ///
887    /// When present, primary_item contains the first frame
888    pub animation: Option<AnimationConfig>,
889
890    /// AV1 codec configuration from the container's `av1C` property.
891    pub av1_config: Option<AV1Config>,
892
893    /// Colour information from the container's `colr` property.
894    pub color_info: Option<ColorInformation>,
895
896    /// Image rotation from the container's `irot` property.
897    pub rotation: Option<ImageRotation>,
898
899    /// Image mirror from the container's `imir` property.
900    pub mirror: Option<ImageMirror>,
901
902    /// Clean aperture (crop) from the container's `clap` property.
903    pub clean_aperture: Option<CleanAperture>,
904
905    /// Pixel aspect ratio from the container's `pasp` property.
906    pub pixel_aspect_ratio: Option<PixelAspectRatio>,
907
908    /// Content light level from the container's `clli` property.
909    pub content_light_level: Option<ContentLightLevel>,
910
911    /// Mastering display colour volume from the container's `mdcv` property.
912    pub mastering_display: Option<MasteringDisplayColourVolume>,
913
914    /// Content colour volume from the container's `cclv` property.
915    pub content_colour_volume: Option<ContentColourVolume>,
916
917    /// Ambient viewing environment from the container's `amve` property.
918    pub ambient_viewing: Option<AmbientViewingEnvironment>,
919
920    /// Operating point selector from the container's `a1op` property.
921    pub operating_point: Option<OperatingPointSelector>,
922
923    /// Layer selector from the container's `lsel` property.
924    pub layer_selector: Option<LayerSelector>,
925
926    /// AV1 layered image indexing from the container's `a1lx` property.
927    pub layered_image_indexing: Option<AV1LayeredImageIndexing>,
928
929    /// EXIF metadata from a `cdsc`-linked `Exif` item.
930    ///
931    /// Raw EXIF data (TIFF header onwards), with the 4-byte AVIF offset prefix stripped.
932    pub exif: Option<TryVec<u8>>,
933
934    /// XMP metadata from a `cdsc`-linked `mime` item.
935    ///
936    /// Raw XMP/XML data as UTF-8.
937    pub xmp: Option<TryVec<u8>>,
938
939    /// Gain map metadata from a `tmap` derived image item.
940    pub gain_map_metadata: Option<GainMapMetadata>,
941
942    /// AV1-encoded gain map image data.
943    pub gain_map_item: Option<TryVec<u8>>,
944
945    /// Color information for the alternate (HDR) rendition from the `tmap` item.
946    pub gain_map_color_info: Option<ColorInformation>,
947
948    /// Depth auxiliary image data, if present.
949    pub depth_item: Option<TryVec<u8>>,
950
951    /// Width of the depth auxiliary image (from `ispe`).
952    pub depth_width: u32,
953
954    /// Height of the depth auxiliary image (from `ispe`).
955    pub depth_height: u32,
956
957    /// AV1 codec configuration for the depth auxiliary item.
958    pub depth_av1_config: Option<AV1Config>,
959
960    /// Color information for the depth auxiliary item.
961    pub depth_color_info: Option<ColorInformation>,
962
963    /// Major brand from the `ftyp` box (e.g., `*b"avif"` or `*b"avis"`).
964    pub major_brand: [u8; 4],
965
966    /// Compatible brands from the `ftyp` box.
967    pub compatible_brands: std::vec::Vec<[u8; 4]>,
968}
969
#[cfg(feature = "eager")]
#[allow(deprecated)]
impl AvifData {
    /// Get the full gain map bundle, if present.
    ///
    /// Clones the gain map metadata and copies the gain map payload out of
    /// this `AvifData` — the original fields are left untouched (the method
    /// takes `&self`) — and returns an [`AvifGainMap`]. Returns `None` if
    /// either the gain map metadata or the gain map image data is absent.
    pub fn gain_map(&self) -> Option<AvifGainMap> {
        let metadata = self.gain_map_metadata.as_ref()?.clone();
        let gain_map_data = self.gain_map_item.as_ref()?.to_vec();
        Some(AvifGainMap {
            metadata,
            gain_map_data,
            alt_color_info: self.gain_map_color_info.clone(),
        })
    }

    /// Get the depth auxiliary image bundle, if present.
    ///
    /// Returns [`AvifDepthMap`] with a copy of the raw AV1 depth data, its
    /// dimensions, and codec/color info. Returns `None` if no depth
    /// auxiliary item is present.
    pub fn depth_map(&self) -> Option<AvifDepthMap> {
        let data = self.depth_item.as_ref()?.to_vec();
        Some(AvifDepthMap {
            data,
            width: self.depth_width,
            height: self.depth_height,
            av1_config: self.depth_av1_config.clone(),
            color_info: self.depth_color_info.clone(),
        })
    }
}
1002
1003// # Memory Usage
1004//
1005// This implementation loads all image data into owned vectors (`TryVec<u8>`), which has
1006// memory implications depending on the file type:
1007//
1008// - **Static images**: Single copy of compressed data (~5-50KB typical)
1009//   - `primary_item`: compressed AV1 data
1010//   - `alpha_item`: compressed alpha data (if present)
1011//
1012// - **Grid images**: All tiles loaded (~100KB-2MB for large grids)
1013//   - `grid_tiles`: one compressed tile per grid cell
1014//
1015// - **Animated images**: All frames loaded eagerly (⚠️ HIGH MEMORY)
1016//   - Internal mdat boxes: ~500KB for 95-frame video
1017//   - Extracted frames: ~500KB duplicated in `animation.frames[].data`
1018//   - **Total: ~2× file size in memory**
1019//
1020// For large animated files, consider using a streaming approach or processing frames
1021// individually rather than loading the entire `AvifData` structure.
1022
#[cfg(feature = "eager")]
#[allow(deprecated)]
impl AvifData {
    #[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader()` instead")]
    pub fn from_reader<R: Read>(reader: &mut R) -> Result<Self> {
        read_avif(reader)
    }

    /// Parses AV1 data to get basic properties of the opaque channel
    pub fn primary_item_metadata(&self) -> Result<AV1Metadata> {
        AV1Metadata::parse_av1_bitstream(&self.primary_item)
    }

    /// Parses AV1 data to get basic properties about the alpha channel, if any
    pub fn alpha_item_metadata(&self) -> Result<Option<AV1Metadata>> {
        match self.alpha_item.as_deref() {
            Some(alpha) => AV1Metadata::parse_av1_bitstream(alpha).map(Some),
            None => Ok(None),
        }
    }
}
1041
/// Chroma subsampling configuration for AV1/AVIF.
///
/// Mapping to the usual Y'CbCr notations:
/// * `(false, false)` → 4:4:4 (chroma at full resolution),
/// * `(true, false)` → 4:2:2 (chroma halved horizontally),
/// * `(true, true)` → 4:2:0 (chroma halved on both axes).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChromaSubsampling {
    /// Whether the horizontal (X) axis is subsampled.
    pub horizontal: bool,
    /// Whether the vertical (Y) axis is subsampled.
    pub vertical: bool,
}
1054
1055impl ChromaSubsampling {
1056    /// 4:4:4 — no chroma subsampling.
1057    pub const NONE: Self = Self { horizontal: false, vertical: false };
1058    /// 4:2:0 — both axes subsampled.
1059    pub const YUV420: Self = Self { horizontal: true, vertical: true };
1060    /// 4:2:2 — horizontal subsampling only.
1061    pub const YUV422: Self = Self { horizontal: true, vertical: false };
1062}
1063
1064impl From<(bool, bool)> for ChromaSubsampling {
1065    fn from((h, v): (bool, bool)) -> Self {
1066        Self { horizontal: h, vertical: v }
1067    }
1068}
1069
1070impl From<ChromaSubsampling> for (bool, bool) {
1071    fn from(cs: ChromaSubsampling) -> Self {
1072        (cs.horizontal, cs.vertical)
1073    }
1074}
1075
/// AV1 sequence header metadata parsed from an OBU bitstream.
///
/// See [`AvifParser::primary_metadata()`] and [`AV1Metadata::parse_av1_bitstream()`].
#[non_exhaustive]
#[derive(Debug, Clone)]
pub struct AV1Metadata {
    /// Should be true for non-animated AVIF
    pub still_picture: bool,
    /// Maximum frame width from the AV1 sequence header.
    pub max_frame_width: NonZeroU32,
    /// Maximum frame height from the AV1 sequence header.
    pub max_frame_height: NonZeroU32,
    /// 8, 10, or 12
    pub bit_depth: u8,
    /// 0, 1 or 2 for the level of complexity
    pub seq_profile: u8,
    /// Chroma subsampling. Use named fields (`horizontal`, `vertical`) or
    /// constants like [`ChromaSubsampling::YUV420`].
    pub chroma_subsampling: ChromaSubsampling,
    /// Whether the stream is monochrome (from the sequence header's color config).
    pub monochrome: bool,
    /// AV1 base quantizer index (0-255) from the first frame header.
    /// `None` if the frame header could not be parsed.
    /// 0 = lossless candidate, 255 = worst quality.
    pub base_q_idx: Option<u8>,
    /// Whether the encoding is lossless (all quantization parameters are zero
    /// and chroma is not subsampled).
    /// `None` if the frame header could not be parsed.
    pub lossless: Option<bool>,
}
1103
1104impl AV1Metadata {
1105    /// Parses raw AV1 bitstream (sequence header + optional frame header).
1106    ///
1107    /// Extracts sequence-level metadata and attempts to parse the first frame
1108    /// header for quantization/lossless detection.
1109    ///
1110    /// This is for the bare image payload from an encoder, not an AVIF/HEIF file.
1111    /// To parse AVIF files, see [`AvifParser::from_reader()`].
1112    #[inline(never)]
1113    pub fn parse_av1_bitstream(obu_bitstream: &[u8]) -> Result<Self> {
1114        let (h, frame_quant) = obu::parse_obu_with_frame_info(obu_bitstream)?;
1115        let no_chroma_subsampling = !h.color.chroma_subsampling.horizontal
1116            && !h.color.chroma_subsampling.vertical;
1117        Ok(Self {
1118            still_picture: h.still_picture,
1119            max_frame_width: h.max_frame_width,
1120            max_frame_height: h.max_frame_height,
1121            bit_depth: h.color.bit_depth,
1122            seq_profile: h.seq_profile,
1123            chroma_subsampling: h.color.chroma_subsampling,
1124            monochrome: h.color.monochrome,
1125            base_q_idx: frame_quant.map(|fq| fq.base_q_idx),
1126            lossless: frame_quant.map(|fq| fq.coded_lossless && no_chroma_subsampling),
1127        })
1128    }
1129}
1130
/// A single frame from an animated AVIF, with zero-copy when possible.
///
/// The `data` field is `Cow::Borrowed` when the frame lives in a single
/// contiguous mdat extent, and `Cow::Owned` when extents must be concatenated.
// Derives added: all fields are Debug + Clone, and a public type should be
// debuggable/cloneable by callers without wrapping.
#[derive(Debug, Clone)]
pub struct FrameRef<'a> {
    /// AV1 payload for the color channel of this frame.
    pub data: Cow<'a, [u8]>,
    /// Alpha channel data for this frame, if the animation has a separate alpha track.
    pub alpha_data: Option<Cow<'a, [u8]>>,
    /// Display duration of this frame in milliseconds.
    pub duration_ms: u32,
}
1141
/// Byte range of a media data box within the file.
struct MdatBounds {
    /// Absolute file offset where the mdat box's payload begins
    /// (recorded via `b.offset()` at content start in `parse_raw`).
    offset: u64,
    /// Number of payload bytes in the box (header excluded).
    length: u64,
}
1147
/// Where an item's data lives: construction method + extent ranges.
struct ItemExtents {
    /// How the extent offsets are addressed (e.g. file-absolute vs. idat-relative).
    construction_method: ConstructionMethod,
    /// Byte ranges making up the item's payload, in order; a single extent
    /// allows zero-copy (`Cow::Borrowed`) access.
    extents: TryVec<ExtentRange>,
}
1153
/// Zero-copy AVIF parser backed by a borrowed or owned byte buffer.
///
/// `AvifParser` records byte offsets during parsing but does **not** copy
/// mdat payload data. Data access methods return `Cow<[u8]>` — borrowed
/// when the item is a single contiguous extent, owned when extents must
/// be concatenated.
///
/// # Constructors
///
/// | Method | Lifetime | Zero-copy? |
/// |--------|----------|------------|
/// | [`from_bytes`](Self::from_bytes) | `'data` | Yes — borrows the slice |
/// | [`from_owned`](Self::from_owned) | `'static` | Within the owned buffer |
/// | [`from_reader`](Self::from_reader) | `'static` | Reads all, then owned |
///
/// # Example
///
/// ```no_run
/// use zenavif_parse::AvifParser;
///
/// let bytes = std::fs::read("image.avif")?;
/// let parser = AvifParser::from_bytes(&bytes)?;
/// let primary = parser.primary_data()?; // Cow::Borrowed for single-extent
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
pub struct AvifParser<'data> {
    /// The entire file contents; all extents below index into this buffer.
    raw: Cow<'data, [u8]>,
    /// Byte ranges of each non-empty `mdat` box.
    mdat_bounds: TryVec<MdatBounds>,
    /// Contents of the `idat` box, if any (for idat-constructed items).
    idat: Option<TryVec<u8>>,
    /// Extents of the primary item's payload.
    primary: ItemExtents,
    /// Extents of the alpha auxiliary item, if present.
    alpha: Option<ItemExtents>,
    /// Grid layout when the primary item is a `grid` derived image.
    grid_config: Option<GridConfig>,
    /// Extents of each grid tile, sorted by `dimg` reference index.
    tiles: TryVec<ItemExtents>,
    /// Track-level data for animated files (`moov`-based).
    animation_data: Option<AnimationParserData>,
    /// True when a `prem` reference marks the alpha as premultiplied.
    premultiplied_alpha: bool,
    // --- Properties associated with the primary item ---
    av1_config: Option<AV1Config>,
    color_info: Option<ColorInformation>,
    rotation: Option<ImageRotation>,
    mirror: Option<ImageMirror>,
    clean_aperture: Option<CleanAperture>,
    pixel_aspect_ratio: Option<PixelAspectRatio>,
    content_light_level: Option<ContentLightLevel>,
    mastering_display: Option<MasteringDisplayColourVolume>,
    content_colour_volume: Option<ContentColourVolume>,
    ambient_viewing: Option<AmbientViewingEnvironment>,
    operating_point: Option<OperatingPointSelector>,
    layer_selector: Option<LayerSelector>,
    layered_image_indexing: Option<AV1LayeredImageIndexing>,
    // --- Metadata items linked via `cdsc` references to the primary item ---
    exif_item: Option<ItemExtents>,
    xmp_item: Option<ItemExtents>,
    // --- Gain map (`tmap` derived image item) ---
    gain_map_metadata: Option<GainMapMetadata>,
    gain_map: Option<ItemExtents>,
    gain_map_color_info: Option<ColorInformation>,
    // --- Depth auxiliary item and its properties (0/None when absent) ---
    depth_item: Option<ItemExtents>,
    depth_width: u32,
    depth_height: u32,
    depth_av1_config: Option<AV1Config>,
    depth_color_info: Option<ColorInformation>,
    // --- Brands from the `ftyp` box ---
    major_brand: [u8; 4],
    compatible_brands: std::vec::Vec<[u8; 4]>,
}
1215
/// Track-level state captured from `moov`, kept for lazy animation decoding.
struct AnimationParserData {
    /// Ticks per second for the color track's media timeline.
    media_timescale: u32,
    /// Sample table for the color track (per-frame sample sizes, etc.).
    sample_table: SampleTable,
    /// Ticks per second for the alpha track, when one exists.
    alpha_media_timescale: Option<u32>,
    /// Sample table for the alpha track, when one exists.
    alpha_sample_table: Option<SampleTable>,
    /// Loop count for the animation.
    loop_count: u32,
    /// Codec configuration from the color track's sample description (stsd).
    codec_config: TrackCodecConfig,
}
1224
/// Animation metadata from [`AvifParser`]
#[derive(Debug, Clone, Copy)]
pub struct AnimationInfo {
    /// Number of frames in the animation's color track.
    pub frame_count: usize,
    /// Loop count for the animation.
    pub loop_count: u32,
    /// Whether animation has a separate alpha track.
    pub has_alpha: bool,
    /// Media timescale (ticks per second) for the color track.
    pub timescale: u32,
}
1235
/// Parsed structure from the box-level parse pass (no mdat data).
struct ParsedStructure {
    /// `None` for pure AVIF sequences (`avis` brand) that have only `moov`+`mdat`.
    meta: Option<AvifInternalMeta>,
    /// Byte ranges of all non-empty `mdat` boxes, in file order.
    mdat_bounds: TryVec<MdatBounds>,
    /// Associated track data when a `moov` box with tracks was present.
    animation_data: Option<ParsedAnimationData>,
    /// Major brand from the `ftyp` box.
    major_brand: [u8; 4],
    /// Compatible brands from the `ftyp` box.
    compatible_brands: std::vec::Vec<[u8; 4]>,
}
1245
1246impl<'data> AvifParser<'data> {
1247    // ========================================
1248    // Constructors
1249    // ========================================
1250
1251    /// Parse AVIF from a borrowed byte slice (true zero-copy).
1252    ///
1253    /// The returned parser borrows `data` — single-extent items will be
1254    /// returned as `Cow::Borrowed` slices into this buffer.
1255    pub fn from_bytes(data: &'data [u8]) -> Result<Self> {
1256        Self::from_bytes_with_config(data, &DecodeConfig::unlimited(), &Unstoppable)
1257    }
1258
1259    /// Parse AVIF from a borrowed byte slice with resource limits.
1260    pub fn from_bytes_with_config(
1261        data: &'data [u8],
1262        config: &DecodeConfig,
1263        stop: &dyn Stop,
1264    ) -> Result<Self> {
1265        let parsed = Self::parse_raw(data, config, stop)?;
1266        Self::build(Cow::Borrowed(data), parsed, config)
1267    }
1268
1269    /// Parse AVIF from an owned buffer.
1270    ///
1271    /// The returned parser owns the data — single-extent items will still
1272    /// be returned as `Cow::Borrowed` slices (borrowing from the internal buffer).
1273    pub fn from_owned(data: std::vec::Vec<u8>) -> Result<AvifParser<'static>> {
1274        AvifParser::from_owned_with_config(data, &DecodeConfig::unlimited(), &Unstoppable)
1275    }
1276
1277    /// Parse AVIF from an owned buffer with resource limits.
1278    pub fn from_owned_with_config(
1279        data: std::vec::Vec<u8>,
1280        config: &DecodeConfig,
1281        stop: &dyn Stop,
1282    ) -> Result<AvifParser<'static>> {
1283        let parsed = AvifParser::parse_raw(&data, config, stop)?;
1284        AvifParser::build(Cow::Owned(data), parsed, config)
1285    }
1286
1287    /// Parse AVIF from a reader (reads all bytes, then parses).
1288    pub fn from_reader<R: Read>(reader: &mut R) -> Result<AvifParser<'static>> {
1289        AvifParser::from_reader_with_config(reader, &DecodeConfig::unlimited(), &Unstoppable)
1290    }
1291
1292    /// Parse AVIF from a reader with resource limits.
1293    pub fn from_reader_with_config<R: Read>(
1294        reader: &mut R,
1295        config: &DecodeConfig,
1296        stop: &dyn Stop,
1297    ) -> Result<AvifParser<'static>> {
1298        let mut buf = std::vec::Vec::new();
1299        reader.read_to_end(&mut buf)?;
1300        AvifParser::from_owned_with_config(buf, config, stop)
1301    }
1302
1303    // ========================================
1304    // Internal: parse pass (records offsets, no mdat copy)
1305    // ========================================
1306
    /// Parse the AVIF box structure from raw bytes, recording mdat offsets
    /// without copying mdat content.
    ///
    /// Returns the box-level structure: optional `meta`, mdat byte ranges,
    /// optional track data from `moov`, and the `ftyp` brands. Fails unless
    /// the file starts with an `ftyp` box whose major brand is `avif`/`avis`,
    /// and contains at least a `meta` box or a `moov` box with tracks.
    fn parse_raw(data: &[u8], config: &DecodeConfig, stop: &dyn Stop) -> Result<ParsedStructure> {
        let parse_opts = ParseOptions { lenient: config.lenient };
        let mut cursor = std::io::Cursor::new(data);
        // OffsetReader tracks the read position; `b.offset()` below uses it to
        // record where mdat payloads live so they can be sliced from `data` later.
        let mut f = OffsetReader::new(&mut cursor);
        let mut iter = BoxIter::new(&mut f);

        // 'ftyp' box must occur first; see ISO 14496-12:2015 § 4.3.1
        let (major_brand, compatible_brands) = if let Some(mut b) = iter.next_box()? {
            if b.head.name == BoxType::FileTypeBox {
                let ftyp = read_ftyp(&mut b)?;
                // Only still (avif) and sequence (avis) major brands are accepted.
                if ftyp.major_brand != b"avif" && ftyp.major_brand != b"avis" {
                    return Err(Error::InvalidData("ftyp must be 'avif' or 'avis'"));
                }
                let major = ftyp.major_brand.value;
                let compat = ftyp.compatible_brands.iter().map(|b| b.value).collect();
                (major, compat)
            } else {
                return Err(Error::InvalidData("'ftyp' box must occur first"));
            }
        } else {
            // No boxes at all (e.g. empty input).
            return Err(Error::InvalidData("'ftyp' box must occur first"));
        };

        let mut meta = None;
        let mut mdat_bounds = TryVec::new();
        let mut animation_data: Option<ParsedAnimationData> = None;

        // Walk the remaining top-level boxes in file order.
        while let Some(mut b) = iter.next_box()? {
            // Cooperative cancellation point for long parses.
            stop.check()?;

            match b.head.name {
                BoxType::MetadataBox => {
                    if meta.is_some() {
                        return Err(Error::InvalidData(
                            "There should be zero or one meta boxes per ISO 14496-12:2015 § 8.11.1.1",
                        ));
                    }
                    meta = Some(read_avif_meta(&mut b, &parse_opts)?);
                }
                BoxType::MovieBox => {
                    // Animated AVIF: read tracks and pair color/alpha together.
                    let tracks = read_moov(&mut b)?;
                    if !tracks.is_empty() {
                        animation_data = Some(associate_tracks(tracks)?);
                    }
                }
                BoxType::MediaDataBox => {
                    // Record where the payload lives; empty mdat boxes are ignored.
                    if b.bytes_left() > 0 {
                        let offset = b.offset();
                        let length = b.bytes_left();
                        mdat_bounds.push(MdatBounds { offset, length })?;
                    }
                    // Skip the content — we'll slice into raw later
                    skip_box_content(&mut b)?;
                }
                _ => skip_box_content(&mut b)?,
            }

            // Ensure the box was consumed as expected before moving on.
            check_parser_state(&b.head, &b.content)?;
        }

        // meta is required for still images, but pure AVIF sequences (avis brand)
        // can have only moov+mdat with no meta box.
        if meta.is_none() && animation_data.is_none() {
            return Err(Error::InvalidData("missing meta"));
        }

        Ok(ParsedStructure { meta, mdat_bounds, animation_data, major_brand, compatible_brands })
    }
1377
1378    /// Build an AvifParser from raw bytes + parsed structure.
1379    fn build(raw: Cow<'data, [u8]>, parsed: ParsedStructure, config: &DecodeConfig) -> Result<Self> {
1380        let tracker = ResourceTracker::new(config);
1381
1382        // Store animation metadata if present
1383        let animation_data = if let Some(anim) = parsed.animation_data {
1384            tracker.validate_animation_frames(anim.color_sample_table.sample_sizes.len() as u32)?;
1385            Some(AnimationParserData {
1386                media_timescale: anim.color_timescale,
1387                sample_table: anim.color_sample_table,
1388                alpha_media_timescale: anim.alpha_timescale,
1389                alpha_sample_table: anim.alpha_sample_table,
1390                loop_count: anim.loop_count,
1391                codec_config: anim.color_codec_config,
1392            })
1393        } else {
1394            None
1395        };
1396
1397        // Pure sequence (no meta box): only animation methods will work.
1398        // Use codec config from the color track's stsd if available.
1399        let Some(meta) = parsed.meta else {
1400            let track_config = animation_data.as_ref()
1401                .map(|a| a.codec_config.clone())
1402                .unwrap_or_default();
1403            return Ok(Self {
1404                raw,
1405                mdat_bounds: parsed.mdat_bounds,
1406                idat: None,
1407                primary: ItemExtents { construction_method: ConstructionMethod::File, extents: TryVec::new() },
1408                alpha: None,
1409                grid_config: None,
1410                tiles: TryVec::new(),
1411                animation_data,
1412                premultiplied_alpha: false,
1413                av1_config: track_config.av1_config,
1414                color_info: track_config.color_info,
1415                rotation: None,
1416                mirror: None,
1417                clean_aperture: None,
1418                pixel_aspect_ratio: None,
1419                content_light_level: None,
1420                mastering_display: None,
1421                content_colour_volume: None,
1422                ambient_viewing: None,
1423                operating_point: None,
1424                layer_selector: None,
1425                layered_image_indexing: None,
1426                exif_item: None,
1427                xmp_item: None,
1428                gain_map_metadata: None,
1429                gain_map: None,
1430                gain_map_color_info: None,
1431                depth_item: None,
1432                depth_width: 0,
1433                depth_height: 0,
1434                depth_av1_config: None,
1435                depth_color_info: None,
1436                major_brand: parsed.major_brand,
1437                compatible_brands: parsed.compatible_brands,
1438            });
1439        };
1440
1441        // Get primary item extents
1442        let primary = Self::get_item_extents(&meta, meta.primary_item_id)?;
1443
1444        // Find alpha item and get its extents
1445        let alpha_item_id = meta
1446            .item_references
1447            .iter()
1448            .filter(|iref| {
1449                iref.to_item_id == meta.primary_item_id
1450                    && iref.from_item_id != meta.primary_item_id
1451                    && iref.item_type == b"auxl"
1452            })
1453            .map(|iref| iref.from_item_id)
1454            .find(|&item_id| {
1455                meta.properties.iter().any(|prop| {
1456                    prop.item_id == item_id
1457                        && match &prop.property {
1458                            ItemProperty::AuxiliaryType(urn) => {
1459                                urn.type_subtype().0 == b"urn:mpeg:mpegB:cicp:systems:auxiliary:alpha"
1460                            }
1461                            _ => false,
1462                        }
1463                })
1464            });
1465
1466        let alpha = alpha_item_id
1467            .map(|id| Self::get_item_extents(&meta, id))
1468            .transpose()?;
1469
1470        // Check for premultiplied alpha
1471        let premultiplied_alpha = alpha_item_id.is_some_and(|alpha_id| {
1472            meta.item_references.iter().any(|iref| {
1473                iref.from_item_id == meta.primary_item_id
1474                    && iref.to_item_id == alpha_id
1475                    && iref.item_type == b"prem"
1476            })
1477        });
1478
1479        // Find depth auxiliary item (auxl reference with depth auxC type)
1480        let depth_item_id = meta
1481            .item_references
1482            .iter()
1483            .filter(|iref| {
1484                iref.to_item_id == meta.primary_item_id
1485                    && iref.from_item_id != meta.primary_item_id
1486                    && iref.item_type == b"auxl"
1487            })
1488            .map(|iref| iref.from_item_id)
1489            .find(|&item_id| {
1490                // Skip the alpha item if we already found one
1491                if alpha_item_id == Some(item_id) {
1492                    return false;
1493                }
1494                meta.properties.iter().any(|prop| {
1495                    prop.item_id == item_id
1496                        && match &prop.property {
1497                            ItemProperty::AuxiliaryType(urn) => {
1498                                is_depth_auxiliary_urn(urn.type_subtype().0)
1499                            }
1500                            _ => false,
1501                        }
1502                })
1503            });
1504
1505        let (depth_item, depth_width, depth_height, depth_av1_config, depth_color_info) =
1506            if let Some(depth_id) = depth_item_id {
1507                let extents = Self::get_item_extents(&meta, depth_id)?;
1508                // Get dimensions from ispe property
1509                let dims = meta.properties.iter().find_map(|p| {
1510                    if p.item_id == depth_id {
1511                        match &p.property {
1512                            ItemProperty::ImageSpatialExtents(e) => Some((e.width, e.height)),
1513                            _ => None,
1514                        }
1515                    } else {
1516                        None
1517                    }
1518                });
1519                let (w, h) = dims.unwrap_or((0, 0));
1520                // Get av1C property
1521                let av1c = meta.properties.iter().find_map(|p| {
1522                    if p.item_id == depth_id {
1523                        match &p.property {
1524                            ItemProperty::AV1Config(c) => Some(c.clone()),
1525                            _ => None,
1526                        }
1527                    } else {
1528                        None
1529                    }
1530                });
1531                // Get colr property
1532                let colr = meta.properties.iter().find_map(|p| {
1533                    if p.item_id == depth_id {
1534                        match &p.property {
1535                            ItemProperty::ColorInformation(c) => Some(c.clone()),
1536                            _ => None,
1537                        }
1538                    } else {
1539                        None
1540                    }
1541                });
1542                (Some(extents), w, h, av1c, colr)
1543            } else {
1544                (None, 0, 0, None, None)
1545            };
1546
1547        // Find EXIF/XMP items linked via cdsc references to the primary item
1548        let mut exif_item = None;
1549        let mut xmp_item = None;
1550        for iref in meta.item_references.iter() {
1551            if iref.to_item_id != meta.primary_item_id || iref.item_type != b"cdsc" {
1552                continue;
1553            }
1554            let desc_item_id = iref.from_item_id;
1555            let Some(info) = meta.item_infos.iter().find(|i| i.item_id == desc_item_id) else {
1556                continue;
1557            };
1558            if info.item_type == b"Exif" && exif_item.is_none() {
1559                exif_item = Some(Self::get_item_extents(&meta, desc_item_id)?);
1560            } else if info.item_type == b"mime" && xmp_item.is_none() {
1561                xmp_item = Some(Self::get_item_extents(&meta, desc_item_id)?);
1562            }
1563        }
1564
1565        // Check if primary item is a grid (tiled image)
1566        let is_grid = meta
1567            .item_infos
1568            .iter()
1569            .find(|x| x.item_id == meta.primary_item_id)
1570            .is_some_and(|info| info.item_type == b"grid");
1571
1572        // Extract grid configuration and tile extents if this is a grid
1573        let (grid_config, tiles) = if is_grid {
1574            let mut tiles_with_index: TryVec<(u32, u16)> = TryVec::new();
1575            for iref in meta.item_references.iter() {
1576                if iref.from_item_id == meta.primary_item_id && iref.item_type == b"dimg" {
1577                    tiles_with_index.push((iref.to_item_id, iref.reference_index))?;
1578                }
1579            }
1580
1581            tracker.validate_grid_tiles(tiles_with_index.len() as u32)?;
1582            tiles_with_index.sort_by_key(|&(_, idx)| idx);
1583
1584            let mut tile_extents = TryVec::new();
1585            for (tile_id, _) in tiles_with_index.iter() {
1586                tile_extents.push(Self::get_item_extents(&meta, *tile_id)?)?;
1587            }
1588
1589            let mut tile_ids = TryVec::new();
1590            for (tile_id, _) in tiles_with_index.iter() {
1591                tile_ids.push(*tile_id)?;
1592            }
1593
1594            let grid_config = Self::calculate_grid_config(&meta, &tile_ids)?;
1595
1596            // AVIF 1.2: transformative properties SHALL NOT be on grid tile items
1597            for (tile_id, _) in tiles_with_index.iter() {
1598                for prop in meta.properties.iter() {
1599                    if prop.item_id == *tile_id {
1600                        match &prop.property {
1601                            ItemProperty::Rotation(_)
1602                            | ItemProperty::Mirror(_)
1603                            | ItemProperty::CleanAperture(_) => {
1604                                warn!("grid tile {} has a transformative property (irot/imir/clap), violating AVIF spec", tile_id);
1605                            }
1606                            _ => {}
1607                        }
1608                    }
1609                }
1610            }
1611
1612            (Some(grid_config), tile_extents)
1613        } else {
1614            (None, TryVec::new())
1615        };
1616
1617        // Detect gain map (tmap derived image item)
1618        let (gain_map_metadata, gain_map, gain_map_color_info) = {
1619            let tmap_item = meta.item_infos.iter()
1620                .find(|info| info.item_type == b"tmap");
1621
1622            if let Some(tmap_info) = tmap_item {
1623                let tmap_id = tmap_info.item_id;
1624
1625                // Find dimg references FROM tmap TO its inputs
1626                let mut inputs: TryVec<(u32, u16)> = TryVec::new();
1627                for iref in meta.item_references.iter() {
1628                    if iref.from_item_id == tmap_id && iref.item_type == b"dimg" {
1629                        inputs.push((iref.to_item_id, iref.reference_index))?;
1630                    }
1631                }
1632                inputs.sort_by_key(|&(_, idx)| idx);
1633
1634                if inputs.len() >= 2 {
1635                    let base_item_id = inputs[0].0;
1636                    let gmap_item_id = inputs[1].0;
1637
1638                    if base_item_id == meta.primary_item_id {
1639                        // Read tmap item's data payload (ToneMapImage)
1640                        let tmap_extents = Self::get_item_extents(&meta, tmap_id)?;
1641                        let tmap_data = Self::resolve_extents_from_raw(
1642                            raw.as_ref(), &parsed.mdat_bounds, &tmap_extents,
1643                        )?;
1644                        let metadata = parse_tone_map_image(&tmap_data)?;
1645
1646                        // Get gain map image extents
1647                        let gmap_extents = Self::get_item_extents(&meta, gmap_item_id)?;
1648
1649                        // Get alternate color info from tmap item's properties
1650                        let alt_color = meta.properties.iter().find_map(|p| {
1651                            if p.item_id == tmap_id {
1652                                match &p.property {
1653                                    ItemProperty::ColorInformation(c) => Some(c.clone()),
1654                                    _ => None,
1655                                }
1656                            } else {
1657                                None
1658                            }
1659                        });
1660
1661                        (Some(metadata), Some(gmap_extents), alt_color)
1662                    } else {
1663                        (None, None, None)
1664                    }
1665                } else {
1666                    (None, None, None)
1667                }
1668            } else {
1669                (None, None, None)
1670            }
1671        };
1672
1673        // Extract properties for the primary item
1674        macro_rules! find_prop {
1675            ($variant:ident) => {
1676                meta.properties.iter().find_map(|p| {
1677                    if p.item_id == meta.primary_item_id {
1678                        match &p.property {
1679                            ItemProperty::$variant(c) => Some(c.clone()),
1680                            _ => None,
1681                        }
1682                    } else {
1683                        None
1684                    }
1685                })
1686            };
1687        }
1688
1689        let track_config = animation_data.as_ref().map(|a| &a.codec_config);
1690        let av1_config = find_prop!(AV1Config)
1691            .or_else(|| track_config.and_then(|c| c.av1_config.clone()));
1692        let color_info = find_prop!(ColorInformation)
1693            .or_else(|| track_config.and_then(|c| c.color_info.clone()));
1694        let rotation = find_prop!(Rotation);
1695        let mirror = find_prop!(Mirror);
1696        let clean_aperture = find_prop!(CleanAperture);
1697        let pixel_aspect_ratio = find_prop!(PixelAspectRatio);
1698        let content_light_level = find_prop!(ContentLightLevel);
1699        let mastering_display = find_prop!(MasteringDisplayColourVolume);
1700        let content_colour_volume = find_prop!(ContentColourVolume);
1701        let ambient_viewing = find_prop!(AmbientViewingEnvironment);
1702        let operating_point = find_prop!(OperatingPointSelector);
1703        let layer_selector = find_prop!(LayerSelector);
1704        let layered_image_indexing = find_prop!(AV1LayeredImageIndexing);
1705
1706        // Clone idat
1707        let idat = if let Some(ref idat_data) = meta.idat {
1708            let mut cloned = TryVec::new();
1709            cloned.extend_from_slice(idat_data)?;
1710            Some(cloned)
1711        } else {
1712            None
1713        };
1714
1715        Ok(Self {
1716            raw,
1717            mdat_bounds: parsed.mdat_bounds,
1718            idat,
1719            primary,
1720            alpha,
1721            grid_config,
1722            tiles,
1723            animation_data,
1724            premultiplied_alpha,
1725            av1_config,
1726            color_info,
1727            rotation,
1728            mirror,
1729            clean_aperture,
1730            pixel_aspect_ratio,
1731            content_light_level,
1732            mastering_display,
1733            content_colour_volume,
1734            ambient_viewing,
1735            operating_point,
1736            layer_selector,
1737            layered_image_indexing,
1738            exif_item,
1739            xmp_item,
1740            gain_map_metadata,
1741            gain_map,
1742            gain_map_color_info,
1743            depth_item,
1744            depth_width,
1745            depth_height,
1746            depth_av1_config,
1747            depth_color_info,
1748            major_brand: parsed.major_brand,
1749            compatible_brands: parsed.compatible_brands,
1750        })
1751    }
1752
1753    // ========================================
1754    // Internal helpers
1755    // ========================================
1756
1757    /// Get item extents (construction method + ranges) from metadata.
1758    fn get_item_extents(meta: &AvifInternalMeta, item_id: u32) -> Result<ItemExtents> {
1759        let item = meta
1760            .iloc_items
1761            .iter()
1762            .find(|item| item.item_id == item_id)
1763            .ok_or(Error::InvalidData("item not found in iloc"))?;
1764
1765        let mut extents = TryVec::new();
1766        for extent in &item.extents {
1767            extents.push(extent.extent_range.clone())?;
1768        }
1769        Ok(ItemExtents {
1770            construction_method: item.construction_method,
1771            extents,
1772        })
1773    }
1774
1775    /// Resolve file-based item extents from a raw buffer during `build()`,
1776    /// before `self` exists. Returns owned data (small payloads like tmap).
1777    fn resolve_extents_from_raw(
1778        raw: &[u8],
1779        mdat_bounds: &[MdatBounds],
1780        item: &ItemExtents,
1781    ) -> Result<std::vec::Vec<u8>> {
1782        if item.construction_method != ConstructionMethod::File {
1783            return Err(Error::Unsupported("tmap item must use file construction method"));
1784        }
1785        let mut data = std::vec::Vec::new();
1786        for extent in &item.extents {
1787            let file_offset = extent.start();
1788            let start = usize::try_from(file_offset)?;
1789            let end = match extent {
1790                ExtentRange::WithLength(range) => {
1791                    let len = range.end.checked_sub(range.start)
1792                        .ok_or(Error::InvalidData("extent range start > end"))?;
1793                    start.checked_add(usize::try_from(len)?)
1794                        .ok_or(Error::InvalidData("extent end overflow"))?
1795                }
1796                ExtentRange::ToEnd(_) => {
1797                    // Find the mdat that contains this offset
1798                    let mut found_end = raw.len();
1799                    for mdat in mdat_bounds {
1800                        if file_offset >= mdat.offset && file_offset < mdat.offset + mdat.length {
1801                            found_end = usize::try_from(mdat.offset + mdat.length)?;
1802                            break;
1803                        }
1804                    }
1805                    found_end
1806                }
1807            };
1808            let slice = raw.get(start..end)
1809                .ok_or(Error::InvalidData("tmap extent out of bounds"))?;
1810            data.extend_from_slice(slice);
1811        }
1812        Ok(data)
1813    }
1814
1815    /// Resolve an item's data from the raw buffer, returning `Cow::Borrowed`
1816    /// for single-extent file items and `Cow::Owned` for multi-extent or idat.
1817    fn resolve_item(&self, item: &ItemExtents) -> Result<Cow<'_, [u8]>> {
1818        match item.construction_method {
1819            ConstructionMethod::Idat => self.resolve_idat_extents(&item.extents),
1820            ConstructionMethod::File => self.resolve_file_extents(&item.extents),
1821            ConstructionMethod::Item => Err(Error::Unsupported("construction_method 'item' not supported")),
1822        }
1823    }
1824
1825    /// Resolve file-based extents from the raw buffer.
1826    fn resolve_file_extents(&self, extents: &[ExtentRange]) -> Result<Cow<'_, [u8]>> {
1827        let raw = self.raw.as_ref();
1828
1829        // Fast path: single extent → borrow directly from raw
1830        if extents.len() == 1 {
1831            let extent = &extents[0];
1832            let (start, end) = self.extent_byte_range(extent)?;
1833            let slice = raw.get(start..end).ok_or(Error::InvalidData("extent out of bounds in raw buffer"))?;
1834            return Ok(Cow::Borrowed(slice));
1835        }
1836
1837        // Multi-extent: concatenate into owned buffer
1838        let mut data = TryVec::new();
1839        for extent in extents {
1840            let (start, end) = self.extent_byte_range(extent)?;
1841            let slice = raw.get(start..end).ok_or(Error::InvalidData("extent out of bounds in raw buffer"))?;
1842            data.extend_from_slice(slice)?;
1843        }
1844        Ok(Cow::Owned(data.into_iter().collect()))
1845    }
1846
1847    /// Convert an ExtentRange to a (start, end) byte range within the raw buffer.
1848    fn extent_byte_range(&self, extent: &ExtentRange) -> Result<(usize, usize)> {
1849        let file_offset = extent.start();
1850        let start = usize::try_from(file_offset)?;
1851
1852        match extent {
1853            ExtentRange::WithLength(range) => {
1854                let len = range.end.checked_sub(range.start)
1855                    .ok_or(Error::InvalidData("extent range start > end"))?;
1856                let end = start.checked_add(usize::try_from(len)?)
1857                    .ok_or(Error::InvalidData("extent end overflow"))?;
1858                Ok((start, end))
1859            }
1860            ExtentRange::ToEnd(_) => {
1861                // Find the mdat that contains this offset and use its bounds
1862                for mdat in &self.mdat_bounds {
1863                    if file_offset >= mdat.offset && file_offset < mdat.offset + mdat.length {
1864                        let end = usize::try_from(mdat.offset + mdat.length)?;
1865                        return Ok((start, end));
1866                    }
1867                }
1868                // Fall back to end of raw buffer
1869                Ok((start, self.raw.len()))
1870            }
1871        }
1872    }
1873
1874    /// Resolve idat-based extents.
1875    fn resolve_idat_extents(&self, extents: &[ExtentRange]) -> Result<Cow<'_, [u8]>> {
1876        let idat_data = self.idat.as_ref()
1877            .ok_or(Error::InvalidData("idat box missing but construction_method is Idat"))?;
1878
1879        if extents.len() == 1 {
1880            let extent = &extents[0];
1881            let start = usize::try_from(extent.start())?;
1882            let slice = match extent {
1883                ExtentRange::WithLength(range) => {
1884                    let len = usize::try_from(range.end - range.start)?;
1885                    idat_data.get(start..start + len)
1886                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1887                }
1888                ExtentRange::ToEnd(_) => {
1889                    idat_data.get(start..)
1890                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1891                }
1892            };
1893            return Ok(Cow::Borrowed(slice));
1894        }
1895
1896        // Multi-extent idat: concatenate
1897        let mut data = TryVec::new();
1898        for extent in extents {
1899            let start = usize::try_from(extent.start())?;
1900            let slice = match extent {
1901                ExtentRange::WithLength(range) => {
1902                    let len = usize::try_from(range.end - range.start)?;
1903                    idat_data.get(start..start + len)
1904                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1905                }
1906                ExtentRange::ToEnd(_) => {
1907                    idat_data.get(start..)
1908                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1909                }
1910            };
1911            data.extend_from_slice(slice)?;
1912        }
1913        Ok(Cow::Owned(data.into_iter().collect()))
1914    }
1915
    /// Resolve a single animation frame from the raw buffer.
    ///
    /// Returns the frame's AV1 payload as a borrowed slice, plus the matching
    /// alpha payload (if an alpha track exists and has a sample at `index`)
    /// and the frame duration in milliseconds.
    ///
    /// # Errors
    /// `InvalidData` if the file is not animated, `index` is out of bounds,
    /// or a sample's byte range falls outside the raw buffer.
    fn resolve_frame(&self, index: usize) -> Result<FrameRef<'_>> {
        let anim = self.animation_data.as_ref()
            .ok_or(Error::InvalidData("not an animated AVIF"))?;

        // Bound check up front so the helpers below can assume a valid index.
        if index >= anim.sample_table.sample_sizes.len() {
            return Err(Error::InvalidData("frame index out of bounds"));
        }

        // Duration always comes from the color track's time-to-sample table.
        let duration_ms = self.calculate_frame_duration(&anim.sample_table, anim.media_timescale, index)?;
        let (offset, size) = self.calculate_sample_location(&anim.sample_table, index)?;

        let start = usize::try_from(offset)?;
        let end = start.checked_add(size as usize)
            .ok_or(Error::InvalidData("frame end overflow"))?;

        let raw = self.raw.as_ref();
        let slice = raw.get(start..end)
            .ok_or(Error::InvalidData("frame not found in raw buffer"))?;

        // Resolve alpha frame if alpha track exists and has this index.
        let alpha_data = if let Some(ref alpha_st) = anim.alpha_sample_table {
            let alpha_timescale = anim.alpha_media_timescale.unwrap_or(anim.media_timescale);
            if index < alpha_st.sample_sizes.len() {
                let (a_offset, a_size) = self.calculate_sample_location(alpha_st, index)?;
                let a_start = usize::try_from(a_offset)?;
                let a_end = a_start.checked_add(a_size as usize)
                    .ok_or(Error::InvalidData("alpha frame end overflow"))?;
                let a_slice = raw.get(a_start..a_end)
                    .ok_or(Error::InvalidData("alpha frame not found in raw buffer"))?;
                let _ = alpha_timescale; // timescale used for duration, which comes from color track
                Some(Cow::Borrowed(a_slice))
            } else {
                // A short alpha track is tolerated: the frame is returned
                // without alpha rather than failing the whole resolve.
                warn!("alpha track has fewer frames than color track (index {})", index);
                None
            }
        } else {
            None
        };

        Ok(FrameRef {
            data: Cow::Borrowed(slice),
            alpha_data,
            duration_ms,
        })
    }
1962
1963    /// Calculate grid configuration from metadata.
1964    fn calculate_grid_config(meta: &AvifInternalMeta, tile_ids: &[u32]) -> Result<GridConfig> {
1965        // Try explicit grid property first
1966        for prop in &meta.properties {
1967            if prop.item_id == meta.primary_item_id
1968                && let ItemProperty::ImageGrid(grid) = &prop.property {
1969                    return Ok(grid.clone());
1970                }
1971        }
1972
1973        // Fall back to ispe calculation
1974        let grid_dims = meta
1975            .properties
1976            .iter()
1977            .find(|p| p.item_id == meta.primary_item_id)
1978            .and_then(|p| match &p.property {
1979                ItemProperty::ImageSpatialExtents(e) => Some(e),
1980                _ => None,
1981            });
1982
1983        let tile_dims = tile_ids.first().and_then(|&tile_id| {
1984            meta.properties
1985                .iter()
1986                .find(|p| p.item_id == tile_id)
1987                .and_then(|p| match &p.property {
1988                    ItemProperty::ImageSpatialExtents(e) => Some(e),
1989                    _ => None,
1990                })
1991        });
1992
1993        if let (Some(grid), Some(tile)) = (grid_dims, tile_dims)
1994            && tile.width != 0
1995                && tile.height != 0
1996                && grid.width % tile.width == 0
1997                && grid.height % tile.height == 0
1998            {
1999                let columns = grid.width / tile.width;
2000                let rows = grid.height / tile.height;
2001
2002                if columns <= 255 && rows <= 255 {
2003                    return Ok(GridConfig {
2004                        rows: rows as u8,
2005                        columns: columns as u8,
2006                        output_width: grid.width,
2007                        output_height: grid.height,
2008                    });
2009                }
2010            }
2011
2012        let tile_count = tile_ids.len();
2013        Ok(GridConfig {
2014            rows: tile_count.min(255) as u8,
2015            columns: 1,
2016            output_width: 0,
2017            output_height: 0,
2018        })
2019    }
2020
2021    /// Calculate frame duration from sample table.
2022    fn calculate_frame_duration(
2023        &self,
2024        st: &SampleTable,
2025        timescale: u32,
2026        index: usize,
2027    ) -> Result<u32> {
2028        let mut current_sample = 0;
2029        for entry in &st.time_to_sample {
2030            if current_sample + entry.sample_count as usize > index {
2031                let duration_ms = if timescale > 0 {
2032                    ((entry.sample_delta as u64) * 1000) / (timescale as u64)
2033                } else {
2034                    0
2035                };
2036                return Ok(u32::try_from(duration_ms).unwrap_or(u32::MAX));
2037            }
2038            current_sample += entry.sample_count as usize;
2039        }
2040        Ok(0)
2041    }
2042
2043    /// Look up precomputed sample location (offset and size) from sample table.
2044    fn calculate_sample_location(&self, st: &SampleTable, index: usize) -> Result<(u64, u32)> {
2045        let offset = *st
2046            .sample_offsets
2047            .get(index)
2048            .ok_or(Error::InvalidData("sample index out of bounds"))?;
2049        let size = *st
2050            .sample_sizes
2051            .get(index)
2052            .ok_or(Error::InvalidData("sample index out of bounds"))?;
2053        Ok((offset, size))
2054    }
2055
2056    // ========================================
2057    // Public data access API (one way each)
2058    // ========================================
2059
2060    /// Get primary item data.
2061    ///
2062    /// Returns `Cow::Borrowed` for single-extent items, `Cow::Owned` for multi-extent.
2063    pub fn primary_data(&self) -> Result<Cow<'_, [u8]>> {
2064        self.resolve_item(&self.primary)
2065    }
2066
2067    /// Get alpha item data, if present.
2068    pub fn alpha_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
2069        self.alpha.as_ref().map(|item| self.resolve_item(item))
2070    }
2071
2072    /// Get grid tile data by index.
2073    pub fn tile_data(&self, index: usize) -> Result<Cow<'_, [u8]>> {
2074        let item = self.tiles.get(index)
2075            .ok_or(Error::InvalidData("tile index out of bounds"))?;
2076        self.resolve_item(item)
2077    }
2078
2079    /// Get a single animation frame by index.
2080    pub fn frame(&self, index: usize) -> Result<FrameRef<'_>> {
2081        self.resolve_frame(index)
2082    }
2083
2084    /// Iterate over all animation frames.
2085    pub fn frames(&self) -> FrameIterator<'_> {
2086        let count = self
2087            .animation_info()
2088            .map(|info| info.frame_count)
2089            .unwrap_or(0);
2090        FrameIterator { parser: self, index: 0, count }
2091    }
2092
2093    // ========================================
2094    // Metadata (no data access)
2095    // ========================================
2096
2097    /// Get animation metadata (if animated).
2098    pub fn animation_info(&self) -> Option<AnimationInfo> {
2099        self.animation_data.as_ref().map(|data| AnimationInfo {
2100            frame_count: data.sample_table.sample_sizes.len(),
2101            loop_count: data.loop_count,
2102            has_alpha: data.alpha_sample_table.is_some(),
2103            timescale: data.media_timescale,
2104        })
2105    }
2106
2107    /// Get grid configuration (if grid image).
2108    pub fn grid_config(&self) -> Option<&GridConfig> {
2109        self.grid_config.as_ref()
2110    }
2111
2112    /// Get number of grid tiles.
2113    pub fn grid_tile_count(&self) -> usize {
2114        self.tiles.len()
2115    }
2116
2117    /// Check if alpha channel uses premultiplied alpha.
2118    pub fn premultiplied_alpha(&self) -> bool {
2119        self.premultiplied_alpha
2120    }
2121
2122    /// Get the AV1 codec configuration for the primary item, if present.
2123    ///
2124    /// This is parsed from the `av1C` property box in the container.
2125    pub fn av1_config(&self) -> Option<&AV1Config> {
2126        self.av1_config.as_ref()
2127    }
2128
2129    /// Get colour information for the primary item, if present.
2130    ///
2131    /// This is parsed from the `colr` property box in the container.
2132    /// For CICP/nclx values, this is the authoritative source and may
2133    /// differ from values in the AV1 bitstream sequence header.
2134    pub fn color_info(&self) -> Option<&ColorInformation> {
2135        self.color_info.as_ref()
2136    }
2137
2138    /// Get rotation for the primary item, if present.
2139    pub fn rotation(&self) -> Option<&ImageRotation> {
2140        self.rotation.as_ref()
2141    }
2142
2143    /// Get mirror for the primary item, if present.
2144    pub fn mirror(&self) -> Option<&ImageMirror> {
2145        self.mirror.as_ref()
2146    }
2147
2148    /// Get clean aperture (crop) for the primary item, if present.
2149    pub fn clean_aperture(&self) -> Option<&CleanAperture> {
2150        self.clean_aperture.as_ref()
2151    }
2152
2153    /// Get pixel aspect ratio for the primary item, if present.
2154    pub fn pixel_aspect_ratio(&self) -> Option<&PixelAspectRatio> {
2155        self.pixel_aspect_ratio.as_ref()
2156    }
2157
2158    /// Get content light level info for the primary item, if present.
2159    pub fn content_light_level(&self) -> Option<&ContentLightLevel> {
2160        self.content_light_level.as_ref()
2161    }
2162
2163    /// Get mastering display colour volume for the primary item, if present.
2164    pub fn mastering_display(&self) -> Option<&MasteringDisplayColourVolume> {
2165        self.mastering_display.as_ref()
2166    }
2167
2168    /// Get content colour volume for the primary item, if present.
2169    pub fn content_colour_volume(&self) -> Option<&ContentColourVolume> {
2170        self.content_colour_volume.as_ref()
2171    }
2172
2173    /// Get ambient viewing environment for the primary item, if present.
2174    pub fn ambient_viewing(&self) -> Option<&AmbientViewingEnvironment> {
2175        self.ambient_viewing.as_ref()
2176    }
2177
2178    /// Get operating point selector for the primary item, if present.
2179    pub fn operating_point(&self) -> Option<&OperatingPointSelector> {
2180        self.operating_point.as_ref()
2181    }
2182
2183    /// Get layer selector for the primary item, if present.
2184    pub fn layer_selector(&self) -> Option<&LayerSelector> {
2185        self.layer_selector.as_ref()
2186    }
2187
2188    /// Get AV1 layered image indexing for the primary item, if present.
2189    pub fn layered_image_indexing(&self) -> Option<&AV1LayeredImageIndexing> {
2190        self.layered_image_indexing.as_ref()
2191    }
2192
2193    /// Get EXIF metadata for the primary item, if present.
2194    ///
2195    /// Returns raw EXIF data (TIFF header onwards), with the 4-byte AVIF offset prefix stripped.
2196    pub fn exif(&self) -> Option<Result<Cow<'_, [u8]>>> {
2197        self.exif_item.as_ref().map(|item| {
2198            let raw = self.resolve_item(item)?;
2199            // AVIF EXIF items start with a 4-byte big-endian offset to the TIFF header
2200            if raw.len() <= 4 {
2201                return Err(Error::InvalidData("EXIF item too short"));
2202            }
2203            let offset = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]) as usize;
2204            let start = 4 + offset;
2205            if start >= raw.len() {
2206                return Err(Error::InvalidData("EXIF offset exceeds item size"));
2207            }
2208            match raw {
2209                Cow::Borrowed(slice) => Ok(Cow::Borrowed(&slice[start..])),
2210                Cow::Owned(vec) => Ok(Cow::Owned(vec[start..].to_vec())),
2211            }
2212        })
2213    }
2214
    /// Get XMP metadata for the primary item, if present.
    ///
    /// Returns raw XMP/XML data, unmodified.
    pub fn xmp(&self) -> Option<Result<Cow<'_, [u8]>>> {
        self.xmp_item.as_ref().map(|item| self.resolve_item(item))
    }

    /// Gain map metadata, if a `tmap` derived image item is present.
    ///
    /// Describes how to apply a gain map to reconstruct an HDR rendition
    /// from the SDR base image. See ISO 21496-1.
    pub fn gain_map_metadata(&self) -> Option<&GainMapMetadata> {
        self.gain_map_metadata.as_ref()
    }

    /// Gain map image data (AV1-encoded), if present.
    pub fn gain_map_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
        self.gain_map.as_ref().map(|item| self.resolve_item(item))
    }

    /// Color information for the alternate (typically HDR) rendition.
    ///
    /// This comes from the `tmap` item's `colr` property and describes
    /// the colour space of the tone-mapped output.
    pub fn gain_map_color_info(&self) -> Option<&ColorInformation> {
        self.gain_map_color_info.as_ref()
    }

    /// Get the full gain map bundle, if a `tmap` derived image item is present.
    ///
    /// Returns [`AvifGainMap`] containing metadata, raw AV1 gain map data,
    /// and alternate rendition color info. Returns `None` if no gain map
    /// is present, or `Some(Err(..))` if the gain map data cannot be resolved.
    pub fn gain_map(&self) -> Option<Result<AvifGainMap>> {
        // Both metadata and data extents must exist; otherwise `?` yields None.
        let metadata = self.gain_map_metadata.as_ref()?.clone();
        let data_extents = self.gain_map.as_ref()?;
        let alt_color_info = self.gain_map_color_info.clone();

        Some(self.resolve_item(data_extents).map(|data| AvifGainMap {
            metadata,
            gain_map_data: data.into_owned(),
            alt_color_info,
        }))
    }

    /// Check if a depth auxiliary image is present.
    ///
    /// Returns `true` if the AVIF container has an `auxl`-linked item with
    /// a depth auxiliary type URN.
    pub fn has_depth_map(&self) -> bool {
        self.depth_item.is_some()
    }

    /// Get the raw AV1 bitstream of the depth auxiliary image, if present.
    pub fn depth_map_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
        self.depth_item.as_ref().map(|item| self.resolve_item(item))
    }
2272
    /// Get the full depth map bundle, if a depth auxiliary image is present.
    ///
    /// Returns [`AvifDepthMap`] containing the raw AV1 depth image data,
    /// dimensions, codec config, and color info. Returns `None` if no depth
    /// auxiliary is present, or `Some(Err(..))` if the data cannot be resolved.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// let bytes = std::fs::read("portrait.avif").unwrap();
    /// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
    /// if let Some(Ok(dm)) = parser.depth_map() {
    ///     println!("Depth: {}x{}, {} bytes", dm.width, dm.height, dm.data.len());
    /// }
    /// ```
    pub fn depth_map(&self) -> Option<Result<AvifDepthMap>> {
        let data_extents = self.depth_item.as_ref()?;
        let av1_config = self.depth_av1_config.clone();
        let color_info = self.depth_color_info.clone();
        let width = self.depth_width;
        let height = self.depth_height;

        Some(self.resolve_item(data_extents).map(|data| AvifDepthMap {
            data: data.into_owned(),
            width,
            height,
            av1_config,
            color_info,
        }))
    }

    /// Get the major brand from the `ftyp` box (e.g., `*b"avif"` or `*b"avis"`).
    pub fn major_brand(&self) -> &[u8; 4] {
        &self.major_brand
    }

    /// Get the compatible brands from the `ftyp` box.
    pub fn compatible_brands(&self) -> &[[u8; 4]] {
        &self.compatible_brands
    }

    /// Parse AV1 metadata from the primary item.
    ///
    /// Resolves the primary payload and parses its AV1 bitstream headers.
    pub fn primary_metadata(&self) -> Result<AV1Metadata> {
        let data = self.primary_data()?;
        AV1Metadata::parse_av1_bitstream(&data)
    }

    /// Parse AV1 metadata from the alpha item, if present.
    pub fn alpha_metadata(&self) -> Option<Result<AV1Metadata>> {
        self.alpha.as_ref().map(|item| {
            let data = self.resolve_item(item)?;
            AV1Metadata::parse_av1_bitstream(&data)
        })
    }
2327
2328    // ========================================
2329    // Conversion
2330    // ========================================
2331
2332    /// Convert to [`AvifData`] (eagerly loads all frames and tiles).
2333    ///
2334    /// Provided for migration from the eager API. Prefer using `AvifParser`
2335    /// methods directly.
2336    #[cfg(feature = "eager")]
2337    #[deprecated(since = "1.5.0", note = "Use AvifParser methods directly instead of converting to AvifData")]
2338    #[allow(deprecated)]
2339    pub fn to_avif_data(&self) -> Result<AvifData> {
2340        let primary_data = self.primary_data()?;
2341        let mut primary_item = TryVec::new();
2342        primary_item.extend_from_slice(&primary_data)?;
2343
2344        let alpha_item = match self.alpha_data() {
2345            Some(Ok(data)) => {
2346                let mut v = TryVec::new();
2347                v.extend_from_slice(&data)?;
2348                Some(v)
2349            }
2350            Some(Err(e)) => return Err(e),
2351            None => None,
2352        };
2353
2354        let mut grid_tiles = TryVec::new();
2355        for i in 0..self.grid_tile_count() {
2356            let data = self.tile_data(i)?;
2357            let mut v = TryVec::new();
2358            v.extend_from_slice(&data)?;
2359            grid_tiles.push(v)?;
2360        }
2361
2362        let animation = if let Some(info) = self.animation_info() {
2363            let mut frames = TryVec::new();
2364            for i in 0..info.frame_count {
2365                let frame_ref = self.frame(i)?;
2366                let mut data = TryVec::new();
2367                data.extend_from_slice(&frame_ref.data)?;
2368                frames.push(AnimationFrame { data, duration_ms: frame_ref.duration_ms })?;
2369            }
2370            Some(AnimationConfig {
2371                loop_count: info.loop_count,
2372                frames,
2373            })
2374        } else {
2375            None
2376        };
2377
2378        Ok(AvifData {
2379            primary_item,
2380            alpha_item,
2381            premultiplied_alpha: self.premultiplied_alpha,
2382            grid_config: self.grid_config.clone(),
2383            grid_tiles,
2384            animation,
2385            av1_config: self.av1_config.clone(),
2386            color_info: self.color_info.clone(),
2387            rotation: self.rotation,
2388            mirror: self.mirror,
2389            clean_aperture: self.clean_aperture,
2390            pixel_aspect_ratio: self.pixel_aspect_ratio,
2391            content_light_level: self.content_light_level,
2392            mastering_display: self.mastering_display,
2393            content_colour_volume: self.content_colour_volume,
2394            ambient_viewing: self.ambient_viewing,
2395            operating_point: self.operating_point,
2396            layer_selector: self.layer_selector,
2397            layered_image_indexing: self.layered_image_indexing,
2398            exif: self.exif().and_then(|r| r.ok()).map(|c| {
2399                let mut v = TryVec::new();
2400                let _ = v.extend_from_slice(&c);
2401                v
2402            }),
2403            xmp: self.xmp().and_then(|r| r.ok()).map(|c| {
2404                let mut v = TryVec::new();
2405                let _ = v.extend_from_slice(&c);
2406                v
2407            }),
2408            gain_map_metadata: self.gain_map_metadata.clone(),
2409            gain_map_item: self.gain_map_data().and_then(|r| r.ok()).map(|c| {
2410                let mut v = TryVec::new();
2411                let _ = v.extend_from_slice(&c);
2412                v
2413            }),
2414            gain_map_color_info: self.gain_map_color_info.clone(),
2415            depth_item: self.depth_map_data().and_then(|r| r.ok()).map(|c| {
2416                let mut v = TryVec::new();
2417                let _ = v.extend_from_slice(&c);
2418                v
2419            }),
2420            depth_width: self.depth_width,
2421            depth_height: self.depth_height,
2422            depth_av1_config: self.depth_av1_config.clone(),
2423            depth_color_info: self.depth_color_info.clone(),
2424            major_brand: self.major_brand,
2425            compatible_brands: self.compatible_brands.clone(),
2426        })
2427    }
2428}
2429
/// Iterator over animation frames.
///
/// Created by [`AvifParser::frames()`]. Yields [`FrameRef`] on demand.
pub struct FrameIterator<'a> {
    /// Parser that owns the underlying data; each frame is resolved from it lazily.
    parser: &'a AvifParser<'a>,
    /// Index of the next frame to yield.
    index: usize,
    /// Total number of frames available.
    count: usize,
}
2438
2439impl<'a> Iterator for FrameIterator<'a> {
2440    type Item = Result<FrameRef<'a>>;
2441
2442    fn next(&mut self) -> Option<Self::Item> {
2443        if self.index >= self.count {
2444            return None;
2445        }
2446        let result = self.parser.frame(self.index);
2447        self.index += 1;
2448        Some(result)
2449    }
2450
2451    fn size_hint(&self) -> (usize, Option<usize>) {
2452        let remaining = self.count.saturating_sub(self.index);
2453        (remaining, Some(remaining))
2454    }
2455}
2456
2457impl ExactSizeIterator for FrameIterator<'_> {
2458    fn len(&self) -> usize {
2459        self.count.saturating_sub(self.index)
2460    }
2461}
2462
/// Intermediate results of parsing an AVIF 'meta' box, before payloads are resolved.
struct AvifInternalMeta {
    /// Typed item-to-item references (e.g. "dimg" for grid tiles, "auxl" for alpha).
    item_references: TryVec<SingleItemTypeReferenceBox>,
    /// Properties associated with item ids.
    properties: TryVec<AssociatedProperty>,
    /// Id of the primary (displayed) item.
    primary_item_id: u32,
    /// Location records describing where each item's bytes live.
    iloc_items: TryVec<ItemLocationBoxItem>,
    /// Item id/type entries from the 'iinf' box.
    item_infos: TryVec<ItemInfoEntry>,
    /// Contents of the optional 'idat' box, read by `ConstructionMethod::Idat` extents.
    idat: Option<TryVec<u8>>,
    #[allow(dead_code)] // Parsed for future altr group support
    entity_groups: TryVec<EntityGroup>,
}
2473
/// A Media Data Box
/// See ISO 14496-12:2015 § 8.1.1
#[cfg(feature = "eager")]
struct MediaDataBox {
    /// Offset of `data` from the beginning of the file. See `ConstructionMethod::File`
    offset: u64,
    /// Raw payload bytes of the box (header excluded).
    data: TryVec<u8>,
}
2482
#[cfg(feature = "eager")]
impl MediaDataBox {
    /// Check whether the beginning of `extent` is within the bounds of the `MediaDataBox`.
    /// We assume extents to not cross box boundaries. If so, this will cause an error
    /// in `read_extent`.
    fn contains_extent(&self, extent: &ExtentRange) -> bool {
        // `checked_sub` is `None` exactly when the extent starts before this box.
        extent
            .start()
            .checked_sub(self.offset)
            .is_some_and(|start_offset| start_offset < self.data.len().to_u64())
    }

    /// Check whether `extent` covers the `MediaDataBox` exactly.
    fn matches_extent(&self, extent: &ExtentRange) -> bool {
        if self.offset != extent.start() {
            return false;
        }
        match extent {
            ExtentRange::WithLength(range) => self
                .offset
                .checked_add(self.data.len().to_u64())
                .is_some_and(|end| end == range.end),
            ExtentRange::ToEnd(_) => true,
        }
    }

    /// Copy the range specified by `extent` to the end of `buf` or return an error if the range
    /// is not fully contained within `MediaDataBox`.
    fn read_extent(&self, extent: &ExtentRange, buf: &mut TryVec<u8>) -> Result<()> {
        let start = extent
            .start()
            .checked_sub(self.offset)
            .ok_or(Error::InvalidData("mdat does not contain extent"))?;
        let slice = match extent {
            ExtentRange::WithLength(range) => {
                let len = range
                    .end
                    .checked_sub(range.start)
                    .ok_or(Error::InvalidData("range start > end"))?;
                let end = start
                    .checked_add(len)
                    .ok_or(Error::InvalidData("extent end overflow"))?;
                self.data.get(start.try_into()?..end.try_into()?)
            },
            ExtentRange::ToEnd(_) => self.data.get(start.try_into()?..),
        }
        .ok_or(Error::InvalidData("extent crosses box boundary"))?;
        buf.extend_from_slice(slice)?;
        Ok(())
    }
}
2541
/// Used for 'infe' boxes within 'iinf' boxes
/// See ISO 14496-12:2015 § 8.11.6
/// Only versions {2, 3} are supported
#[derive(Debug)]
struct ItemInfoEntry {
    /// Id this entry describes; matched against iloc/iref/ipma item ids.
    item_id: u32,
    /// Four-character item type, e.g. "av01" or "grid".
    item_type: FourCC,
}
2550
/// See ISO 14496-12:2015 § 8.11.12
#[derive(Debug)]
struct SingleItemTypeReferenceBox {
    /// Reference type, e.g. "dimg", "auxl", "prem".
    item_type: FourCC,
    /// Id of the referencing item.
    from_item_id: u32,
    /// Id of the referenced item.
    to_item_id: u32,
    /// Index of this reference within the list of references of the same type from the same item
    /// (0-based). This is the dimgIdx for grid tiles.
    reference_index: u16,
}
2561
/// Potential sizes (in bytes) of variable-sized fields of the 'iloc' box
/// See ISO 14496-12:2015 § 8.11.3
#[derive(Debug)]
enum IlocFieldSize {
    /// Field is absent (0 bytes).
    Zero,
    /// 4-byte (32-bit) field.
    Four,
    /// 8-byte (64-bit) field.
    Eight,
}
2570
2571impl IlocFieldSize {
2572    const fn to_bits(&self) -> u8 {
2573        match self {
2574            Self::Zero => 0,
2575            Self::Four => 32,
2576            Self::Eight => 64,
2577        }
2578    }
2579}
2580
2581impl TryFrom<u8> for IlocFieldSize {
2582    type Error = Error;
2583
2584    fn try_from(value: u8) -> Result<Self> {
2585        match value {
2586            0 => Ok(Self::Zero),
2587            4 => Ok(Self::Four),
2588            8 => Ok(Self::Eight),
2589            _ => Err(Error::InvalidData("value must be in the set {0, 4, 8}")),
2590        }
2591    }
2592}
2593
/// Supported versions of the 'iloc' box (ISO 14496-12:2015 § 8.11.3).
#[derive(PartialEq)]
enum IlocVersion {
    Zero,
    One,
    Two,
}
2600
2601impl TryFrom<u8> for IlocVersion {
2602    type Error = Error;
2603
2604    fn try_from(value: u8) -> Result<Self> {
2605        match value {
2606            0 => Ok(Self::Zero),
2607            1 => Ok(Self::One),
2608            2 => Ok(Self::Two),
2609            _ => Err(Error::Unsupported("unsupported version in 'iloc' box")),
2610        }
2611    }
2612}
2613
/// Used for 'iloc' boxes
/// See ISO 14496-12:2015 § 8.11.3
/// `base_offset` is omitted since it is integrated into the ranges in `extents`
/// `data_reference_index` is omitted, since only 0 (i.e., this file) is supported
#[derive(Debug)]
struct ItemLocationBoxItem {
    /// Id of the item this location record describes.
    item_id: u32,
    /// How the extents are resolved (file offsets vs. 'idat' offsets).
    construction_method: ConstructionMethod,
    /// Unused for `ConstructionMethod::Idat`
    extents: TryVec<ItemLocationBoxExtent>,
}
2625
/// How 'iloc' extent offsets are interpreted (ISO 14496-12:2015 § 8.11.3).
#[derive(Clone, Copy, Debug, PartialEq)]
enum ConstructionMethod {
    /// Offsets are absolute positions in the file (typically into 'mdat').
    File,
    /// Offsets are relative to the contents of the 'idat' box.
    Idat,
    #[allow(dead_code)] // TODO: see https://github.com/mozilla/mp4parse-rust/issues/196
    Item,
}
2633
/// `extent_index` is omitted since it's only used for `ConstructionMethod::Item` which
/// is currently not implemented.
#[derive(Clone, Debug)]
struct ItemLocationBoxExtent {
    /// Byte range of this extent, either bounded or running to end of data.
    extent_range: ExtentRange,
}
2640
/// A byte range for an item extent: either with an explicit end, or open-ended
/// (running to the end of the containing data).
#[derive(Clone, Debug)]
enum ExtentRange {
    WithLength(Range<u64>),
    ToEnd(RangeFrom<u64>),
}
2646
2647impl ExtentRange {
2648    const fn start(&self) -> u64 {
2649        match self {
2650            Self::WithLength(r) => r.start,
2651            Self::ToEnd(r) => r.start,
2652        }
2653    }
2654}
2655
/// See ISO 14496-12:2015 § 4.2
struct BMFFBox<'a, T> {
    /// Parsed box header (type, size, header length, optional uuid).
    head: BoxHeader,
    /// Reader limited to the remaining content bytes of this box.
    content: Take<&'a mut T>,
}
2661
2662impl<T: Read> BMFFBox<'_, T> {
2663    fn read_into_try_vec(&mut self) -> std::io::Result<TryVec<u8>> {
2664        let limit = self.content.limit();
2665        // For size=0 boxes, size is set to u64::MAX, but after subtracting offset
2666        // (8 or 16 bytes), the limit will be slightly less. Check for values very
2667        // close to u64::MAX to detect these cases.
2668        // Cap pre-allocation to 256 MB — the actual read_to_end will
2669        // grow as needed if the box really is larger, and return early
2670        // if the underlying reader has less data than claimed.
2671        const MAX_PREALLOC: u64 = 256 * 1024 * 1024;
2672        let mut vec = if limit >= u64::MAX - BoxHeader::MIN_LARGE_SIZE {
2673            // Unknown size (size=0 box), read without pre-allocation
2674            std::vec::Vec::new()
2675        } else {
2676            let mut v = std::vec::Vec::new();
2677            v.try_reserve_exact(limit.min(MAX_PREALLOC) as usize)
2678                .map_err(|_| std::io::ErrorKind::OutOfMemory)?;
2679            v
2680        };
2681        self.content.read_to_end(&mut vec)?; // The default impl
2682        Ok(vec.into())
2683    }
2684}
2685
#[test]
fn box_read_to_end() {
    // A box whose limit (5) is smaller than the backing data (10 bytes)
    // must yield exactly the limited prefix.
    let data = &mut b"1234567890".as_slice();
    let mut boxed = BMFFBox {
        head: BoxHeader { name: BoxType::FileTypeBox, size: 5, offset: 0, uuid: None },
        content: <_ as Read>::take(data, 5),
    };
    let out = boxed.read_into_try_vec().unwrap();
    assert_eq!(out.len(), 5);
    assert_eq!(out, b"12345".as_ref());
}
2697
#[test]
fn box_read_to_end_large_claim() {
    // A box claiming huge size but backed by only 10 bytes should still succeed —
    // read_to_end returns what's actually available, pre-allocation is capped.
    let data = &mut b"1234567890".as_slice();
    let mut boxed = BMFFBox {
        head: BoxHeader { name: BoxType::FileTypeBox, size: 5, offset: 0, uuid: None },
        content: <_ as Read>::take(data, u64::MAX / 2),
    };
    let out = boxed.read_into_try_vec().unwrap();
    assert_eq!(out.len(), 10);
}
2710
/// Produces the sequence of boxes read from `src` via `next_box`.
struct BoxIter<'a, T> {
    src: &'a mut T,
}
2714
2715impl<T: Read> BoxIter<'_, T> {
2716    fn new(src: &mut T) -> BoxIter<'_, T> {
2717        BoxIter { src }
2718    }
2719
2720    fn next_box(&mut self) -> Result<Option<BMFFBox<'_, T>>> {
2721        let r = read_box_header(self.src);
2722        match r {
2723            Ok(h) => Ok(Some(BMFFBox {
2724                head: h,
2725                content: self.src.take(h.size - h.offset),
2726            })),
2727            Err(Error::UnexpectedEOF) => Ok(None),
2728            Err(e) => Err(e),
2729        }
2730    }
2731}
2732
impl<T: Read> Read for BMFFBox<'_, T> {
    // Delegate reads to the length-limited inner reader so a box can never
    // read past its declared size.
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        self.content.read(buf)
    }
}
2738
impl<T: Offset> Offset for BMFFBox<'_, T> {
    // Report the underlying reader's absolute file offset.
    fn offset(&self) -> u64 {
        self.content.get_ref().offset()
    }
}
2744
impl<T: Read> BMFFBox<'_, T> {
    /// Number of content bytes not yet consumed from this box.
    fn bytes_left(&self) -> u64 {
        self.content.limit()
    }

    /// The parsed header of this box.
    const fn get_header(&self) -> &BoxHeader {
        &self.head
    }

    /// Iterate over the child boxes contained in this box's payload.
    fn box_iter(&mut self) -> BoxIter<'_, Self> {
        BoxIter::new(self)
    }
}
2758
2759impl<T> Drop for BMFFBox<'_, T> {
2760    fn drop(&mut self) {
2761        if self.content.limit() > 0 {
2762            let name: FourCC = From::from(self.head.name);
2763            debug!("Dropping {} bytes in '{}'", self.content.limit(), name);
2764        }
2765    }
2766}
2767
2768/// Read and parse a box header.
2769///
2770/// Call this first to determine the type of a particular mp4 box
2771/// and its length. Used internally for dispatching to specific
2772/// parsers for the internal content, or to get the length to
2773/// skip unknown or uninteresting boxes.
2774///
2775/// See ISO 14496-12:2015 § 4.2
2776fn read_box_header<T: ReadBytesExt>(src: &mut T) -> Result<BoxHeader> {
2777    let size32 = be_u32(src)?;
2778    let name = BoxType::from(be_u32(src)?);
2779    let size = match size32 {
2780        // valid only for top-level box and indicates it's the last box in the file.  usually mdat.
2781        0 => {
2782            // Size=0 means box extends to EOF (ISOBMFF spec allows this for last box)
2783            u64::MAX
2784        },
2785        1 => {
2786            let size64 = be_u64(src)?;
2787            if size64 < BoxHeader::MIN_LARGE_SIZE {
2788                return Err(Error::InvalidData("malformed wide size"));
2789            }
2790            size64
2791        },
2792        _ => {
2793            if u64::from(size32) < BoxHeader::MIN_SIZE {
2794                return Err(Error::InvalidData("malformed size"));
2795            }
2796            u64::from(size32)
2797        },
2798    };
2799    let mut offset = match size32 {
2800        1 => BoxHeader::MIN_LARGE_SIZE,
2801        _ => BoxHeader::MIN_SIZE,
2802    };
2803    let uuid = if name == BoxType::UuidBox {
2804        if size >= offset + 16 {
2805            let mut buffer = [0u8; 16];
2806            let count = src.read(&mut buffer)?;
2807            offset += count.to_u64();
2808            if count == 16 {
2809                Some(buffer)
2810            } else {
2811                debug!("malformed uuid (short read), skipping");
2812                None
2813            }
2814        } else {
2815            debug!("malformed uuid, skipping");
2816            None
2817        }
2818    } else {
2819        None
2820    };
2821    if offset > size {
2822        return Err(Error::InvalidData("box header offset exceeds size"));
2823    }
2824    Ok(BoxHeader { name, size, offset, uuid })
2825}
2826
2827/// Parse the extra header fields for a full box.
2828fn read_fullbox_extra<T: ReadBytesExt>(src: &mut T) -> Result<(u8, u32)> {
2829    let version = src.read_u8()?;
2830    let flags_a = src.read_u8()?;
2831    let flags_b = src.read_u8()?;
2832    let flags_c = src.read_u8()?;
2833    Ok((
2834        version,
2835        u32::from(flags_a) << 16 | u32::from(flags_b) << 8 | u32::from(flags_c),
2836    ))
2837}
2838
2839// Parse the extra fields for a full box whose flag fields must be zero.
2840fn read_fullbox_version_no_flags<T: ReadBytesExt>(src: &mut T, options: &ParseOptions) -> Result<u8> {
2841    let (version, flags) = read_fullbox_extra(src)?;
2842
2843    if flags != 0 && !options.lenient {
2844        return Err(Error::Unsupported("expected flags to be 0"));
2845    }
2846
2847    Ok(version)
2848}
2849
2850/// Skip over the entire contents of a box.
2851fn skip_box_content<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<()> {
2852    // Skip the contents of unknown chunks.
2853    let to_skip = {
2854        let header = src.get_header();
2855        debug!("{header:?} (skipped)");
2856        header
2857            .size
2858            .checked_sub(header.offset)
2859            .ok_or(Error::InvalidData("header offset > size"))?
2860    };
2861    if to_skip != src.bytes_left() {
2862        return Err(Error::InvalidData("box content size mismatch"));
2863    }
2864    skip(src, to_skip)
2865}
2866
2867/// Skip over the remain data of a box.
2868fn skip_box_remain<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<()> {
2869    let remain = {
2870        let header = src.get_header();
2871        let len = src.bytes_left();
2872        debug!("remain {len} (skipped) in {header:?}");
2873        len
2874    };
2875    skip(src, remain)
2876}
2877
/// Enforces the resource limits in a `DecodeConfig` during parsing.
struct ResourceTracker<'a> {
    /// Limits to enforce; a `None` limit in the config disables that check.
    config: &'a DecodeConfig,
    /// Bytes currently reserved (eager parsing only).
    #[cfg(feature = "eager")]
    current_memory: u64,
    /// High-water mark of `current_memory`.
    #[cfg(feature = "eager")]
    peak_memory: u64,
}
2885
2886impl<'a> ResourceTracker<'a> {
2887    fn new(config: &'a DecodeConfig) -> Self {
2888        Self {
2889            config,
2890            #[cfg(feature = "eager")]
2891            current_memory: 0,
2892            #[cfg(feature = "eager")]
2893            peak_memory: 0,
2894        }
2895    }
2896
2897    #[cfg(feature = "eager")]
2898    fn reserve(&mut self, bytes: u64) -> Result<()> {
2899        self.current_memory = self.current_memory.saturating_add(bytes);
2900        self.peak_memory = self.peak_memory.max(self.current_memory);
2901
2902        if let Some(limit) = self.config.peak_memory_limit
2903            && self.peak_memory > limit {
2904                return Err(Error::ResourceLimitExceeded("peak memory limit exceeded"));
2905            }
2906
2907        Ok(())
2908    }
2909
2910    #[cfg(feature = "eager")]
2911    fn release(&mut self, bytes: u64) {
2912        self.current_memory = self.current_memory.saturating_sub(bytes);
2913    }
2914
2915    #[cfg(feature = "eager")]
2916    fn validate_total_megapixels(&self, width: u32, height: u32) -> Result<()> {
2917        if let Some(limit) = self.config.total_megapixels_limit {
2918            let megapixels = (width as u64)
2919                .checked_mul(height as u64)
2920                .ok_or(Error::InvalidData("dimension overflow"))?
2921                / 1_000_000;
2922
2923            if megapixels > limit as u64 {
2924                return Err(Error::ResourceLimitExceeded("total megapixels limit exceeded"));
2925            }
2926        }
2927
2928        Ok(())
2929    }
2930
2931    fn validate_animation_frames(&self, count: u32) -> Result<()> {
2932        if let Some(limit) = self.config.max_animation_frames
2933            && count > limit {
2934                return Err(Error::ResourceLimitExceeded("animation frame count limit exceeded"));
2935            }
2936
2937        Ok(())
2938    }
2939
2940    fn validate_grid_tiles(&self, count: u32) -> Result<()> {
2941        if let Some(limit) = self.config.max_grid_tiles
2942            && count > limit {
2943                return Err(Error::ResourceLimitExceeded("grid tile count limit exceeded"));
2944            }
2945
2946        Ok(())
2947    }
2948}
2949
2950/// Read the contents of an AVIF file with resource limits and cancellation support
2951///
2952/// This is the primary parsing function with full control over resource limits
2953/// and cooperative cancellation via the [`Stop`] trait.
2954///
2955/// # Arguments
2956///
2957/// * `f` - Reader for the AVIF file
2958/// * `config` - Resource limits and parsing options
2959/// * `stop` - Cancellation token (use [`Unstoppable`] if not needed)
2960#[cfg(feature = "eager")]
2961#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader_with_config()` instead")]
2962#[allow(deprecated)]
2963pub fn read_avif_with_config<T: Read>(
2964    f: &mut T,
2965    config: &DecodeConfig,
2966    stop: &dyn Stop,
2967) -> Result<AvifData> {
2968    let mut tracker = ResourceTracker::new(config);
2969    let mut f = OffsetReader::new(f);
2970
2971    let mut iter = BoxIter::new(&mut f);
2972
2973    // 'ftyp' box must occur first; see ISO 14496-12:2015 § 4.3.1
2974    let (major_brand, compatible_brands) = if let Some(mut b) = iter.next_box()? {
2975        if b.head.name == BoxType::FileTypeBox {
2976            let ftyp = read_ftyp(&mut b)?;
2977            // Accept both 'avif' (single-frame) and 'avis' (animated) brands
2978            if ftyp.major_brand != b"avif" && ftyp.major_brand != b"avis" {
2979                warn!("major_brand: {}", ftyp.major_brand);
2980                return Err(Error::InvalidData("ftyp must be 'avif' or 'avis'"));
2981            }
2982            let major = ftyp.major_brand.value;
2983            let compat = ftyp.compatible_brands.iter().map(|b| b.value).collect();
2984            (major, compat)
2985        } else {
2986            return Err(Error::InvalidData("'ftyp' box must occur first"));
2987        }
2988    } else {
2989        return Err(Error::InvalidData("'ftyp' box must occur first"));
2990    };
2991
2992    let mut meta = None;
2993    let mut mdats = TryVec::new();
2994    let mut animation_data: Option<ParsedAnimationData> = None;
2995
2996    let parse_opts = ParseOptions { lenient: config.lenient };
2997
2998    while let Some(mut b) = iter.next_box()? {
2999        stop.check()?;
3000
3001        match b.head.name {
3002            BoxType::MetadataBox => {
3003                if meta.is_some() {
3004                    return Err(Error::InvalidData("There should be zero or one meta boxes per ISO 14496-12:2015 § 8.11.1.1"));
3005                }
3006                meta = Some(read_avif_meta(&mut b, &parse_opts)?);
3007            },
3008            BoxType::MovieBox => {
3009                let tracks = read_moov(&mut b)?;
3010                if !tracks.is_empty() {
3011                    animation_data = Some(associate_tracks(tracks)?);
3012                }
3013            },
3014            BoxType::MediaDataBox => {
3015                if b.bytes_left() > 0 {
3016                    let offset = b.offset();
3017                    let size = b.bytes_left();
3018                    tracker.reserve(size)?;
3019                    let data = b.read_into_try_vec()?;
3020                    tracker.release(size);
3021                    mdats.push(MediaDataBox { offset, data })?;
3022                }
3023            },
3024            _ => skip_box_content(&mut b)?,
3025        }
3026
3027        check_parser_state(&b.head, &b.content)?;
3028    }
3029
3030    // meta is required for still images; pure sequences can have only moov+mdat
3031    if meta.is_none() && animation_data.is_none() {
3032        return Err(Error::InvalidData("missing meta"));
3033    }
3034    let Some(meta) = meta else {
3035        // Pure sequence: return minimal AvifData with no items
3036        return Ok(AvifData {
3037            ..Default::default()
3038        });
3039    };
3040
3041    // Check if primary item is a grid (tiled image)
3042    let is_grid = meta
3043        .item_infos
3044        .iter()
3045        .find(|x| x.item_id == meta.primary_item_id)
3046        .is_some_and(|info| {
3047            let is_g = info.item_type == b"grid";
3048            if is_g {
3049                log::debug!("Grid image detected: primary_item_id={}", meta.primary_item_id);
3050            }
3051            is_g
3052        });
3053
3054    // Extract grid configuration if this is a grid image
3055    let mut grid_config = if is_grid {
3056        meta.properties
3057            .iter()
3058            .find(|prop| {
3059                prop.item_id == meta.primary_item_id
3060                    && matches!(prop.property, ItemProperty::ImageGrid(_))
3061            })
3062            .and_then(|prop| match &prop.property {
3063                ItemProperty::ImageGrid(config) => {
3064                    log::debug!("Grid: found explicit ImageGrid property: {:?}", config);
3065                    Some(config.clone())
3066                },
3067                _ => None,
3068            })
3069    } else {
3070        None
3071    };
3072
3073    // Find tile item IDs if this is a grid
3074    let tile_item_ids: TryVec<u32> = if is_grid {
3075        // Collect tiles with their reference index
3076        let mut tiles_with_index: TryVec<(u32, u16)> = TryVec::new();
3077        for iref in meta.item_references.iter() {
3078            // Grid items reference tiles via "dimg" (derived image) type
3079            if iref.from_item_id == meta.primary_item_id && iref.item_type == b"dimg" {
3080                tiles_with_index.push((iref.to_item_id, iref.reference_index))?;
3081            }
3082        }
3083
3084        // Validate tile count
3085        tracker.validate_grid_tiles(tiles_with_index.len() as u32)?;
3086
3087        // Sort tiles by reference_index to get correct grid order
3088        tiles_with_index.sort_by_key(|&(_, idx)| idx);
3089
3090        // Extract just the IDs in sorted order
3091        let mut ids = TryVec::new();
3092        for (tile_id, _) in tiles_with_index.iter() {
3093            ids.push(*tile_id)?;
3094        }
3095
3096        // No logging here - too verbose for production
3097
3098        // If no ImageGrid property found, calculate grid layout from ispe dimensions
3099        if grid_config.is_none() && !ids.is_empty() {
3100            // Try to calculate grid dimensions from ispe properties
3101            let grid_dims = meta.properties.iter()
3102                .find(|p| p.item_id == meta.primary_item_id)
3103                .and_then(|p| match &p.property {
3104                    ItemProperty::ImageSpatialExtents(e) => Some(e),
3105                    _ => None,
3106                });
3107
3108            let tile_dims = ids.first().and_then(|&tile_id| {
3109                meta.properties.iter()
3110                    .find(|p| p.item_id == tile_id)
3111                    .and_then(|p| match &p.property {
3112                        ItemProperty::ImageSpatialExtents(e) => Some(e),
3113                        _ => None,
3114                    })
3115            });
3116
3117            if let (Some(grid), Some(tile)) = (grid_dims, tile_dims) {
3118                // Validate grid output dimensions
3119                tracker.validate_total_megapixels(grid.width, grid.height)?;
3120
3121                // Validate tile dimensions are non-zero (already validated in read_ispe, but defensive)
3122                if tile.width == 0 || tile.height == 0 {
3123                    log::warn!("Grid: tile has zero dimensions, using fallback");
3124                } else if grid.width % tile.width == 0 && grid.height % tile.height == 0 {
3125                    // Calculate grid layout: grid_dims ÷ tile_dims
3126                    let columns = grid.width / tile.width;
3127                    let rows = grid.height / tile.height;
3128
3129                    // Validate grid dimensions fit in u8 (max 255×255 grid)
3130                    if columns > 255 || rows > 255 {
3131                        log::warn!("Grid: calculated dimensions {}×{} exceed 255, using fallback", rows, columns);
3132                    } else {
3133                        log::debug!("Grid: calculated {}×{} layout from ispe dimensions", rows, columns);
3134                        grid_config = Some(GridConfig {
3135                            rows: rows as u8,
3136                            columns: columns as u8,
3137                            output_width: grid.width,
3138                            output_height: grid.height,
3139                        });
3140                    }
3141                } else {
3142                    log::warn!("Grid: dimension mismatch - grid {}×{} not evenly divisible by tile {}×{}, using fallback",
3143                              grid.width, grid.height, tile.width, tile.height);
3144                }
3145            }
3146
3147            // Fallback: if calculation failed or ispe not available, use N×1 inference
3148            if grid_config.is_none() {
3149                log::debug!("Grid: using fallback {}×1 layout inference", ids.len());
3150                grid_config = Some(GridConfig {
3151                    rows: ids.len() as u8,  // Changed: vertical stack
3152                    columns: 1,              // Changed: single column
3153                    output_width: 0,  // Will be calculated from tiles
3154                    output_height: 0, // Will be calculated from tiles
3155                });
3156            }
3157        }
3158
3159        ids
3160    } else {
3161        TryVec::new()
3162    };
3163
3164    let alpha_item_id = meta
3165        .item_references
3166        .iter()
3167        // Auxiliary image for the primary image
3168        .filter(|iref| {
3169            iref.to_item_id == meta.primary_item_id
3170                && iref.from_item_id != meta.primary_item_id
3171                && iref.item_type == b"auxl"
3172        })
3173        .map(|iref| iref.from_item_id)
3174        // which has the alpha property
3175        .find(|&item_id| {
3176            meta.properties.iter().any(|prop| {
3177                prop.item_id == item_id
3178                    && match &prop.property {
3179                        ItemProperty::AuxiliaryType(urn) => {
3180                            urn.type_subtype().0 == b"urn:mpeg:mpegB:cicp:systems:auxiliary:alpha"
3181                        }
3182                        _ => false,
3183                    }
3184            })
3185        });
3186
3187    // Extract properties for the primary item
3188    macro_rules! find_prop {
3189        ($variant:ident) => {
3190            meta.properties.iter().find_map(|p| {
3191                if p.item_id == meta.primary_item_id {
3192                    match &p.property {
3193                        ItemProperty::$variant(c) => Some(c.clone()),
3194                        _ => None,
3195                    }
3196                } else {
3197                    None
3198                }
3199            })
3200        };
3201    }
3202
3203    let av1_config = find_prop!(AV1Config);
3204    let color_info = find_prop!(ColorInformation);
3205    let rotation = find_prop!(Rotation);
3206    let mirror = find_prop!(Mirror);
3207    let clean_aperture = find_prop!(CleanAperture);
3208    let pixel_aspect_ratio = find_prop!(PixelAspectRatio);
3209    let content_light_level = find_prop!(ContentLightLevel);
3210    let mastering_display = find_prop!(MasteringDisplayColourVolume);
3211    let content_colour_volume = find_prop!(ContentColourVolume);
3212    let ambient_viewing = find_prop!(AmbientViewingEnvironment);
3213    let operating_point = find_prop!(OperatingPointSelector);
3214    let layer_selector = find_prop!(LayerSelector);
3215    let layered_image_indexing = find_prop!(AV1LayeredImageIndexing);
3216
3217    let mut context = AvifData {
3218        premultiplied_alpha: alpha_item_id.is_some_and(|alpha_item_id| {
3219            meta.item_references.iter().any(|iref| {
3220                iref.from_item_id == meta.primary_item_id
3221                    && iref.to_item_id == alpha_item_id
3222                    && iref.item_type == b"prem"
3223            })
3224        }),
3225        av1_config,
3226        color_info,
3227        rotation,
3228        mirror,
3229        clean_aperture,
3230        pixel_aspect_ratio,
3231        content_light_level,
3232        mastering_display,
3233        content_colour_volume,
3234        ambient_viewing,
3235        operating_point,
3236        layer_selector,
3237        layered_image_indexing,
3238        major_brand,
3239        compatible_brands,
3240        ..Default::default()
3241    };
3242
3243    // Helper to extract item data from either mdat or idat
3244    let mut extract_item_data = |loc: &ItemLocationBoxItem, buf: &mut TryVec<u8>| -> Result<()> {
3245        match loc.construction_method {
3246            ConstructionMethod::File => {
3247                for extent in loc.extents.iter() {
3248                    let mut found = false;
3249                    for mdat in mdats.iter_mut() {
3250                        if mdat.matches_extent(&extent.extent_range) {
3251                            buf.append(&mut mdat.data)?;
3252                            found = true;
3253                            break;
3254                        } else if mdat.contains_extent(&extent.extent_range) {
3255                            mdat.read_extent(&extent.extent_range, buf)?;
3256                            found = true;
3257                            break;
3258                        }
3259                    }
3260                    if !found {
3261                        return Err(Error::InvalidData("iloc contains an extent that is not in mdat"));
3262                    }
3263                }
3264                Ok(())
3265            },
3266            ConstructionMethod::Idat => {
3267                let idat_data = meta.idat.as_ref().ok_or(Error::InvalidData("idat box missing but construction_method is Idat"))?;
3268                for extent in loc.extents.iter() {
3269                    match &extent.extent_range {
3270                        ExtentRange::WithLength(range) => {
3271                            let start = usize::try_from(range.start).map_err(|_| Error::InvalidData("extent start too large"))?;
3272                            let end = usize::try_from(range.end).map_err(|_| Error::InvalidData("extent end too large"))?;
3273                            if end > idat_data.len() {
3274                                return Err(Error::InvalidData("extent exceeds idat size"));
3275                            }
3276                            buf.extend_from_slice(&idat_data[start..end]).map_err(|_| Error::OutOfMemory)?;
3277                        },
3278                        ExtentRange::ToEnd(range) => {
3279                            let start = usize::try_from(range.start).map_err(|_| Error::InvalidData("extent start too large"))?;
3280                            if start >= idat_data.len() {
3281                                return Err(Error::InvalidData("extent start exceeds idat size"));
3282                            }
3283                            buf.extend_from_slice(&idat_data[start..]).map_err(|_| Error::OutOfMemory)?;
3284                        },
3285                    }
3286                }
3287                Ok(())
3288            },
3289            ConstructionMethod::Item => {
3290                Err(Error::Unsupported("construction_method 'item' not supported"))
3291            },
3292        }
3293    };
3294
3295    // load data of relevant items
3296    // For grid images, we need to load tiles in the order specified by iref
3297    if is_grid {
3298        // Extract each tile in order
3299        for (idx, &tile_id) in tile_item_ids.iter().enumerate() {
3300            if idx % 16 == 0 {
3301                stop.check()?;
3302            }
3303
3304            let mut tile_data = TryVec::new();
3305
3306            if let Some(loc) = meta.iloc_items.iter().find(|loc| loc.item_id == tile_id) {
3307                extract_item_data(loc, &mut tile_data)?;
3308            } else {
3309                return Err(Error::InvalidData("grid tile not found in iloc"));
3310            }
3311
3312            context.grid_tiles.push(tile_data)?;
3313        }
3314
3315        // Set grid_config in context
3316        context.grid_config = grid_config;
3317    } else {
3318        // Standard single-frame AVIF: load primary_item and optional alpha_item
3319        for loc in meta.iloc_items.iter() {
3320            let item_data = if loc.item_id == meta.primary_item_id {
3321                &mut context.primary_item
3322            } else if Some(loc.item_id) == alpha_item_id {
3323                context.alpha_item.get_or_insert_with(TryVec::new)
3324            } else {
3325                continue;
3326            };
3327
3328            extract_item_data(loc, item_data)?;
3329        }
3330    }
3331
3332    // Extract EXIF and XMP items linked via cdsc references to the primary item
3333    for iref in meta.item_references.iter() {
3334        if iref.to_item_id != meta.primary_item_id || iref.item_type != b"cdsc" {
3335            continue;
3336        }
3337        let desc_item_id = iref.from_item_id;
3338        let Some(info) = meta.item_infos.iter().find(|i| i.item_id == desc_item_id) else {
3339            continue;
3340        };
3341        if info.item_type == b"Exif" {
3342            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == desc_item_id) {
3343                let mut raw = TryVec::new();
3344                extract_item_data(loc, &mut raw)?;
3345                // AVIF EXIF items start with a 4-byte big-endian offset to the TIFF header
3346                if raw.len() > 4 {
3347                    let offset = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]) as usize;
3348                    let start = 4 + offset;
3349                    if start < raw.len() {
3350                        let mut exif = TryVec::new();
3351                        exif.extend_from_slice(&raw[start..])?;
3352                        context.exif = Some(exif);
3353                    }
3354                }
3355            }
3356        } else if info.item_type == b"mime"
3357            && let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == desc_item_id)
3358        {
3359            let mut xmp = TryVec::new();
3360            extract_item_data(loc, &mut xmp)?;
3361            context.xmp = Some(xmp);
3362        }
3363    }
3364
3365    // Extract gain map (tmap derived image item)
3366    if let Some(tmap_info) = meta.item_infos.iter().find(|info| info.item_type == b"tmap") {
3367        let tmap_id = tmap_info.item_id;
3368
3369        let mut inputs: TryVec<(u32, u16)> = TryVec::new();
3370        for iref in meta.item_references.iter() {
3371            if iref.from_item_id == tmap_id && iref.item_type == b"dimg" {
3372                inputs.push((iref.to_item_id, iref.reference_index))?;
3373            }
3374        }
3375        inputs.sort_by_key(|&(_, idx)| idx);
3376
3377        if inputs.len() >= 2 && inputs[0].0 == meta.primary_item_id {
3378            let gmap_item_id = inputs[1].0;
3379
3380            // Read tmap item payload
3381            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == tmap_id) {
3382                let mut tmap_data = TryVec::new();
3383                extract_item_data(loc, &mut tmap_data)?;
3384                if let Ok(metadata) = parse_tone_map_image(&tmap_data) {
3385                    context.gain_map_metadata = Some(metadata);
3386                }
3387            }
3388
3389            // Read gain map image data
3390            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == gmap_item_id) {
3391                let mut gmap_data = TryVec::new();
3392                extract_item_data(loc, &mut gmap_data)?;
3393                context.gain_map_item = Some(gmap_data);
3394            }
3395
3396            // Get alternate color info from tmap item's properties
3397            context.gain_map_color_info = meta.properties.iter().find_map(|p| {
3398                if p.item_id == tmap_id {
3399                    match &p.property {
3400                        ItemProperty::ColorInformation(c) => Some(c.clone()),
3401                        _ => None,
3402                    }
3403                } else {
3404                    None
3405                }
3406            });
3407        }
3408    }
3409
3410    // Extract depth auxiliary image
3411    {
3412        let depth_item_id = meta
3413            .item_references
3414            .iter()
3415            .filter(|iref| {
3416                iref.to_item_id == meta.primary_item_id
3417                    && iref.from_item_id != meta.primary_item_id
3418                    && iref.item_type == b"auxl"
3419            })
3420            .map(|iref| iref.from_item_id)
3421            .find(|&item_id| {
3422                if alpha_item_id == Some(item_id) {
3423                    return false;
3424                }
3425                meta.properties.iter().any(|prop| {
3426                    prop.item_id == item_id
3427                        && match &prop.property {
3428                            ItemProperty::AuxiliaryType(urn) => {
3429                                is_depth_auxiliary_urn(urn.type_subtype().0)
3430                            }
3431                            _ => false,
3432                        }
3433                })
3434            });
3435
3436        if let Some(depth_id) = depth_item_id {
3437            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == depth_id) {
3438                let mut depth_data = TryVec::new();
3439                extract_item_data(loc, &mut depth_data)?;
3440                context.depth_item = Some(depth_data);
3441            }
3442            // Get dimensions from ispe
3443            if let Some((w, h)) = meta.properties.iter().find_map(|p| {
3444                if p.item_id == depth_id {
3445                    match &p.property {
3446                        ItemProperty::ImageSpatialExtents(e) => Some((e.width, e.height)),
3447                        _ => None,
3448                    }
3449                } else {
3450                    None
3451                }
3452            }) {
3453                context.depth_width = w;
3454                context.depth_height = h;
3455            }
3456            // Get av1C
3457            context.depth_av1_config = meta.properties.iter().find_map(|p| {
3458                if p.item_id == depth_id {
3459                    match &p.property {
3460                        ItemProperty::AV1Config(c) => Some(c.clone()),
3461                        _ => None,
3462                    }
3463                } else {
3464                    None
3465                }
3466            });
3467            // Get colr
3468            context.depth_color_info = meta.properties.iter().find_map(|p| {
3469                if p.item_id == depth_id {
3470                    match &p.property {
3471                        ItemProperty::ColorInformation(c) => Some(c.clone()),
3472                        _ => None,
3473                    }
3474                } else {
3475                    None
3476                }
3477            });
3478        }
3479    }
3480
3481    // Extract animation frames if this is an animated AVIF
3482    if let Some(anim) = animation_data {
3483        let frame_count = anim.color_sample_table.sample_sizes.len() as u32;
3484        tracker.validate_animation_frames(frame_count)?;
3485
3486        log::debug!("Animation: extracting frames (media_timescale={})", anim.color_timescale);
3487        match extract_animation_frames(&anim.color_sample_table, anim.color_timescale, &mut mdats) {
3488            Ok(frames) => {
3489                if !frames.is_empty() {
3490                    log::debug!("Animation: extracted {} frames", frames.len());
3491                    context.animation = Some(AnimationConfig {
3492                        loop_count: anim.loop_count,
3493                        frames,
3494                    });
3495                }
3496            }
3497            Err(e) => {
3498                log::warn!("Animation: failed to extract frames: {}", e);
3499            }
3500        }
3501    }
3502
3503    Ok(context)
3504}
3505
/// Read the contents of an AVIF file using caller-supplied parsing options.
///
/// Kept for backwards compatibility: resource limits are unlimited, and only
/// the `lenient` flag from `options` is honored.
///
/// # Arguments
///
/// * `f` - Reader positioned at the start of the AVIF file
/// * `options` - Parsing options (e.g., lenient mode)
///
/// # Errors
///
/// Propagates any parse error from [`read_avif_with_config`].
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader_with_config()` with `DecodeConfig::lenient()` instead")]
#[allow(deprecated)]
pub fn read_avif_with_options<T: Read>(f: &mut T, options: &ParseOptions) -> Result<AvifData> {
    read_avif_with_config(
        f,
        &DecodeConfig::unlimited().lenient(options.lenient),
        &Unstoppable,
    )
}
3521
/// Read the contents of an AVIF file with default (strict) options.
///
/// Metadata is accumulated and returned in an [`AvifData`] struct. Resource
/// limits are unlimited.
///
/// For resource limits, use [`read_avif_with_config`].
/// For lenient parsing, use [`read_avif_with_options`].
///
/// # Errors
///
/// Propagates any parse error from the underlying parser.
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader()` instead")]
#[allow(deprecated)]
pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifData> {
    let options = ParseOptions::default();
    read_avif_with_options(f, &options)
}
3535
/// An entity group from a GroupsListBox (`grpl`).
///
/// See ISO 14496-12:2024 § 8.15.3.
#[allow(dead_code)] // Parsed for future altr group support
struct EntityGroup {
    /// The grouping type, taken from the EntityToGroupBox's own box type.
    group_type: FourCC,
    /// Identifier of this group.
    group_id: u32,
    /// Entity ids belonging to the group, in the order they appear in the box.
    entity_ids: TryVec<u32>,
}
3545
3546/// Parse a GroupsListBox (`grpl`).
3547///
3548/// Each child box is an EntityToGroupBox with a grouping type given by its box type.
3549/// See ISO 14496-12:2024 § 8.15.3.
3550fn read_grpl<T: Read + Offset>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<EntityGroup>> {
3551    let mut groups = TryVec::new();
3552    let mut iter = src.box_iter();
3553    while let Some(mut b) = iter.next_box()? {
3554        let group_type = FourCC::from(u32::from(b.head.name));
3555        // Read version and flags (not validated per spec flexibility)
3556        let _version = b.read_u8()?;
3557        let mut flags_buf = [0u8; 3];
3558        b.read_exact(&mut flags_buf)?;
3559
3560        let group_id = be_u32(&mut b)?;
3561        let num_entities = be_u32(&mut b)?;
3562
3563        let mut entity_ids = TryVec::new();
3564        for _ in 0..num_entities {
3565            entity_ids.push(be_u32(&mut b)?)?;
3566        }
3567
3568        groups.push(EntityGroup {
3569            group_type,
3570            group_id,
3571            entity_ids,
3572        })?;
3573
3574        skip_box_remain(&mut b)?;
3575        check_parser_state(&b.head, &b.content)?;
3576    }
3577    Ok(groups)
3578}
3579
/// Parse a ToneMapImage (`tmap`) item payload into gain map metadata.
///
/// The payload is a small versioned header followed by per-channel rational
/// parameters. See ISO 21496-1:2025 for the payload format.
///
/// # Errors
///
/// Returns [`Error::Unsupported`] for versions this parser does not know, or
/// [`Error::InvalidData`] / an I/O error for malformed or truncated payloads.
fn parse_tone_map_image(data: &[u8]) -> Result<GainMapMetadata> {
    let mut cursor = std::io::Cursor::new(data);

    // version (u8) — must be 0
    let version = cursor.read_u8()?;
    if version != 0 {
        return Err(Error::Unsupported("tmap version"));
    }

    // minimum_version (u16 BE) — version readers must support; only 0 is known here
    let minimum_version = be_u16(&mut cursor)?;
    if minimum_version > 0 {
        return Err(Error::Unsupported("tmap minimum version"));
    }

    // writer_version (u16 BE) — informational, must be >= minimum_version
    let writer_version = be_u16(&mut cursor)?;
    if writer_version < minimum_version {
        return Err(Error::InvalidData("tmap writer_version < minimum_version"));
    }

    // Flags byte: is_multichannel (bit 7), use_base_colour_space (bit 6), reserved (bits 0-5)
    let flags = cursor.read_u8()?;
    let is_multichannel = (flags & 0x80) != 0;
    let use_base_colour_space = (flags & 0x40) != 0;

    // base_hdr_headroom and alternate_hdr_headroom, each a u32/u32 rational
    let base_hdr_headroom_n = be_u32(&mut cursor)?;
    let base_hdr_headroom_d = be_u32(&mut cursor)?;
    let alternate_hdr_headroom_n = be_u32(&mut cursor)?;
    let alternate_hdr_headroom_d = be_u32(&mut cursor)?;

    // Single-channel payloads carry one parameter set; multichannel carries three.
    let channel_count = if is_multichannel { 3 } else { 1 };
    let mut channels = [GainMapChannel {
        gain_map_min_n: 0, gain_map_min_d: 0,
        gain_map_max_n: 0, gain_map_max_d: 0,
        gamma_n: 0, gamma_d: 0,
        base_offset_n: 0, base_offset_d: 0,
        alternate_offset_n: 0, alternate_offset_d: 0,
    }; 3];

    // Per channel: signed numerators (i32) with unsigned denominators (u32),
    // in this exact field order; gamma is fully unsigned.
    for ch in channels.iter_mut().take(channel_count) {
        ch.gain_map_min_n = be_i32(&mut cursor)?;
        ch.gain_map_min_d = be_u32(&mut cursor)?;
        ch.gain_map_max_n = be_i32(&mut cursor)?;
        ch.gain_map_max_d = be_u32(&mut cursor)?;
        ch.gamma_n = be_u32(&mut cursor)?;
        ch.gamma_d = be_u32(&mut cursor)?;
        ch.base_offset_n = be_i32(&mut cursor)?;
        ch.base_offset_d = be_u32(&mut cursor)?;
        ch.alternate_offset_n = be_i32(&mut cursor)?;
        ch.alternate_offset_d = be_u32(&mut cursor)?;
    }

    // Copy channel 0 to channels 1 and 2 if single-channel, so consumers can
    // index all three channels uniformly.
    if !is_multichannel {
        channels[1] = channels[0];
        channels[2] = channels[0];
    }

    Ok(GainMapMetadata {
        is_multichannel,
        use_base_colour_space,
        base_hdr_headroom_n,
        base_hdr_headroom_d,
        alternate_hdr_headroom_n,
        alternate_hdr_headroom_d,
        channels,
    })
}
3653
3654/// Parse a metadata box in the context of an AVIF
3655/// Currently requires the primary item to be an av01 item type and generates
3656/// an error otherwise.
3657/// See ISO 14496-12:2015 § 8.11.1
3658fn read_avif_meta<T: Read + Offset>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<AvifInternalMeta> {
3659    let version = read_fullbox_version_no_flags(src, options)?;
3660
3661    if version != 0 {
3662        return Err(Error::Unsupported("unsupported meta version"));
3663    }
3664
3665    let mut primary_item_id = None;
3666    let mut item_infos = None;
3667    let mut iloc_items = None;
3668    let mut item_references = TryVec::new();
3669    let mut properties = TryVec::new();
3670    let mut idat = None;
3671    let mut entity_groups = TryVec::new();
3672
3673    let mut iter = src.box_iter();
3674    while let Some(mut b) = iter.next_box()? {
3675        match b.head.name {
3676            BoxType::ItemInfoBox => {
3677                if item_infos.is_some() {
3678                    return Err(Error::InvalidData("There should be zero or one iinf boxes per ISO 14496-12:2015 § 8.11.6.1"));
3679                }
3680                item_infos = Some(read_iinf(&mut b, options)?);
3681            },
3682            BoxType::ItemLocationBox => {
3683                if iloc_items.is_some() {
3684                    return Err(Error::InvalidData("There should be zero or one iloc boxes per ISO 14496-12:2015 § 8.11.3.1"));
3685                }
3686                iloc_items = Some(read_iloc(&mut b, options)?);
3687            },
3688            BoxType::PrimaryItemBox => {
3689                if primary_item_id.is_some() {
3690                    return Err(Error::InvalidData("There should be zero or one iloc boxes per ISO 14496-12:2015 § 8.11.4.1"));
3691                }
3692                primary_item_id = Some(read_pitm(&mut b, options)?);
3693            },
3694            BoxType::ImageReferenceBox => {
3695                item_references.append(&mut read_iref(&mut b, options)?)?;
3696            },
3697            BoxType::ImagePropertiesBox => {
3698                properties = read_iprp(&mut b, options)?;
3699            },
3700            BoxType::ItemDataBox => {
3701                if idat.is_some() {
3702                    return Err(Error::InvalidData("There should be zero or one idat boxes"));
3703                }
3704                idat = Some(b.read_into_try_vec()?);
3705            },
3706            BoxType::GroupsListBox => {
3707                entity_groups.append(&mut read_grpl(&mut b)?)?;
3708            },
3709            BoxType::HandlerBox => {
3710                let hdlr = read_hdlr(&mut b)?;
3711                if hdlr.handler_type != b"pict" {
3712                    warn!("hdlr handler_type: {}", hdlr.handler_type);
3713                    return Err(Error::InvalidData("meta handler_type must be 'pict' for AVIF"));
3714                }
3715            },
3716            _ => skip_box_content(&mut b)?,
3717        }
3718
3719        check_parser_state(&b.head, &b.content)?;
3720    }
3721
3722    let primary_item_id = primary_item_id.ok_or(Error::InvalidData("Required pitm box not present in meta box"))?;
3723
3724    let item_infos = item_infos.ok_or(Error::InvalidData("iinf missing"))?;
3725
3726    if let Some(item_info) = item_infos.iter().find(|x| x.item_id == primary_item_id) {
3727        // Allow both "av01" (standard single-frame) and "grid" (tiled) types
3728        if item_info.item_type != b"av01" && item_info.item_type != b"grid" {
3729            warn!("primary_item_id type: {}", item_info.item_type);
3730            return Err(Error::InvalidData("primary_item_id type is not av01 or grid"));
3731        }
3732    } else {
3733        return Err(Error::InvalidData("primary_item_id not present in iinf box"));
3734    }
3735
3736    Ok(AvifInternalMeta {
3737        properties,
3738        item_references,
3739        primary_item_id,
3740        iloc_items: iloc_items.ok_or(Error::InvalidData("iloc missing"))?,
3741        item_infos,
3742        idat,
3743        entity_groups,
3744    })
3745}
3746
3747/// Parse a Handler Reference Box
3748/// See ISO 14496-12:2015 § 8.4.3
3749fn read_hdlr<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<HandlerBox> {
3750    let (_version, _flags) = read_fullbox_extra(src)?;
3751    // pre_defined (4 bytes)
3752    skip(src, 4)?;
3753    // handler_type (4 bytes)
3754    let handler_type = be_u32(src)?;
3755    // reserved (3 × 4 bytes) + name (variable) — skip the rest
3756    skip_box_remain(src)?;
3757    Ok(HandlerBox {
3758        handler_type: FourCC::from(handler_type),
3759    })
3760}
3761
3762/// Parse a Primary Item Box
3763/// See ISO 14496-12:2015 § 8.11.4
3764fn read_pitm<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<u32> {
3765    let version = read_fullbox_version_no_flags(src, options)?;
3766
3767    let item_id = match version {
3768        0 => be_u16(src)?.into(),
3769        1 => be_u32(src)?,
3770        _ => return Err(Error::Unsupported("unsupported pitm version")),
3771    };
3772
3773    Ok(item_id)
3774}
3775
3776/// Parse an Item Information Box
3777/// See ISO 14496-12:2015 § 8.11.6
3778fn read_iinf<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<ItemInfoEntry>> {
3779    let version = read_fullbox_version_no_flags(src, options)?;
3780
3781    match version {
3782        0 | 1 => (),
3783        _ => return Err(Error::Unsupported("unsupported iinf version")),
3784    }
3785
3786    let entry_count = if version == 0 {
3787        be_u16(src)?.to_usize()
3788    } else {
3789        be_u32(src)?.to_usize()
3790    };
3791    // Cap pre-allocation: entry_count is untrusted, actual items come from box_iter
3792    let mut item_infos = TryVec::with_capacity(entry_count.min(4096))?;
3793
3794    let mut iter = src.box_iter();
3795    while let Some(mut b) = iter.next_box()? {
3796        if b.head.name != BoxType::ItemInfoEntry {
3797            return Err(Error::InvalidData("iinf box should contain only infe boxes"));
3798        }
3799
3800        item_infos.push(read_infe(&mut b)?)?;
3801
3802        check_parser_state(&b.head, &b.content)?;
3803    }
3804
3805    Ok(item_infos)
3806}
3807
3808/// Parse an Item Info Entry
3809/// See ISO 14496-12:2015 § 8.11.6.2
3810fn read_infe<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ItemInfoEntry> {
3811    // According to the standard, it seems the flags field should be 0, but
3812    // at least one sample AVIF image has a nonzero value.
3813    let (version, _) = read_fullbox_extra(src)?;
3814
3815    // mif1 brand (see ISO 23008-12:2017 § 10.2.1) only requires v2 and 3
3816    let item_id = match version {
3817        2 => be_u16(src)?.into(),
3818        3 => be_u32(src)?,
3819        _ => return Err(Error::Unsupported("unsupported version in 'infe' box")),
3820    };
3821
3822    let item_protection_index = be_u16(src)?;
3823
3824    if item_protection_index != 0 {
3825        return Err(Error::Unsupported("protected items (infe.item_protection_index != 0) are not supported"));
3826    }
3827
3828    let item_type = FourCC::from(be_u32(src)?);
3829    debug!("infe item_id {item_id} item_type: {item_type}");
3830
3831    // There are some additional fields here, but they're not of interest to us
3832    skip_box_remain(src)?;
3833
3834    Ok(ItemInfoEntry { item_id, item_type })
3835}
3836
3837fn read_iref<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<SingleItemTypeReferenceBox>> {
3838    let mut item_references = TryVec::new();
3839    let version = read_fullbox_version_no_flags(src, options)?;
3840    if version > 1 {
3841        return Err(Error::Unsupported("iref version"));
3842    }
3843
3844    let mut iter = src.box_iter();
3845    while let Some(mut b) = iter.next_box()? {
3846        let from_item_id = if version == 0 {
3847            be_u16(&mut b)?.into()
3848        } else {
3849            be_u32(&mut b)?
3850        };
3851        let reference_count = be_u16(&mut b)?;
3852        for reference_index in 0..reference_count {
3853            let to_item_id = if version == 0 {
3854                be_u16(&mut b)?.into()
3855            } else {
3856                be_u32(&mut b)?
3857            };
3858            if from_item_id == to_item_id {
3859                return Err(Error::InvalidData("from_item_id and to_item_id must be different"));
3860            }
3861            item_references.push(SingleItemTypeReferenceBox {
3862                item_type: b.head.name.into(),
3863                from_item_id,
3864                to_item_id,
3865                reference_index,
3866            })?;
3867        }
3868        check_parser_state(&b.head, &b.content)?;
3869    }
3870    Ok(item_references)
3871}
3872
/// Properties that MUST be marked essential when associated with an item.
/// Checked in `read_iprp`; violations warn, and fail unless parsing is lenient.
/// See AVIF § 2.3.2.1.1 (a1op), HEIF § 6.5.11.1 (lsel), MIAF § 7.3.9 (clap, irot, imir).
const MUST_BE_ESSENTIAL: &[&[u8; 4]] = &[b"a1op", b"lsel", b"clap", b"irot", b"imir"];

/// Properties that MUST NOT be marked essential when associated with an item.
/// Checked in `read_iprp`; violations warn, and fail unless parsing is lenient.
/// See AVIF § 2.3.2.3.2 (a1lx).
const MUST_NOT_BE_ESSENTIAL: &[&[u8; 4]] = &[b"a1lx"];
3880
3881fn read_iprp<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<AssociatedProperty>> {
3882    let mut iter = src.box_iter();
3883    let mut properties = TryVec::new();
3884    let mut associations = TryVec::new();
3885
3886    while let Some(mut b) = iter.next_box()? {
3887        match b.head.name {
3888            BoxType::ItemPropertyContainerBox => {
3889                properties = read_ipco(&mut b, options)?;
3890            },
3891            BoxType::ItemPropertyAssociationBox => {
3892                associations = read_ipma(&mut b)?;
3893            },
3894            _ => return Err(Error::InvalidData("unexpected ipco child")),
3895        }
3896    }
3897
3898    let mut associated = TryVec::new();
3899    for a in associations {
3900        let index = match a.property_index {
3901            0 => {
3902                // property_index 0 means no association; essential must also be 0
3903                if a.essential {
3904                    return Err(Error::InvalidData(
3905                        "ipma property_index 0 must not be marked essential",
3906                    ));
3907                }
3908                continue;
3909            }
3910            x => x as usize - 1,
3911        };
3912
3913        let Some(entry) = properties.get(index) else {
3914            continue;
3915        };
3916
3917        let is_supported = entry.property != ItemProperty::Unsupported;
3918        let fourcc_bytes = &entry.fourcc.value;
3919
3920        if is_supported {
3921            // Validate essential flag for known property types
3922            if a.essential && MUST_NOT_BE_ESSENTIAL.contains(&fourcc_bytes) {
3923                warn!("item {} has {} marked essential (spec forbids it)", a.item_id, entry.fourcc);
3924                if !options.lenient {
3925                    return Err(Error::InvalidData(
3926                        "property must not be marked essential",
3927                    ));
3928                }
3929            }
3930            if !a.essential && MUST_BE_ESSENTIAL.contains(&fourcc_bytes) {
3931                warn!("item {} has {} not marked essential (spec requires it)", a.item_id, entry.fourcc);
3932                if !options.lenient {
3933                    return Err(Error::InvalidData(
3934                        "property must be marked essential",
3935                    ));
3936                }
3937            }
3938
3939            associated.push(AssociatedProperty {
3940                item_id: a.item_id,
3941                property: entry.property.try_clone()?,
3942            })?;
3943        } else if a.essential {
3944            // Unknown property marked essential — this item cannot be correctly processed
3945            warn!(
3946                "item {} has unsupported property {} marked essential; item will be unusable",
3947                a.item_id, entry.fourcc
3948            );
3949            if !options.lenient {
3950                return Err(Error::Unsupported(
3951                    "unsupported property marked as essential",
3952                ));
3953            }
3954        }
3955        // Unknown non-essential properties are silently skipped (they're optional)
3956    }
3957    Ok(associated)
3958}
3959
/// Image spatial extents (dimensions), carried by the `ispe` item property.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct ImageSpatialExtents {
    /// Width in pixels.
    pub(crate) width: u32,
    /// Height in pixels.
    pub(crate) height: u32,
}
3966
/// A single parsed item property from the `ipco` container.
///
/// Property boxes this parser does not recognize become
/// [`ItemProperty::Unsupported`], so their `essential` flag can still be
/// validated in `read_iprp` before the association is dropped.
#[derive(Debug, PartialEq)]
pub(crate) enum ItemProperty {
    /// Per-channel bit depths (up to 16 channels, stored inline).
    Channels(ArrayVec<u8, 16>),
    AuxiliaryType(AuxiliaryTypeProperty),
    /// Image dimensions (`ispe`).
    ImageSpatialExtents(ImageSpatialExtents),
    /// Grid layout for tiled images.
    ImageGrid(GridConfig),
    AV1Config(AV1Config),
    ColorInformation(ColorInformation),
    Rotation(ImageRotation),
    Mirror(ImageMirror),
    CleanAperture(CleanAperture),
    PixelAspectRatio(PixelAspectRatio),
    ContentLightLevel(ContentLightLevel),
    MasteringDisplayColourVolume(MasteringDisplayColourVolume),
    ContentColourVolume(ContentColourVolume),
    AmbientViewingEnvironment(AmbientViewingEnvironment),
    OperatingPointSelector(OperatingPointSelector),
    LayerSelector(LayerSelector),
    AV1LayeredImageIndexing(AV1LayeredImageIndexing),
    /// A property box type this parser does not understand.
    Unsupported,
}
3988
/// Fallible clone for properties. Only `AuxiliaryType` (which owns heap data
/// cloned via `try_clone`) can actually fail; `Copy` payloads are duplicated
/// bitwise and the remaining variants use infallible `clone`.
impl TryClone for ItemProperty {
    fn try_clone(&self) -> Result<Self, TryReserveError> {
        Ok(match self {
            Self::Channels(val) => Self::Channels(val.clone()),
            Self::AuxiliaryType(val) => Self::AuxiliaryType(val.try_clone()?),
            Self::ImageSpatialExtents(val) => Self::ImageSpatialExtents(*val),
            Self::ImageGrid(val) => Self::ImageGrid(val.clone()),
            Self::AV1Config(val) => Self::AV1Config(val.clone()),
            Self::ColorInformation(val) => Self::ColorInformation(val.clone()),
            Self::Rotation(val) => Self::Rotation(*val),
            Self::Mirror(val) => Self::Mirror(*val),
            Self::CleanAperture(val) => Self::CleanAperture(*val),
            Self::PixelAspectRatio(val) => Self::PixelAspectRatio(*val),
            Self::ContentLightLevel(val) => Self::ContentLightLevel(*val),
            Self::MasteringDisplayColourVolume(val) => Self::MasteringDisplayColourVolume(*val),
            Self::ContentColourVolume(val) => Self::ContentColourVolume(*val),
            Self::AmbientViewingEnvironment(val) => Self::AmbientViewingEnvironment(*val),
            Self::OperatingPointSelector(val) => Self::OperatingPointSelector(*val),
            Self::LayerSelector(val) => Self::LayerSelector(*val),
            Self::AV1LayeredImageIndexing(val) => Self::AV1LayeredImageIndexing(*val),
            Self::Unsupported => Self::Unsupported,
        })
    }
}
4013
/// One (item, property) association entry as read from an `ipma` box.
struct Association {
    /// ID of the item this association applies to.
    item_id: u32,
    /// Whether the property is flagged as essential.
    essential: bool,
    /// Index into the `ipco` property list, as recorded in `ipma`.
    property_index: u16,
}
4019
/// A property paired with the ID of the item it applies to.
pub(crate) struct AssociatedProperty {
    /// ID of the associated item.
    pub item_id: u32,
    /// The property value itself.
    pub property: ItemProperty,
}
4024
4025fn read_ipma<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<Association>> {
4026    let (version, flags) = read_fullbox_extra(src)?;
4027
4028    let mut associations = TryVec::new();
4029
4030    let entry_count = be_u32(src)?;
4031    for _ in 0..entry_count {
4032        let item_id = if version == 0 {
4033            be_u16(src)?.into()
4034        } else {
4035            be_u32(src)?
4036        };
4037        let association_count = src.read_u8()?;
4038        for _ in 0..association_count {
4039            let num_association_bytes = if flags & 1 == 1 { 2 } else { 1 };
4040            let association = &mut [0; 2][..num_association_bytes];
4041            src.read_exact(association)?;
4042            let mut association = BitReader::new(association);
4043            let essential = association.read_bool()?;
4044            let property_index = association.read_u16(association.remaining().try_into()?)?;
4045            associations.push(Association {
4046                item_id,
4047                essential,
4048                property_index,
4049            })?;
4050        }
4051    }
4052    Ok(associations)
4053}
4054
/// A parsed property with its box FourCC, for essential flag validation.
struct IndexedProperty {
    /// FourCC of the property box this entry was parsed from.
    fourcc: FourCC,
    /// The parsed value (`Unsupported` for unrecognized boxes).
    property: ItemProperty,
}
4060
4061fn read_ipco<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<IndexedProperty>> {
4062    let mut properties = TryVec::new();
4063
4064    let mut iter = src.box_iter();
4065    while let Some(mut b) = iter.next_box()? {
4066        let fourcc: FourCC = b.head.name.into();
4067        // Must push for every property to have correct index for them
4068        let prop = match b.head.name {
4069            BoxType::PixelInformationBox => ItemProperty::Channels(read_pixi(&mut b, options)?),
4070            BoxType::AuxiliaryTypeProperty => ItemProperty::AuxiliaryType(read_auxc(&mut b, options)?),
4071            BoxType::ImageSpatialExtentsBox => ItemProperty::ImageSpatialExtents(read_ispe(&mut b, options)?),
4072            BoxType::ImageGridBox => ItemProperty::ImageGrid(read_grid(&mut b, options)?),
4073            BoxType::AV1CodecConfigurationBox => ItemProperty::AV1Config(read_av1c(&mut b)?),
4074            BoxType::ColorInformationBox => {
4075                match read_colr(&mut b) {
4076                    Ok(colr) => ItemProperty::ColorInformation(colr),
4077                    Err(_) => ItemProperty::Unsupported,
4078                }
4079            },
4080            BoxType::ImageRotationBox => ItemProperty::Rotation(read_irot(&mut b)?),
4081            BoxType::ImageMirrorBox => ItemProperty::Mirror(read_imir(&mut b)?),
4082            BoxType::CleanApertureBox => ItemProperty::CleanAperture(read_clap(&mut b)?),
4083            BoxType::PixelAspectRatioBox => ItemProperty::PixelAspectRatio(read_pasp(&mut b)?),
4084            BoxType::ContentLightLevelBox => ItemProperty::ContentLightLevel(read_clli(&mut b)?),
4085            BoxType::MasteringDisplayColourVolumeBox => ItemProperty::MasteringDisplayColourVolume(read_mdcv(&mut b)?),
4086            BoxType::ContentColourVolumeBox => ItemProperty::ContentColourVolume(read_cclv(&mut b)?),
4087            BoxType::AmbientViewingEnvironmentBox => ItemProperty::AmbientViewingEnvironment(read_amve(&mut b)?),
4088            BoxType::OperatingPointSelectorBox => ItemProperty::OperatingPointSelector(read_a1op(&mut b)?),
4089            BoxType::LayerSelectorBox => ItemProperty::LayerSelector(read_lsel(&mut b)?),
4090            BoxType::AV1LayeredImageIndexingBox => ItemProperty::AV1LayeredImageIndexing(read_a1lx(&mut b)?),
4091            _ => {
4092                skip_box_remain(&mut b)?;
4093                ItemProperty::Unsupported
4094            },
4095        };
4096        properties.push(IndexedProperty { fourcc, property: prop })?;
4097    }
4098    Ok(properties)
4099}
4100
4101fn read_pixi<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<ArrayVec<u8, 16>> {
4102    let version = read_fullbox_version_no_flags(src, options)?;
4103    if version != 0 {
4104        return Err(Error::Unsupported("pixi version"));
4105    }
4106
4107    let num_channels = usize::from(src.read_u8()?);
4108    let mut channels = ArrayVec::new();
4109    let clamped = num_channels.min(channels.capacity());
4110    channels.extend((0..clamped).map(|_| 0));
4111    src.read_exact(&mut channels).map_err(|_| Error::InvalidData("invalid num_channels"))?;
4112
4113    // In lenient mode, skip any extra bytes (e.g., extended_pixi.avif has 6 extra bytes)
4114    if options.lenient && src.bytes_left() > 0 {
4115        skip(src, src.bytes_left())?;
4116    }
4117
4118    check_parser_state(&src.head, &src.content)?;
4119    Ok(channels)
4120}
4121
/// Contents of an `auxC` (auxiliary type) property box.
#[derive(Debug, PartialEq)]
struct AuxiliaryTypeProperty {
    // Raw payload: an auxiliary type URN, optionally followed by a NUL
    // separator and subtype bytes (split by `type_subtype`).
    aux_data: TryString,
}
4126
4127impl AuxiliaryTypeProperty {
4128    #[must_use]
4129    fn type_subtype(&self) -> (&[u8], &[u8]) {
4130        let split = self.aux_data.iter().position(|&b| b == b'\0')
4131            .map(|pos| self.aux_data.split_at(pos));
4132        if let Some((aux_type, rest)) = split {
4133            (aux_type, &rest[1..])
4134        } else {
4135            (&self.aux_data, &[])
4136        }
4137    }
4138}
4139
// Cloning the payload allocates, so the clone is fallible per crate policy.
impl TryClone for AuxiliaryTypeProperty {
    fn try_clone(&self) -> Result<Self, TryReserveError> {
        Ok(Self {
            aux_data: self.aux_data.try_clone()?,
        })
    }
}
4147
4148fn read_auxc<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<AuxiliaryTypeProperty> {
4149    let version = read_fullbox_version_no_flags(src, options)?;
4150    if version != 0 {
4151        return Err(Error::Unsupported("auxC version"));
4152    }
4153
4154    let aux_data = src.read_into_try_vec()?;
4155
4156    Ok(AuxiliaryTypeProperty { aux_data })
4157}
4158
/// Check if an auxiliary type URN identifies a depth auxiliary image.
///
/// Recognizes two standard URNs:
/// - `urn:mpeg:mpegB:cicp:systems:auxiliary:depth` (MPEG-B Part 23 / ISO 23091-2)
/// - `urn:mpeg:hevc:2015:auxid:2` (HEVC-style, auxid 2 = depth)
fn is_depth_auxiliary_urn(urn: &[u8]) -> bool {
    const DEPTH_URNS: [&[u8]; 2] = [
        b"urn:mpeg:mpegB:cicp:systems:auxiliary:depth",
        b"urn:mpeg:hevc:2015:auxid:2",
    ];
    DEPTH_URNS.iter().any(|&known| known == urn)
}
4168
4169/// Parse an AV1 Codec Configuration property box
4170/// See AV1-ISOBMFF § 2.3
4171fn read_av1c<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AV1Config> {
4172    // av1C is NOT a FullBox — it has no version/flags
4173    let byte0 = src.read_u8()?;
4174    let marker = byte0 >> 7;
4175    let version = byte0 & 0x7F;
4176
4177    if marker != 1 {
4178        return Err(Error::InvalidData("av1C marker must be 1"));
4179    }
4180    if version != 1 {
4181        return Err(Error::Unsupported("av1C version must be 1"));
4182    }
4183
4184    let byte1 = src.read_u8()?;
4185    let profile = byte1 >> 5;
4186    let level = byte1 & 0x1F;
4187
4188    let byte2 = src.read_u8()?;
4189    let tier = byte2 >> 7;
4190    let high_bitdepth = (byte2 >> 6) & 1;
4191    let twelve_bit = (byte2 >> 5) & 1;
4192    let monochrome = (byte2 >> 4) & 1 != 0;
4193    let chroma_subsampling_x = (byte2 >> 3) & 1;
4194    let chroma_subsampling_y = (byte2 >> 2) & 1;
4195    let chroma_sample_position = byte2 & 0x03;
4196
4197    let byte3 = src.read_u8()?;
4198    // byte3: 3 bits reserved, 1 bit initial_presentation_delay_present, 4 bits delay/reserved
4199    // Not needed for image decoding.
4200    let _ = byte3;
4201
4202    let bit_depth = if high_bitdepth != 0 {
4203        if twelve_bit != 0 { 12 } else { 10 }
4204    } else {
4205        8
4206    };
4207
4208    // Skip any configOBUs (remainder of box)
4209    skip_box_remain(src)?;
4210
4211    Ok(AV1Config {
4212        profile,
4213        level,
4214        tier,
4215        bit_depth,
4216        monochrome,
4217        chroma_subsampling_x,
4218        chroma_subsampling_y,
4219        chroma_sample_position,
4220    })
4221}
4222
4223/// Parse a Colour Information property box
4224/// See ISOBMFF § 12.1.5
4225fn read_colr<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ColorInformation> {
4226    // colr is NOT a FullBox — no version/flags
4227    let colour_type = be_u32(src)?;
4228
4229    match &colour_type.to_be_bytes() {
4230        b"nclx" => {
4231            let color_primaries = be_u16(src)?;
4232            let transfer_characteristics = be_u16(src)?;
4233            let matrix_coefficients = be_u16(src)?;
4234            let full_range_byte = src.read_u8()?;
4235            let full_range = (full_range_byte >> 7) != 0;
4236            // Skip any remaining bytes
4237            skip_box_remain(src)?;
4238            Ok(ColorInformation::Nclx {
4239                color_primaries,
4240                transfer_characteristics,
4241                matrix_coefficients,
4242                full_range,
4243            })
4244        }
4245        b"rICC" | b"prof" => {
4246            let icc_data = src.read_into_try_vec()?;
4247            Ok(ColorInformation::IccProfile(icc_data.to_vec()))
4248        }
4249        _ => {
4250            skip_box_remain(src)?;
4251            Err(Error::Unsupported("unsupported colr colour_type"))
4252        }
4253    }
4254}
4255
4256/// Parse an Image Rotation property box.
4257/// See ISOBMFF § 12.1.4. NOT a FullBox.
4258fn read_irot<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ImageRotation> {
4259    let byte = src.read_u8()?;
4260    let angle_code = byte & 0x03;
4261    let angle = match angle_code {
4262        0 => 0,
4263        1 => 90,
4264        2 => 180,
4265        _ => 270, // angle_code & 0x03 can only be 0..=3
4266    };
4267    skip_box_remain(src)?;
4268    Ok(ImageRotation { angle })
4269}
4270
4271/// Parse an Image Mirror property box.
4272/// See ISOBMFF § 12.1.4. NOT a FullBox.
4273fn read_imir<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ImageMirror> {
4274    let byte = src.read_u8()?;
4275    let axis = byte & 0x01;
4276    skip_box_remain(src)?;
4277    Ok(ImageMirror { axis })
4278}
4279
4280/// Parse a Clean Aperture property box.
4281/// See ISOBMFF § 12.1.4. NOT a FullBox.
4282fn read_clap<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<CleanAperture> {
4283    let width_n = be_u32(src)?;
4284    let width_d = be_u32(src)?;
4285    let height_n = be_u32(src)?;
4286    let height_d = be_u32(src)?;
4287    let horiz_off_n = be_i32(src)?;
4288    let horiz_off_d = be_u32(src)?;
4289    let vert_off_n = be_i32(src)?;
4290    let vert_off_d = be_u32(src)?;
4291    // Validate denominators are non-zero
4292    if width_d == 0 || height_d == 0 || horiz_off_d == 0 || vert_off_d == 0 {
4293        return Err(Error::InvalidData("clap denominator cannot be zero"));
4294    }
4295    skip_box_remain(src)?;
4296    Ok(CleanAperture {
4297        width_n, width_d,
4298        height_n, height_d,
4299        horiz_off_n, horiz_off_d,
4300        vert_off_n, vert_off_d,
4301    })
4302}
4303
4304/// Parse a Pixel Aspect Ratio property box.
4305/// See ISOBMFF § 12.1.4. NOT a FullBox.
4306fn read_pasp<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<PixelAspectRatio> {
4307    let h_spacing = be_u32(src)?;
4308    let v_spacing = be_u32(src)?;
4309    skip_box_remain(src)?;
4310    Ok(PixelAspectRatio { h_spacing, v_spacing })
4311}
4312
4313/// Parse a Content Light Level Info property box.
4314/// See ISOBMFF § 12.1.5 / ITU-T H.274. NOT a FullBox.
4315fn read_clli<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ContentLightLevel> {
4316    let max_content_light_level = be_u16(src)?;
4317    let max_pic_average_light_level = be_u16(src)?;
4318    skip_box_remain(src)?;
4319    Ok(ContentLightLevel {
4320        max_content_light_level,
4321        max_pic_average_light_level,
4322    })
4323}
4324
4325/// Parse a Mastering Display Colour Volume property box.
4326/// See ISOBMFF § 12.1.5 / SMPTE ST 2086. NOT a FullBox.
4327fn read_mdcv<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MasteringDisplayColourVolume> {
4328    // 3 primaries, each (x, y) as u16
4329    let primaries = [
4330        (be_u16(src)?, be_u16(src)?),
4331        (be_u16(src)?, be_u16(src)?),
4332        (be_u16(src)?, be_u16(src)?),
4333    ];
4334    let white_point = (be_u16(src)?, be_u16(src)?);
4335    let max_luminance = be_u32(src)?;
4336    let min_luminance = be_u32(src)?;
4337    skip_box_remain(src)?;
4338    Ok(MasteringDisplayColourVolume {
4339        primaries,
4340        white_point,
4341        max_luminance,
4342        min_luminance,
4343    })
4344}
4345
4346/// Parse a Content Colour Volume property box.
4347/// See ISOBMFF § 12.1.5 / H.265 D.2.40. NOT a FullBox.
4348fn read_cclv<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ContentColourVolume> {
4349    let flags = src.read_u8()?;
4350    let primaries_present = flags & 0x20 != 0;
4351    let min_lum_present = flags & 0x10 != 0;
4352    let max_lum_present = flags & 0x08 != 0;
4353    let avg_lum_present = flags & 0x04 != 0;
4354
4355    let primaries = if primaries_present {
4356        Some([
4357            (be_i32(src)?, be_i32(src)?),
4358            (be_i32(src)?, be_i32(src)?),
4359            (be_i32(src)?, be_i32(src)?),
4360        ])
4361    } else {
4362        None
4363    };
4364
4365    let min_luminance = if min_lum_present { Some(be_u32(src)?) } else { None };
4366    let max_luminance = if max_lum_present { Some(be_u32(src)?) } else { None };
4367    let avg_luminance = if avg_lum_present { Some(be_u32(src)?) } else { None };
4368
4369    skip_box_remain(src)?;
4370    Ok(ContentColourVolume {
4371        primaries,
4372        min_luminance,
4373        max_luminance,
4374        avg_luminance,
4375    })
4376}
4377
4378/// Parse an Ambient Viewing Environment property box.
4379/// See ISOBMFF § 12.1.5 / H.265 D.2.39. NOT a FullBox.
4380fn read_amve<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AmbientViewingEnvironment> {
4381    let ambient_illuminance = be_u32(src)?;
4382    let ambient_light_x = be_u16(src)?;
4383    let ambient_light_y = be_u16(src)?;
4384    skip_box_remain(src)?;
4385    Ok(AmbientViewingEnvironment {
4386        ambient_illuminance,
4387        ambient_light_x,
4388        ambient_light_y,
4389    })
4390}
4391
4392/// Parse an Operating Point Selector property box.
4393/// See AVIF § 4.3.4. NOT a FullBox.
4394fn read_a1op<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<OperatingPointSelector> {
4395    let op_index = src.read_u8()?;
4396    if op_index > 31 {
4397        return Err(Error::InvalidData("a1op op_index must be 0..31"));
4398    }
4399    skip_box_remain(src)?;
4400    Ok(OperatingPointSelector { op_index })
4401}
4402
4403/// Parse a Layer Selector property box.
4404/// See HEIF (ISO 23008-12). NOT a FullBox.
4405fn read_lsel<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<LayerSelector> {
4406    let layer_id = be_u16(src)?;
4407    skip_box_remain(src)?;
4408    Ok(LayerSelector { layer_id })
4409}
4410
4411/// Parse an AV1 Layered Image Indexing property box.
4412/// See AVIF § 4.3.6. NOT a FullBox.
4413fn read_a1lx<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AV1LayeredImageIndexing> {
4414    let flags = src.read_u8()?;
4415    let large_size = flags & 0x01 != 0;
4416    let layer_sizes = if large_size {
4417        [be_u32(src)?, be_u32(src)?, be_u32(src)?]
4418    } else {
4419        [u32::from(be_u16(src)?), u32::from(be_u16(src)?), u32::from(be_u16(src)?)]
4420    };
4421    skip_box_remain(src)?;
4422    Ok(AV1LayeredImageIndexing { layer_sizes })
4423}
4424
4425/// Parse an Image Spatial Extents property box
4426/// See ISO/IEC 23008-12:2017 § 6.5.3
4427fn read_ispe<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<ImageSpatialExtents> {
4428    let _version = read_fullbox_version_no_flags(src, options)?;
4429    // Version is always 0 for ispe
4430
4431    let width = be_u32(src)?;
4432    let height = be_u32(src)?;
4433
4434    // Validate dimensions are non-zero (0×0 images are invalid)
4435    if width == 0 || height == 0 {
4436        return Err(Error::InvalidData("ispe dimensions cannot be zero"));
4437    }
4438
4439    Ok(ImageSpatialExtents { width, height })
4440}
4441
4442/// Parse a Movie Header box (mvhd)
4443/// See ISO/IEC 14496-12:2015 § 8.2.2
4444fn read_mvhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MovieHeader> {
4445    let version = src.read_u8()?;
4446    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4447
4448    let (timescale, duration) = if version == 1 {
4449        let _creation_time = be_u64(src)?;
4450        let _modification_time = be_u64(src)?;
4451        let timescale = be_u32(src)?;
4452        let duration = be_u64(src)?;
4453        (timescale, duration)
4454    } else {
4455        let _creation_time = be_u32(src)?;
4456        let _modification_time = be_u32(src)?;
4457        let timescale = be_u32(src)?;
4458        let duration = be_u32(src)?;
4459        (timescale, duration as u64)
4460    };
4461
4462    // Skip rest of mvhd (rate, volume, matrix, etc.)
4463    skip_box_remain(src)?;
4464
4465    Ok(MovieHeader { _timescale: timescale, _duration: duration })
4466}
4467
4468/// Parse a Media Header box (mdhd)
4469/// See ISO/IEC 14496-12:2015 § 8.4.2
4470fn read_mdhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MediaHeader> {
4471    let version = src.read_u8()?;
4472    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4473
4474    let (timescale, duration) = if version == 1 {
4475        let _creation_time = be_u64(src)?;
4476        let _modification_time = be_u64(src)?;
4477        let timescale = be_u32(src)?;
4478        let duration = be_u64(src)?;
4479        (timescale, duration)
4480    } else {
4481        let _creation_time = be_u32(src)?;
4482        let _modification_time = be_u32(src)?;
4483        let timescale = be_u32(src)?;
4484        let duration = be_u32(src)?;
4485        (timescale, duration as u64)
4486    };
4487
4488    // Skip language and pre_defined
4489    skip_box_remain(src)?;
4490
4491    Ok(MediaHeader { timescale, _duration: duration })
4492}
4493
4494/// Parse Time To Sample box (stts)
4495/// See ISO/IEC 14496-12:2015 § 8.6.1.2
4496fn read_stts<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<TimeToSampleEntry>> {
4497    let _version = src.read_u8()?;
4498    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4499    let entry_count = be_u32(src)?;
4500
4501    let mut entries = TryVec::new();
4502    for _ in 0..entry_count {
4503        entries.push(TimeToSampleEntry {
4504            sample_count: be_u32(src)?,
4505            sample_delta: be_u32(src)?,
4506        })?;
4507    }
4508
4509    Ok(entries)
4510}
4511
4512/// Parse Sample To Chunk box (stsc)
4513/// See ISO/IEC 14496-12:2015 § 8.7.4
4514fn read_stsc<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<SampleToChunkEntry>> {
4515    let _version = src.read_u8()?;
4516    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4517    let entry_count = be_u32(src)?;
4518
4519    let mut entries = TryVec::new();
4520    for _ in 0..entry_count {
4521        entries.push(SampleToChunkEntry {
4522            first_chunk: be_u32(src)?,
4523            samples_per_chunk: be_u32(src)?,
4524            _sample_description_index: be_u32(src)?,
4525        })?;
4526    }
4527
4528    Ok(entries)
4529}
4530
4531/// Parse Sample Size box (stsz)
4532/// See ISO/IEC 14496-12:2015 § 8.7.3
4533fn read_stsz<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<u32>> {
4534    let _version = src.read_u8()?;
4535    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4536    let sample_size = be_u32(src)?;
4537    let sample_count = be_u32(src)?;
4538
4539    let mut sizes = TryVec::new();
4540    if sample_size == 0 {
4541        // Variable sizes - read each one
4542        for _ in 0..sample_count {
4543            sizes.push(be_u32(src)?)?;
4544        }
4545    } else {
4546        // Constant size for all samples
4547        for _ in 0..sample_count {
4548            sizes.push(sample_size)?;
4549        }
4550    }
4551
4552    Ok(sizes)
4553}
4554
4555/// Parse Chunk Offset box (stco or co64)
4556/// See ISO/IEC 14496-12:2015 § 8.7.5
4557fn read_chunk_offsets<T: Read>(src: &mut BMFFBox<'_, T>, is_64bit: bool) -> Result<TryVec<u64>> {
4558    let _version = src.read_u8()?;
4559    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4560    let entry_count = be_u32(src)?;
4561
4562    let mut offsets = TryVec::new();
4563    for _ in 0..entry_count {
4564        let offset = if is_64bit {
4565            be_u64(src)?
4566        } else {
4567            be_u32(src)? as u64
4568        };
4569        offsets.push(offset)?;
4570    }
4571
4572    Ok(offsets)
4573}
4574
/// Parse Sample Description box (stsd) to extract codec config from VisualSampleEntry.
/// See ISO/IEC 14496-12:2015 § 8.5.2
///
/// For AVIF sequences, the VisualSampleEntry is `av01` which contains sub-boxes
/// like `av1C` (codec config) and `colr` (color info), similar to ipco properties.
fn read_stsd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TrackCodecConfig> {
    // FullBox header; version and flags are not needed here.
    let _version = src.read_u8()?;
    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
    let entry_count = be_u32(src)?;

    let mut config = TrackCodecConfig::default();

    // Parse first entry only (AVIF tracks have one sample description)
    let mut iter = src.box_iter();
    for _ in 0..entry_count {
        // Tolerate an entry_count larger than the actual number of child boxes.
        let Some(mut entry_box) = iter.next_box()? else {
            break;
        };

        // Check if this is an av01 VisualSampleEntry
        if entry_box.head.name != BoxType::AV1SampleEntry {
            skip_box_remain(&mut entry_box)?;
            continue;
        }

        // Skip VisualSampleEntry fixed fields (78 bytes total):
        //   reserved[6] + data_ref_index[2] + pre_defined[2] + reserved[2] +
        //   pre_defined[12] + width[2] + height[2] + horiz_res[4] + vert_res[4] +
        //   reserved[4] + frame_count[2] + compressorname[32] + depth[2] + pre_defined[2]
        const VISUAL_SAMPLE_ENTRY_SIZE: u64 = 78;
        // A truncated entry cannot contain the fixed fields, let alone sub-boxes.
        if entry_box.bytes_left() < VISUAL_SAMPLE_ENTRY_SIZE {
            skip_box_remain(&mut entry_box)?;
            continue;
        }
        skip(&mut entry_box, VISUAL_SAMPLE_ENTRY_SIZE)?;

        // Parse sub-boxes within the VisualSampleEntry for av1C and colr
        let mut sub_iter = entry_box.box_iter();
        while let Some(mut sub_box) = sub_iter.next_box()? {
            match sub_box.head.name {
                BoxType::AV1CodecConfigurationBox => {
                    config.av1_config = Some(read_av1c(&mut sub_box)?);
                }
                BoxType::ColorInformationBox => {
                    // A malformed colr is tolerated; its remainder is skipped.
                    if let Ok(colr) = read_colr(&mut sub_box) {
                        config.color_info = Some(colr);
                    } else {
                        skip_box_remain(&mut sub_box)?;
                    }
                }
                _ => {
                    skip_box_remain(&mut sub_box)?;
                }
            }
        }

        // Only need the first av01 entry
        if config.av1_config.is_some() {
            break;
        }
    }

    Ok(config)
}
4639
/// Parse Sample Table box (stbl)
/// See ISO/IEC 14496-12:2015 § 8.5
///
/// Collects the child tables (stsd/stts/stsc/stsz/stco/co64), then flattens
/// the chunk indirection into a per-sample byte-offset array so that frame
/// lookup is O(1).
fn read_stbl<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<(SampleTable, TrackCodecConfig)> {
    let mut time_to_sample = TryVec::new();
    let mut sample_to_chunk = TryVec::new();
    let mut sample_sizes = TryVec::new();
    let mut chunk_offsets = TryVec::new();
    let mut codec_config = TrackCodecConfig::default();

    let mut iter = src.box_iter();
    while let Some(mut b) = iter.next_box()? {
        match b.head.name {
            BoxType::SampleDescriptionBox => {
                codec_config = read_stsd(&mut b)?;
            }
            BoxType::TimeToSampleBox => {
                time_to_sample = read_stts(&mut b)?;
            }
            BoxType::SampleToChunkBox => {
                sample_to_chunk = read_stsc(&mut b)?;
            }
            BoxType::SampleSizeBox => {
                sample_sizes = read_stsz(&mut b)?;
            }
            BoxType::ChunkOffsetBox => {
                chunk_offsets = read_chunk_offsets(&mut b, false)?;
            }
            BoxType::ChunkLargeOffsetBox => {
                chunk_offsets = read_chunk_offsets(&mut b, true)?;
            }
            _ => {
                skip_box_remain(&mut b)?;
            }
        }
    }

    // Precompute per-sample byte offsets from sample_to_chunk + chunk_offsets + sample_sizes.
    // This flattens the ISOBMFF indirection into a simple array for O(1) frame lookup.
    let mut sample_offsets = TryVec::new();
    let mut sample_idx = 0usize;
    for (i, entry) in sample_to_chunk.iter().enumerate() {
        // Each stsc entry applies to chunks [first_chunk, next entry's
        // first_chunk); the last entry runs to the end of the chunk list,
        // which the u32::MAX sentinel approximates (the chunk_offsets bound
        // below terminates the loop before the sentinel is reached).
        let next_first_chunk = sample_to_chunk
            .get(i + 1)
            .map(|e| e.first_chunk)
            .unwrap_or(u32::MAX);

        for chunk_no in entry.first_chunk..next_first_chunk {
            // Chunk numbers are 1-based; 0 indicates corrupt data.
            if chunk_no == 0 {
                break;
            }
            let co_idx = (chunk_no - 1) as usize;
            // Stop at the end of the chunk-offset table (also the normal exit
            // for the final stsc entry's open-ended run).
            let chunk_offset = match chunk_offsets.get(co_idx) {
                Some(&o) => o,
                None => break,
            };

            // Within a chunk, samples are laid out back to back: each sample
            // starts where the previous one ended.
            let mut offset = chunk_offset;
            for _ in 0..entry.samples_per_chunk {
                if sample_idx >= sample_sizes.len() {
                    break;
                }
                sample_offsets.push(offset)?;
                offset += *sample_sizes.get(sample_idx)
                    .ok_or(Error::InvalidData("sample index mismatch"))? as u64;
                sample_idx += 1;
            }
        }
    }

    Ok((SampleTable {
        time_to_sample,
        sample_sizes,
        sample_offsets,
    }, codec_config))
}
4715
4716/// Parse Track Header box (tkhd)
4717/// See ISO/IEC 14496-12:2015 § 8.3.2
4718fn read_tkhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<u32> {
4719    let version = src.read_u8()?;
4720    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4721
4722    let track_id = if version == 1 {
4723        let _creation_time = be_u64(src)?;
4724        let _modification_time = be_u64(src)?;
4725        let track_id = be_u32(src)?;
4726        let _reserved = be_u32(src)?;
4727        let _duration = be_u64(src)?;
4728        track_id
4729    } else {
4730        let _creation_time = be_u32(src)?;
4731        let _modification_time = be_u32(src)?;
4732        let track_id = be_u32(src)?;
4733        let _reserved = be_u32(src)?;
4734        let _duration = be_u32(src)?;
4735        track_id
4736    };
4737
4738    // Skip rest (reserved, layer, alternate_group, volume, matrix, width, height)
4739    skip_box_remain(src)?;
4740    Ok(track_id)
4741}
4742
4743/// Parse Track Reference box (tref)
4744/// See ISO/IEC 14496-12:2015 § 8.3.3
4745///
4746/// Contains sub-boxes typed by FourCC (e.g., `auxl`, `cdsc`), each with a list of track IDs.
4747fn read_tref<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<TrackReference>> {
4748    let mut refs = TryVec::new();
4749    let mut iter = src.box_iter();
4750    while let Some(mut b) = iter.next_box()? {
4751        let reference_type = FourCC::from(u32::from(b.head.name));
4752        let bytes_left = b.bytes_left();
4753        if bytes_left < 4 || bytes_left % 4 != 0 {
4754            skip_box_remain(&mut b)?;
4755            continue;
4756        }
4757        let count = bytes_left / 4;
4758        let mut track_ids = TryVec::new();
4759        for _ in 0..count {
4760            track_ids.push(be_u32(&mut b)?)?;
4761        }
4762        refs.push(TrackReference { reference_type, track_ids })?;
4763    }
4764    Ok(refs)
4765}
4766
4767/// Parse Edit List box (elst) to extract loop count from flags.
4768/// See ISO/IEC 14496-12:2015 § 8.6.6
4769///
4770/// Returns the loop count: flags bit 0 set = infinite looping (0), otherwise 1.
4771fn read_elst<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<u32> {
4772    let (version, flags) = read_fullbox_extra(src)?;
4773
4774    let entry_count = be_u32(src)?;
4775    // Skip all entries
4776    let entry_size: u64 = if version == 1 { 20 } else { 12 };
4777    skip(src, (entry_count as u64).checked_mul(entry_size)
4778        .ok_or(Error::InvalidData("edit list entry count overflow"))?)?;
4779    skip_box_remain(src)?;
4780
4781    // Bit 0 of flags: repeat (1 = infinite loop → loop_count=0, 0 = play once → loop_count=1)
4782    if flags & 1 != 0 {
4783        Ok(0) // infinite
4784    } else {
4785        Ok(1) // play once
4786    }
4787}
4788
4789/// Parse animation from moov box.
4790/// Returns all parsed tracks.
4791fn read_moov<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<ParsedTrack>> {
4792    let mut tracks = TryVec::new();
4793
4794    let mut iter = src.box_iter();
4795    while let Some(mut b) = iter.next_box()? {
4796        match b.head.name {
4797            BoxType::MovieHeaderBox => {
4798                let _mvhd = read_mvhd(&mut b)?;
4799            }
4800            BoxType::TrackBox => {
4801                if let Some(track) = read_trak(&mut b)? {
4802                    tracks.push(track)?;
4803                }
4804            }
4805            _ => {
4806                skip_box_remain(&mut b)?;
4807            }
4808        }
4809    }
4810
4811    Ok(tracks)
4812}
4813
4814/// Parse track box (trak).
4815/// Returns a ParsedTrack if this track has a valid sample table.
4816fn read_trak<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<ParsedTrack>> {
4817    let mut track_id = 0u32;
4818    let mut references = TryVec::new();
4819    let mut loop_count = 1u32; // default: play once
4820    let mut mdia_result: Option<(FourCC, u32, SampleTable, TrackCodecConfig)> = None;
4821
4822    let mut iter = src.box_iter();
4823    while let Some(mut b) = iter.next_box()? {
4824        match b.head.name {
4825            BoxType::TrackHeaderBox => {
4826                track_id = read_tkhd(&mut b)?;
4827            }
4828            BoxType::TrackReferenceBox => {
4829                references = read_tref(&mut b)?;
4830            }
4831            BoxType::EditBox => {
4832                // Parse edts to find elst
4833                let mut edts_iter = b.box_iter();
4834                while let Some(mut eb) = edts_iter.next_box()? {
4835                    if eb.head.name == BoxType::EditListBox {
4836                        loop_count = read_elst(&mut eb)?;
4837                    } else {
4838                        skip_box_remain(&mut eb)?;
4839                    }
4840                }
4841            }
4842            BoxType::MediaBox => {
4843                mdia_result = read_mdia(&mut b)?;
4844            }
4845            _ => {
4846                skip_box_remain(&mut b)?;
4847            }
4848        }
4849    }
4850
4851    if let Some((handler_type, media_timescale, sample_table, codec_config)) = mdia_result {
4852        Ok(Some(ParsedTrack {
4853            track_id,
4854            handler_type,
4855            media_timescale,
4856            sample_table,
4857            references,
4858            loop_count,
4859            codec_config,
4860        }))
4861    } else {
4862        Ok(None)
4863    }
4864}
4865
4866/// Parse media box (mdia).
4867/// Returns (handler_type, media_timescale, sample_table, codec_config) if valid.
4868fn read_mdia<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<(FourCC, u32, SampleTable, TrackCodecConfig)>> {
4869    let mut media_timescale = 1000; // default
4870    let mut handler_type = FourCC::default();
4871    let mut stbl_result: Option<(SampleTable, TrackCodecConfig)> = None;
4872
4873    let mut iter = src.box_iter();
4874    while let Some(mut b) = iter.next_box()? {
4875        match b.head.name {
4876            BoxType::MediaHeaderBox => {
4877                let mdhd = read_mdhd(&mut b)?;
4878                media_timescale = mdhd.timescale;
4879            }
4880            BoxType::HandlerBox => {
4881                let hdlr = read_hdlr(&mut b)?;
4882                handler_type = hdlr.handler_type;
4883            }
4884            BoxType::MediaInformationBox => {
4885                stbl_result = read_minf(&mut b)?;
4886            }
4887            _ => {
4888                skip_box_remain(&mut b)?;
4889            }
4890        }
4891    }
4892
4893    if let Some((stbl, codec_config)) = stbl_result {
4894        Ok(Some((handler_type, media_timescale, stbl, codec_config)))
4895    } else {
4896        Ok(None)
4897    }
4898}
4899
4900/// Associate parsed tracks into color + optional alpha animation data.
4901///
4902/// - Color track: first with handler `pict` (fallback: first track with a sample table)
4903/// - Alpha track: handler `auxv` with `tref/auxl` referencing color's track_id
4904/// - Audio tracks (handler `soun`) are skipped
4905fn associate_tracks(tracks: TryVec<ParsedTrack>) -> Result<ParsedAnimationData> {
4906    // Find color track: first with handler_type == "pict"
4907    let color_idx = tracks
4908        .iter()
4909        .position(|t| t.handler_type == b"pict")
4910        .or_else(|| {
4911            // Fallback: first track that isn't audio
4912            tracks.iter().position(|t| t.handler_type != b"soun")
4913        })
4914        .ok_or(Error::InvalidData("no color track found in moov"))?;
4915
4916    let color_track = tracks.get(color_idx)
4917        .ok_or(Error::InvalidData("color track index out of bounds"))?;
4918    let color_track_id = color_track.track_id;
4919
4920    // Find alpha track: handler_type == "auxv" with tref/auxl referencing color track
4921    let alpha_idx = tracks.iter().position(|t| {
4922        t.handler_type == b"auxv"
4923            && t.references.iter().any(|r| {
4924                r.reference_type == b"auxl"
4925                    && r.track_ids.iter().any(|&id| id == color_track_id)
4926            })
4927    });
4928
4929    if let Some(ai) = alpha_idx {
4930        let alpha_track = tracks.get(ai)
4931            .ok_or(Error::InvalidData("alpha track index out of bounds"))?;
4932        let color_track = tracks.get(color_idx)
4933            .ok_or(Error::InvalidData("color track index out of bounds"))?;
4934        let alpha_frames = alpha_track.sample_table.sample_sizes.len();
4935        let color_frames = color_track.sample_table.sample_sizes.len();
4936        if alpha_frames != color_frames {
4937            warn!(
4938                "alpha track has {} frames but color track has {} frames",
4939                alpha_frames, color_frames
4940            );
4941        }
4942    }
4943
4944    // Destructure — we need to consume the vec
4945    // Convert to a std vec so we can remove by index
4946    let mut tracks_vec: std::vec::Vec<ParsedTrack> = tracks.into_iter().collect();
4947
4948    // Remove alpha first if it has a higher index to avoid shifting
4949    let (color_track, alpha_track) = if let Some(ai) = alpha_idx {
4950        if ai > color_idx {
4951            let alpha = tracks_vec.remove(ai);
4952            let color = tracks_vec.remove(color_idx);
4953            (color, Some(alpha))
4954        } else {
4955            let color = tracks_vec.remove(color_idx);
4956            let alpha = tracks_vec.remove(ai);
4957            (color, Some(alpha))
4958        }
4959    } else {
4960        let color = tracks_vec.remove(color_idx);
4961        (color, None)
4962    };
4963
4964    let (alpha_timescale, alpha_sample_table) = match alpha_track {
4965        Some(t) => (Some(t.media_timescale), Some(t.sample_table)),
4966        None => (None, None),
4967    };
4968
4969    Ok(ParsedAnimationData {
4970        color_timescale: color_track.media_timescale,
4971        color_codec_config: color_track.codec_config,
4972        color_sample_table: color_track.sample_table,
4973        alpha_timescale,
4974        alpha_sample_table,
4975        loop_count: color_track.loop_count,
4976    })
4977}
4978
4979/// Parse media information box (minf)
4980fn read_minf<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<(SampleTable, TrackCodecConfig)>> {
4981    let mut iter = src.box_iter();
4982    while let Some(mut b) = iter.next_box()? {
4983        if b.head.name == BoxType::SampleTableBox {
4984            return Ok(Some(read_stbl(&mut b)?));
4985        } else {
4986            skip_box_remain(&mut b)?;
4987        }
4988    }
4989    Ok(None)
4990}
4991
/// Extract animation frames by resolving each sample's byte range against
/// the file's `mdat` boxes.
///
/// Frame durations come from the time-to-sample (`stts`) entries, converted
/// to milliseconds via `media_timescale`. A frame whose extent falls outside
/// every `mdat` is emitted with empty data after logging a warning.
#[cfg(feature = "eager")]
#[allow(deprecated)]
fn extract_animation_frames(
    sample_table: &SampleTable,
    media_timescale: u32,
    mdats: &mut [MediaDataBox],
) -> Result<TryVec<AnimationFrame>> {
    let mut frames = TryVec::new();

    // Expand the run-length stts entries into one duration per sample.
    let mut frame_durations = TryVec::new();
    for entry in &sample_table.time_to_sample {
        for _ in 0..entry.sample_count {
            // Guard against a zero timescale from a malformed mdhd.
            let duration_ms = if media_timescale > 0 {
                ((entry.sample_delta as u64) * 1000) / (media_timescale as u64)
            } else {
                0
            };
            // Saturate rather than fail on absurdly long durations.
            frame_durations.push(u32::try_from(duration_ms).unwrap_or(u32::MAX))?;
        }
    }

    // Extract each frame using precomputed sample offsets.
    for i in 0..sample_table.sample_sizes.len() {
        let sample_offset = *sample_table.sample_offsets.get(i)
            .ok_or(Error::InvalidData("sample offset index out of bounds"))?;
        let sample_size = *sample_table.sample_sizes.get(i)
            .ok_or(Error::InvalidData("sample size index out of bounds"))?;
        let duration_ms = frame_durations.get(i).copied().unwrap_or(0);

        // offset/size come from untrusted input: reject on overflow instead
        // of panicking (debug) or wrapping (release).
        let sample_end = sample_offset
            .checked_add(u64::from(sample_size))
            .ok_or(Error::InvalidData("sample extent overflow"))?;

        // The range is the same for every mdat probed; build it once.
        let range = ExtentRange::WithLength(Range {
            start: sample_offset,
            end: sample_end,
        });

        let mut frame_data = TryVec::new();
        let mut found = false;

        for mdat in mdats.iter_mut() {
            if mdat.contains_extent(&range) {
                mdat.read_extent(&range, &mut frame_data)?;
                found = true;
                break;
            }
        }

        if !found {
            log::warn!("Animation frame {} not found in mdat", i);
        }

        frames.push(AnimationFrame {
            data: frame_data,
            duration_ms,
        })?;
    }

    Ok(frames)
}
5051
5052/// Parse an ImageGrid property box
5053/// See ISO/IEC 23008-12:2017 § 6.6.2.3
5054fn read_grid<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<GridConfig> {
5055    let version = read_fullbox_version_no_flags(src, options)?;
5056    if version > 0 {
5057        return Err(Error::Unsupported("grid version > 0"));
5058    }
5059
5060    let flags_byte = src.read_u8()?;
5061    let rows = src.read_u8()?;
5062    let columns = src.read_u8()?;
5063
5064    // flags & 1 determines field size: 0 = 16-bit, 1 = 32-bit
5065    let (output_width, output_height) = if flags_byte & 1 == 0 {
5066        // 16-bit fields
5067        (u32::from(be_u16(src)?), u32::from(be_u16(src)?))
5068    } else {
5069        // 32-bit fields
5070        (be_u32(src)?, be_u32(src)?)
5071    };
5072
5073    Ok(GridConfig {
5074        rows,
5075        columns,
5076        output_width,
5077        output_height,
5078    })
5079}
5080
/// Parse an item location box inside a meta box
/// See ISO 14496-12:2015 § 8.11.3
///
/// The payload is buffered whole and parsed with a `BitReader` because the
/// offset/length/base-offset/index fields have widths chosen at runtime
/// (4-bit size codes selecting 0-, 4-, or 8-byte fields). Only in-file and
/// `idat` construction methods with no external data references are
/// supported; the trailing-bits check at the end rejects boxes whose
/// declared size doesn't match the parsed content exactly.
fn read_iloc<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<ItemLocationBoxItem>> {
    // Version (0, 1, or 2) drives the item_id/item_count widths below;
    // anything else fails the TryInto.
    let version: IlocVersion = read_fullbox_version_no_flags(src, options)?.try_into()?;

    // Buffer the rest of the box so it can be parsed bit-by-bit.
    let iloc = src.read_into_try_vec()?;
    let mut iloc = BitReader::new(&iloc);

    // Each 4-bit size code must decode to 0, 4, or 8 (bytes).
    let offset_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;
    let length_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;
    let base_offset_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;

    // index_size exists only in versions 1 and 2; version 0 has a reserved
    // nibble in its place.
    let index_size: Option<IlocFieldSize> = match version {
        IlocVersion::One | IlocVersion::Two => Some(iloc.read_u8(4)?.try_into()?),
        IlocVersion::Zero => {
            let _reserved = iloc.read_u8(4)?;
            None
        },
    };

    // item_count widened from 16 to 32 bits in version 2.
    let item_count = match version {
        IlocVersion::Zero | IlocVersion::One => iloc.read_u32(16)?,
        IlocVersion::Two => iloc.read_u32(32)?,
    };

    // Cap pre-allocation: item_count is untrusted, actual data is bounded by bitstream
    let mut items = TryVec::with_capacity(item_count.to_usize().min(4096))?;

    for _ in 0..item_count {
        // item_id widened from 16 to 32 bits in version 2 (like item_count).
        let item_id = match version {
            IlocVersion::Zero | IlocVersion::One => iloc.read_u32(16)?,
            IlocVersion::Two => iloc.read_u32(32)?,
        };

        // The spec isn't entirely clear how an `iloc` should be interpreted for version 0,
        // which has no `construction_method` field. It does say:
        // "For maximum compatibility, version 0 of this box should be used in preference to
        //  version 1 with `construction_method==0`, or version 2 when possible."
        // We take this to imply version 0 can be interpreted as using file offsets.
        let construction_method = match version {
            IlocVersion::Zero => ConstructionMethod::File,
            IlocVersion::One | IlocVersion::Two => {
                let _reserved = iloc.read_u16(12)?;
                match iloc.read_u16(4)? {
                    0 => ConstructionMethod::File,
                    1 => ConstructionMethod::Idat,
                    2 => return Err(Error::Unsupported("construction_method 'item_offset' is not supported")),
                    _ => return Err(Error::InvalidData("construction_method is taken from the set 0, 1 or 2 per ISO 14496-12:2015 § 8.11.3.3")),
                }
            },
        };

        // A nonzero index would point at an external file via dinf/dref,
        // which this parser does not support.
        let data_reference_index = iloc.read_u16(16)?;

        if data_reference_index != 0 {
            return Err(Error::Unsupported("external file references (iloc.data_reference_index != 0) are not supported"));
        }

        // base_offset_size may be 0, in which case read_u64(0) yields 0.
        let base_offset = iloc.read_u64(base_offset_size.to_bits())?;
        let extent_count = iloc.read_u16(16)?;

        if extent_count < 1 {
            return Err(Error::InvalidData("extent_count must have a value 1 or greater per ISO 14496-12:2015 § 8.11.3.3"));
        }

        // extent_count is a u16, so this capacity is bounded by 65535.
        let mut extents = TryVec::with_capacity(extent_count.to_usize())?;

        for _ in 0..extent_count {
            // Parsed but currently ignored, see `ItemLocationBoxExtent`
            let _extent_index = match &index_size {
                None | Some(IlocFieldSize::Zero) => None,
                Some(index_size) => {
                    debug_assert!(version == IlocVersion::One || version == IlocVersion::Two);
                    Some(iloc.read_u64(index_size.to_bits())?)
                },
            };

            // Per ISO 14496-12:2015 § 8.11.3.1:
            // "If the offset is not identified (the field has a length of zero), then the
            //  beginning of the source (offset 0) is implied"
            // This behavior will follow from BitReader::read_u64(0) -> 0.
            let extent_offset = iloc.read_u64(offset_size.to_bits())?;
            let extent_length = iloc.read_u64(length_size.to_bits())?;

            // "If the length is not specified, or specified as zero, then the entire length of
            //  the source is implied" (ibid)
            // All arithmetic is checked: offsets are untrusted input.
            let start = base_offset
                .checked_add(extent_offset)
                .ok_or(Error::InvalidData("offset calculation overflow"))?;
            let extent_range = if extent_length == 0 {
                ExtentRange::ToEnd(RangeFrom { start })
            } else {
                let end = start
                    .checked_add(extent_length)
                    .ok_or(Error::InvalidData("end calculation overflow"))?;
                ExtentRange::WithLength(Range { start, end })
            };

            extents.push(ItemLocationBoxExtent { extent_range })?;
        }

        items.push(ItemLocationBoxItem { item_id, construction_method, extents })?;
    }

    // The box must be consumed exactly; trailing bits mean a size mismatch
    // between the header and the parsed content.
    if iloc.remaining() == 0 {
        Ok(items)
    } else {
        Err(Error::InvalidData("invalid iloc size"))
    }
}
5191
5192/// Parse an ftyp box.
5193/// See ISO 14496-12:2015 § 4.3
5194fn read_ftyp<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<FileTypeBox> {
5195    let major = be_u32(src)?;
5196    let minor = be_u32(src)?;
5197    let bytes_left = src.bytes_left();
5198    if !bytes_left.is_multiple_of(4) {
5199        return Err(Error::InvalidData("invalid ftyp size"));
5200    }
5201    // Is a brand_count of zero valid?
5202    let brand_count = bytes_left / 4;
5203    let mut brands = TryVec::with_capacity(brand_count.try_into()?)?;
5204    for _ in 0..brand_count {
5205        brands.push(be_u32(src)?.into())?;
5206    }
5207    Ok(FileTypeBox {
5208        major_brand: From::from(major),
5209        minor_version: minor,
5210        compatible_brands: brands,
5211    })
5212}
5213
5214#[cfg_attr(debug_assertions, track_caller)]
5215fn check_parser_state<T>(header: &BoxHeader, left: &Take<T>) -> Result<(), Error> {
5216    let limit = left.limit();
5217    // Allow fully consumed boxes, or size=0 boxes (where original size was u64::MAX)
5218    if limit == 0 || header.size == u64::MAX {
5219        Ok(())
5220    } else {
5221        Err(Error::InvalidData("unread box content or bad parser sync"))
5222    }
5223}
5224
5225/// Skip a number of bytes that we don't care to parse.
5226fn skip<T: Read>(src: &mut T, bytes: u64) -> Result<()> {
5227    std::io::copy(&mut src.take(bytes), &mut std::io::sink())?;
5228    Ok(())
5229}
5230
5231fn be_u16<T: ReadBytesExt>(src: &mut T) -> Result<u16> {
5232    src.read_u16::<byteorder::BigEndian>().map_err(From::from)
5233}
5234
5235fn be_u32<T: ReadBytesExt>(src: &mut T) -> Result<u32> {
5236    src.read_u32::<byteorder::BigEndian>().map_err(From::from)
5237}
5238
5239fn be_i32<T: ReadBytesExt>(src: &mut T) -> Result<i32> {
5240    src.read_i32::<byteorder::BigEndian>().map_err(From::from)
5241}
5242
5243fn be_u64<T: ReadBytesExt>(src: &mut T) -> Result<u64> {
5244    src.read_u64::<byteorder::BigEndian>().map_err(From::from)
5245}