// zenavif_parse — lib.rs
1#![deny(unsafe_code)]
2#![allow(clippy::missing_safety_doc)]
3//! AVIF container parser (ISOBMFF/MIAF demuxer).
4//!
5//! Extracts AV1 payloads, alpha channels, grid tiles, animation frames,
6//! and container metadata from AVIF files. Written in safe Rust with
7//! fallible allocations throughout.
8//!
9//! The primary API is [`AvifParser`], which performs zero-copy parsing by
10//! recording byte offsets and resolving data on demand.
11//!
12//! A legacy eager API ([`read_avif`]) is available behind the `eager` feature flag.
13
14// This Source Code Form is subject to the terms of the Mozilla Public
15// License, v. 2.0. If a copy of the MPL was not distributed with this
16// file, You can obtain one at https://mozilla.org/MPL/2.0/.
17
18use arrayvec::ArrayVec;
19use log::{debug, warn};
20
21use bitreader::BitReader;
22use byteorder::ReadBytesExt;
23use fallible_collections::{TryClone, TryReserveError};
24use std::borrow::Cow;
25use std::convert::{TryFrom, TryInto as _};
26
27use std::io::{Read, Take};
28use std::num::NonZeroU32;
29use std::ops::{Range, RangeFrom};
30
31mod obu;
32
33mod boxes;
34use crate::boxes::{BoxType, FourCC};
35
36/// This crate can be used from C.
37#[cfg(feature = "c_api")]
38pub mod c_api;
39
40pub use enough::{Stop, StopReason, Unstoppable};
41
42// Arbitrary buffer size limit used for raw read_bufs on a box.
43// const BUF_SIZE_LIMIT: u64 = 10 * 1024 * 1024;
44
/// A trait to indicate a type can be infallibly converted to `u64`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
trait ToU64 {
    /// Widen `self` to `u64` without any possibility of failure.
    fn to_u64(self) -> u64;
}

/// Infallible: `usize` is never wider than `u64` on supported targets.
impl ToU64 for usize {
    fn to_u64(self) -> u64 {
        // Compile-time proof that the widening cast below cannot truncate.
        const _: () = assert!(usize::BITS <= u64::BITS);
        self as u64
    }
}
58
/// A trait to indicate a type can be infallibly converted to `usize`.
/// This should only be implemented for infallible conversions, so only unsigned types are valid.
pub(crate) trait ToUsize {
    /// Widen `self` to `usize` without any possibility of failure.
    fn to_usize(self) -> usize;
}

/// Implements [`ToUsize`] for one or more unsigned types via an
/// infallible widening cast, with a compile-time check that each
/// source type really does fit in `usize`.
macro_rules! impl_to_usize_from {
    ( $( $from_type:ty ),+ $(,)? ) => {
        $(
            impl ToUsize for $from_type {
                fn to_usize(self) -> usize {
                    // Widening-only: verified at compile time.
                    const _: () = assert!(<$from_type>::BITS <= usize::BITS);
                    self as usize
                }
            }
        )+
    };
}

impl_to_usize_from!(u8, u16, u32);
80
/// Indicate the current offset (i.e., bytes already read) in a reader.
trait Offset {
    /// Number of bytes consumed from the underlying reader so far.
    fn offset(&self) -> u64;
}

/// Wraps a reader to track the current offset.
struct OffsetReader<'a, T> {
    /// Underlying reader; all reads are delegated to it.
    reader: &'a mut T,
    /// Running count of bytes successfully read so far.
    offset: u64,
}

impl<'a, T> OffsetReader<'a, T> {
    /// Wraps `reader`, starting the byte count at zero.
    fn new(reader: &'a mut T) -> Self {
        Self { reader, offset: 0 }
    }
}

impl<T> Offset for OffsetReader<'_, T> {
    fn offset(&self) -> u64 {
        self.offset
    }
}
103
104impl<T: Read> Read for OffsetReader<'_, T> {
105    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
106        let bytes_read = self.reader.read(buf)?;
107        self.offset = self
108            .offset
109            .checked_add(bytes_read.to_u64())
110            .ok_or(Error::Unsupported("total bytes read too large for offset type"))?;
111        Ok(bytes_read)
112    }
113}
114
// Fallible-allocation containers used throughout the crate.
pub(crate) type TryVec<T> = fallible_collections::TryVec<T>;
// Byte-string flavour of `TryVec`; not guaranteed to hold valid UTF-8.
pub(crate) type TryString = fallible_collections::TryVec<u8>;

// To ensure we don't use stdlib allocating types by accident: these unit
// structs shadow the prelude names `Vec`, `Box`, `HashMap` and `String`,
// so any unqualified use of the infallible std versions fails to compile.
// Code that genuinely wants a std type must spell out `std::vec::Vec` etc.
#[allow(dead_code)]
struct Vec;
#[allow(dead_code)]
struct Box;
#[allow(dead_code)]
struct HashMap;
#[allow(dead_code)]
struct String;
127
/// Describes parser failures.
///
/// This enum wraps the standard `io::Error` type, unified with
/// our own parser error states and those of crates we use.
#[derive(Debug)]
pub enum Error {
    /// Parse error caused by corrupt or malformed data.
    InvalidData(&'static str),
    /// Parse error caused by limited parser support rather than invalid data.
    Unsupported(&'static str),
    /// Reflect `std::io::ErrorKind::UnexpectedEof` for short data.
    UnexpectedEOF,
    /// Propagate underlying errors from `std::io`.
    Io(std::io::Error),
    /// Parsing terminated without detecting a `moov` box.
    ///
    /// NOTE(review): this doc previously referenced `read_mp4`, which is not
    /// visible in this crate — presumably a leftover from the mp4 parser this
    /// code derives from; confirm and update the reference.
    NoMoov,
    /// Out of memory (a fallible allocation failed).
    OutOfMemory,
    /// Resource limit exceeded during parsing.
    ResourceLimitExceeded(&'static str),
    /// Operation was stopped/cancelled (see the re-exported `enough` crate types).
    Stopped(enough::StopReason),
}
151
152impl std::fmt::Display for Error {
153    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
154        let msg = match self {
155            Self::InvalidData(s) | Self::Unsupported(s) | Self::ResourceLimitExceeded(s) => s,
156            Self::UnexpectedEOF => "EOF",
157            Self::Io(err) => return err.fmt(f),
158            Self::NoMoov => "Missing Moov box",
159            Self::OutOfMemory => "OOM",
160            Self::Stopped(reason) => return write!(f, "Stopped: {}", reason),
161        };
162        f.write_str(msg)
163    }
164}
165
166impl std::error::Error for Error {}
167
168impl From<bitreader::BitReaderError> for Error {
169    #[cold]
170    #[cfg_attr(debug_assertions, track_caller)]
171    fn from(err: bitreader::BitReaderError) -> Self {
172        log::warn!("bitreader: {err}");
173        Self::InvalidData("truncated bits")
174    }
175}
176
177impl From<std::io::Error> for Error {
178    fn from(err: std::io::Error) -> Self {
179        match err.kind() {
180            std::io::ErrorKind::UnexpectedEof => Self::UnexpectedEOF,
181            _ => Self::Io(err),
182        }
183    }
184}
185
impl From<std::string::FromUtf8Error> for Error {
    // Detail is dropped: the parser only needs to know the text was bad.
    fn from(_: std::string::FromUtf8Error) -> Self {
        Self::InvalidData("invalid utf8")
    }
}

impl From<std::num::TryFromIntError> for Error {
    // Classified as `Unsupported` (not `InvalidData`): the value may be
    // legal per spec but outside what this parser can represent.
    fn from(_: std::num::TryFromIntError) -> Self {
        Self::Unsupported("integer conversion failed")
    }
}
197
198impl From<Error> for std::io::Error {
199    fn from(err: Error) -> Self {
200        let kind = match err {
201            Error::InvalidData(_) => std::io::ErrorKind::InvalidData,
202            Error::UnexpectedEOF => std::io::ErrorKind::UnexpectedEof,
203            Error::Io(io_err) => return io_err,
204            _ => std::io::ErrorKind::Other,
205        };
206        Self::new(kind, err)
207    }
208}
209
impl From<TryReserveError> for Error {
    // Any fallible-allocation failure surfaces uniformly as OOM.
    fn from(_: TryReserveError) -> Self {
        Self::OutOfMemory
    }
}

impl From<enough::StopReason> for Error {
    // Cooperative-cancellation tokens abort parsing with `Stopped`.
    fn from(reason: enough::StopReason) -> Self {
        Self::Stopped(reason)
    }
}
221
/// Result shorthand using our Error enum.
///
/// The error type defaults to [`Error`] but can be overridden per use.
pub type Result<T, E = Error> = std::result::Result<T, E>;
224
/// Basic ISO box structure.
///
/// mp4 files are a sequence of possibly-nested 'box' structures.  Each box
/// begins with a header describing the length of the box's data and a
/// four-byte box type which identifies the type of the box. Together these
/// are enough to interpret the contents of that section of the file.
///
/// See ISO 14496-12:2015 § 4.2
#[derive(Debug, Clone, Copy)]
struct BoxHeader {
    /// Box type.
    name: BoxType,
    /// Size of the box in bytes.
    size: u64,
    /// Offset to the start of the contained data (or header size).
    offset: u64,
    /// Uuid for extended type (`uuid` boxes only).
    #[allow(unused)]
    uuid: Option<[u8; 16]>,
}

impl BoxHeader {
    /// Compact header: 4-byte size + 4-byte type.
    const MIN_SIZE: u64 = 8;
    /// Extended header: 4-byte size + 4-byte type + 8-byte largesize.
    /// (Previous comment said "16-byte size", which contradicts the value 16.)
    const MIN_LARGE_SIZE: u64 = 16;
}
252
/// File type box 'ftyp'.
///
/// Declares the file's major brand plus all brands it is compatible with.
#[derive(Debug)]
#[allow(unused)]
struct FileTypeBox {
    major_brand: FourCC,
    minor_version: u32,
    compatible_brands: TryVec<FourCC>,
}

/// Handler reference box 'hdlr'.
///
/// Only the handler type is captured here; other hdlr fields are not retained.
#[derive(Debug)]
#[allow(unused)]
struct HandlerBox {
    handler_type: FourCC,
}
268
/// AV1 codec configuration from the `av1C` property box.
///
/// Contains the AV1 codec parameters as signaled in the container.
/// See AV1-ISOBMFF § 2.3.
///
/// NOTE(review): these values are taken from the container, not re-derived
/// from the AV1 sequence header — confirm against the bitstream if exactness
/// matters.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AV1Config {
    /// AV1 seq_profile (0=Main, 1=High, 2=Professional)
    pub profile: u8,
    /// AV1 seq_level_idx for operating point 0
    pub level: u8,
    /// AV1 seq_tier for operating point 0
    pub tier: u8,
    /// Bit depth (8, 10, or 12)
    pub bit_depth: u8,
    /// True if monochrome (no chroma planes)
    pub monochrome: bool,
    /// Chroma subsampling X (1 = horizontally subsampled)
    pub chroma_subsampling_x: u8,
    /// Chroma subsampling Y (1 = vertically subsampled)
    pub chroma_subsampling_y: u8,
    /// Chroma sample position (0=unknown, 1=vertical, 2=colocated)
    pub chroma_sample_position: u8,
}
292
/// Colour information from the `colr` property box.
///
/// Can be either CICP-based (`nclx`) or an ICC profile (`rICC`/`prof`).
/// See ISOBMFF § 12.1.5.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ColorInformation {
    /// CICP-based color information (colour_type = 'nclx')
    Nclx {
        /// Colour primaries (ITU-T H.273 Table 2)
        color_primaries: u16,
        /// Transfer characteristics (ITU-T H.273 Table 3)
        transfer_characteristics: u16,
        /// Matrix coefficients (ITU-T H.273 Table 4)
        matrix_coefficients: u16,
        /// True if full range (0-255 for 8-bit), false if limited/studio range
        full_range: bool,
    },
    /// ICC profile (colour_type = 'rICC' or 'prof')
    ///
    /// NOTE(review): uses the infallible `std::vec::Vec` rather than the
    /// crate's `TryVec`, bypassing the fallible-allocation convention stated
    /// in the crate docs — confirm this is intentional for the public API.
    IccProfile(std::vec::Vec<u8>),
}
313
/// Image rotation from the `irot` property box.
///
/// Specifies a counter-clockwise rotation to apply after decoding.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ImageRotation {
    /// Rotation angle in degrees counter-clockwise: 0, 90, 180, or 270.
    /// (NOTE(review): presumably expanded from the 2-bit on-disk field;
    /// only the four listed values are expected.)
    pub angle: u16,
}

/// Image mirror from the `imir` property box.
///
/// Specifies a mirror (flip) axis to apply after rotation.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ImageMirror {
    /// Mirror axis: 0 = top-to-bottom (vertical axis, left-right flip),
    /// 1 = left-to-right (horizontal axis, top-bottom flip).
    pub axis: u8,
}
334
/// Clean aperture from the `clap` property box.
///
/// Defines a crop rectangle as a centered region. All values are
/// stored as exact rationals (numerator/denominator). Offsets are
/// signed; dimensions and all denominators are unsigned.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CleanAperture {
    /// Width of the clean aperture (numerator)
    pub width_n: u32,
    /// Width of the clean aperture (denominator)
    pub width_d: u32,
    /// Height of the clean aperture (numerator)
    pub height_n: u32,
    /// Height of the clean aperture (denominator)
    pub height_d: u32,
    /// Horizontal offset of the clean aperture center (numerator, signed)
    pub horiz_off_n: i32,
    /// Horizontal offset of the clean aperture center (denominator)
    pub horiz_off_d: u32,
    /// Vertical offset of the clean aperture center (numerator, signed)
    pub vert_off_n: i32,
    /// Vertical offset of the clean aperture center (denominator)
    pub vert_off_d: u32,
}

/// Pixel aspect ratio from the `pasp` property box.
///
/// For AVIF, the spec requires this to be 1:1 if present.
/// See ISOBMFF § 12.1.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PixelAspectRatio {
    /// Horizontal spacing
    pub h_spacing: u32,
    /// Vertical spacing
    pub v_spacing: u32,
}
371
/// Content light level info from the `clli` property box.
///
/// HDR metadata for display mapping.
/// See ISOBMFF § 12.1.5 / ITU-T H.274.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ContentLightLevel {
    /// Maximum content light level (cd/m²)
    pub max_content_light_level: u16,
    /// Maximum picture average light level (cd/m²)
    pub max_pic_average_light_level: u16,
}

/// Mastering display colour volume from the `mdcv` property box.
///
/// HDR metadata describing the mastering display's color volume.
/// See ISOBMFF § 12.1.5 / SMPTE ST 2086.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct MasteringDisplayColourVolume {
    /// Display primaries: [(x, y); 3] in 0.00002 units (CIE 1931).
    /// Order: green, blue, red (per SMPTE ST 2086)
    pub primaries: [(u16, u16); 3],
    /// White point (x, y) in 0.00002 units
    pub white_point: (u16, u16),
    /// Maximum display luminance in 0.0001 cd/m² units
    pub max_luminance: u32,
    /// Minimum display luminance in 0.0001 cd/m² units
    pub min_luminance: u32,
}
400
/// Content colour volume from the `cclv` property box.
///
/// Describes the colour volume of the content. Derived from H.265 D.2.40 /
/// ITU-T H.274. All fields are optional, controlled by presence flags
/// in the box; absent fields are `None` here.
/// See ISOBMFF § 12.1.5.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ContentColourVolume {
    /// Content colour primaries (x, y) for 3 primaries, as signed i32.
    /// Present only if `ccv_primaries_present_flag` was set.
    pub primaries: Option<[(i32, i32); 3]>,
    /// Minimum luminance value. Present only if flag was set.
    pub min_luminance: Option<u32>,
    /// Maximum luminance value. Present only if flag was set.
    pub max_luminance: Option<u32>,
    /// Average luminance value. Present only if flag was set.
    pub avg_luminance: Option<u32>,
}

/// Ambient viewing environment from the `amve` property box.
///
/// Describes the ambient viewing conditions under which the content
/// was authored. See ISOBMFF § 12.1.5 / H.265 D.2.39.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AmbientViewingEnvironment {
    /// Ambient illuminance in units of 1/10000 cd/m²
    pub ambient_illuminance: u32,
    /// Ambient light x chromaticity (CIE 1931), units of 1/50000
    pub ambient_light_x: u16,
    /// Ambient light y chromaticity (CIE 1931), units of 1/50000
    pub ambient_light_y: u16,
}
432
/// Per-channel gain map parameters from ISO 21496-1.
///
/// Each field is a rational number (numerator/denominator pair) describing
/// how to apply the gain map for this channel. Min/max and offsets are
/// signed rationals; gamma is unsigned.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GainMapChannel {
    /// Minimum gain map value (numerator).
    pub gain_map_min_n: i32,
    /// Minimum gain map value (denominator).
    pub gain_map_min_d: u32,
    /// Maximum gain map value (numerator).
    pub gain_map_max_n: i32,
    /// Maximum gain map value (denominator).
    pub gain_map_max_d: u32,
    /// Gamma curve parameter (numerator).
    pub gamma_n: u32,
    /// Gamma curve parameter (denominator).
    pub gamma_d: u32,
    /// Base image offset (numerator).
    pub base_offset_n: i32,
    /// Base image offset (denominator).
    pub base_offset_d: u32,
    /// Alternate image offset (numerator).
    pub alternate_offset_n: i32,
    /// Alternate image offset (denominator).
    pub alternate_offset_d: u32,
}

/// Gain map metadata from a ToneMapImage (`tmap`) derived image item.
///
/// Describes how to apply a gain map to convert between SDR and HDR
/// renditions. The gain map is a separate AV1-encoded image that, combined
/// with this metadata, allows reconstructing an HDR image from the SDR base.
///
/// See ISO 21496-1:2025 for the full specification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct GainMapMetadata {
    /// If true, each RGB channel has independent gain map parameters.
    /// If false, `channels[0]` applies to all three channels.
    pub is_multichannel: bool,
    /// If true, the gain map is encoded in the base image's colour space.
    /// If false, it's in the alternate image's colour space.
    pub use_base_colour_space: bool,
    /// Base HDR headroom (numerator).
    pub base_hdr_headroom_n: u32,
    /// Base HDR headroom (denominator).
    pub base_hdr_headroom_d: u32,
    /// Alternate HDR headroom (numerator).
    pub alternate_hdr_headroom_n: u32,
    /// Alternate HDR headroom (denominator).
    pub alternate_hdr_headroom_d: u32,
    /// Per-channel parameters. For single-channel mode, only index 0 is
    /// meaningful (indices 1 and 2 are copies of index 0).
    pub channels: [GainMapChannel; 3],
}
488
/// Gain map information extracted from an AVIF container.
///
/// Bundles the ISO 21496-1 metadata, the raw AV1-encoded gain map image data,
/// and the alternate rendition's color information into a single type.
///
/// The `gain_map_data` field contains an AV1 bitstream that can be decoded
/// with any AV1 decoder (e.g., rav1d) to obtain the gain map pixel values.
///
/// # Example
///
/// ```no_run
/// let bytes = std::fs::read("hdr.avif").unwrap();
/// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
/// if let Some(Ok(gm)) = parser.gain_map() {
///     println!("Gain map: {} bytes", gm.gain_map_data.len());
///     println!("Multichannel: {}", gm.metadata.is_multichannel);
/// }
/// ```
#[derive(Debug, Clone)]
pub struct AvifGainMap {
    /// ISO 21496-1 gain map metadata (parsed from the `tmap` item payload).
    pub metadata: GainMapMetadata,
    /// Raw AV1 bitstream of the gain map image. Decode with an AV1 decoder
    /// to obtain the gain map pixel values.
    ///
    /// NOTE(review): `std::vec::Vec` here sidesteps the crate's fallible
    /// allocation convention — confirm this is intentional for the public API.
    pub gain_map_data: std::vec::Vec<u8>,
    /// Color information for the alternate (typically HDR) rendition,
    /// from the `tmap` item's `colr` property.
    pub alt_color_info: Option<ColorInformation>,
}

/// Depth auxiliary image extracted from an AVIF container.
///
/// AVIF supports auxiliary images via `auxl` item references with `auxC` type
/// properties, following the HEIF (ISO 23008-12) auxiliary image mechanism.
/// Depth maps use the auxiliary type URN
/// `urn:mpeg:mpegB:cicp:systems:auxiliary:depth` (MPEG-B Part 23) or the
/// legacy HEVC-style `urn:mpeg:hevc:2015:auxid:2`.
///
/// The `data` field contains a raw AV1 bitstream that can be decoded with
/// any AV1 decoder to obtain the depth image pixel values (typically
/// monochrome 8-bit or 10-bit).
///
/// # Example
///
/// ```no_run
/// let bytes = std::fs::read("portrait.avif").unwrap();
/// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
/// if let Some(Ok(dm)) = parser.depth_map() {
///     println!("Depth map: {}x{}, {} bytes AV1 data", dm.width, dm.height, dm.data.len());
/// }
/// ```
#[derive(Debug, Clone)]
pub struct AvifDepthMap {
    /// Raw AV1 bitstream of the depth auxiliary image. Decode with an AV1
    /// decoder to obtain grayscale depth pixel values.
    pub data: std::vec::Vec<u8>,
    /// Width of the depth image in pixels (from `ispe` property).
    pub width: u32,
    /// Height of the depth image in pixels (from `ispe` property).
    pub height: u32,
    /// AV1 codec configuration for the depth item (from `av1C` property).
    pub av1_config: Option<AV1Config>,
    /// Color information for the depth item (from `colr` property), if present.
    pub color_info: Option<ColorInformation>,
}
554
/// Operating point selector from the `a1op` property box.
///
/// Selects which AV1 operating point to decode for multi-operating-point images.
/// See AVIF § 4.3.4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct OperatingPointSelector {
    /// Operating point index (0..31)
    pub op_index: u8,
}

/// Layer selector from the `lsel` property box.
///
/// Selects which spatial layer to render for layered/progressive images.
/// See HEIF (ISO 23008-12).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LayerSelector {
    /// Layer ID to render (0-3), or 0xFFFF for all layers (progressive)
    pub layer_id: u16,
}

/// AV1 layered image indexing from the `a1lx` property box.
///
/// Provides byte sizes for the first 3 layers so decoders can seek
/// to a specific layer without parsing the full bitstream.
/// See AVIF § 4.3.6.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct AV1LayeredImageIndexing {
    /// Byte sizes of layers 0, 1, 2. The last layer's size is implicit
    /// (total item size minus the sum of these three).
    pub layer_sizes: [u32; 3],
}
586
/// Options for parsing AVIF files.
///
/// Prefer using [`DecodeConfig::lenient()`] with [`AvifParser`] instead.
// Derives merged into one list (previously split across two `#[derive]`
// attributes, inconsistent with the rest of the file).
#[derive(Debug, Clone, Copy, Default)]
pub struct ParseOptions {
    /// Enable lenient parsing mode
    ///
    /// When true, non-critical validation errors (like non-zero flags in boxes
    /// that expect zero flags) will be ignored instead of returning errors.
    /// This allows parsing of slightly malformed but otherwise valid AVIF files.
    ///
    /// Default: false (strict validation)
    pub lenient: bool,
}
602
/// Configuration for parsing AVIF files with resource limits and validation options
///
/// Provides fine-grained control over resource consumption during AVIF parsing,
/// allowing defensive parsing against malicious or malformed files.
///
/// Resource limits are checked **before** allocations occur, preventing out-of-memory
/// conditions from malicious files that claim unrealistic dimensions or counts.
///
/// # Examples
///
/// ```rust
/// use zenavif_parse::DecodeConfig;
///
/// // Default limits (suitable for most apps)
/// let config = DecodeConfig::default();
///
/// // Strict limits for untrusted input
/// let config = DecodeConfig::default()
///     .with_peak_memory_limit(100_000_000)  // 100MB
///     .with_total_megapixels_limit(64)       // 64MP max
///     .with_max_animation_frames(100);       // 100 frames
///
/// // No limits (backwards compatible with read_avif)
/// let config = DecodeConfig::unlimited();
/// ```
#[derive(Debug, Clone)]
pub struct DecodeConfig {
    /// Maximum peak heap memory usage in bytes.
    /// Default: 1GB (1,000,000,000 bytes)
    pub peak_memory_limit: Option<u64>,

    /// Maximum total megapixels for grid images.
    /// Default: 512 megapixels
    pub total_megapixels_limit: Option<u32>,

    /// Maximum number of animation frames.
    /// Default: 10,000 frames
    pub max_animation_frames: Option<u32>,

    /// Maximum number of grid tiles.
    /// Default: 1,000 tiles
    pub max_grid_tiles: Option<u32>,

    /// Enable lenient parsing mode.
    /// Default: false (strict validation)
    pub lenient: bool,
}

impl Default for DecodeConfig {
    /// Conservative defaults suitable for most applications; the exact
    /// values are documented on each field.
    fn default() -> Self {
        Self {
            peak_memory_limit: Some(1_000_000_000),
            total_megapixels_limit: Some(512),
            max_animation_frames: Some(10_000),
            max_grid_tiles: Some(1_000),
            lenient: false,
        }
    }
}

// `#[must_use]` added throughout: these are by-value builder methods, so
// discarding the return value silently loses the configuration change.
impl DecodeConfig {
    /// Create a configuration with no resource limits.
    ///
    /// Equivalent to the behavior of `read_avif()` before resource limits were added.
    #[must_use]
    pub fn unlimited() -> Self {
        Self {
            peak_memory_limit: None,
            total_megapixels_limit: None,
            max_animation_frames: None,
            max_grid_tiles: None,
            lenient: false,
        }
    }

    /// Set the peak memory limit in bytes.
    #[must_use]
    pub fn with_peak_memory_limit(mut self, bytes: u64) -> Self {
        self.peak_memory_limit = Some(bytes);
        self
    }

    /// Set the total megapixels limit for grid images.
    #[must_use]
    pub fn with_total_megapixels_limit(mut self, megapixels: u32) -> Self {
        self.total_megapixels_limit = Some(megapixels);
        self
    }

    /// Set the maximum animation frame count.
    #[must_use]
    pub fn with_max_animation_frames(mut self, frames: u32) -> Self {
        self.max_animation_frames = Some(frames);
        self
    }

    /// Set the maximum grid tile count.
    #[must_use]
    pub fn with_max_grid_tiles(mut self, tiles: u32) -> Self {
        self.max_grid_tiles = Some(tiles);
        self
    }

    /// Enable or disable lenient parsing mode.
    #[must_use]
    pub fn lenient(mut self, lenient: bool) -> Self {
        self.lenient = lenient;
        self
    }
}
707
/// Grid image configuration for tiled/grid-based AVIF images.
///
/// For tiled/grid AVIF images, this describes the grid layout.
/// Grid images are composed of multiple AV1 image items (tiles) arranged in a rectangular grid.
///
/// ## Grid Layout Determination
///
/// Grid layout can be specified in two ways:
/// 1. **Explicit ImageGrid property box** - contains rows, columns, and output dimensions
/// 2. **Calculated from ispe properties** - when no ImageGrid box exists, dimensions are
///    calculated by dividing the grid item's dimensions by a tile's dimensions
///
/// ## Output Dimensions
///
/// - `output_width` and `output_height` may be 0, indicating the decoder should calculate
///   them from the tile dimensions
/// - When non-zero, they specify the exact output dimensions of the composed image
// Previously the doc comment was duplicated and split around the derive
// attribute; merged into one block. `Copy` and `Eq` added for consistency
// with the other property structs in this file (all fields are Copy + Eq).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GridConfig {
    /// Number of tile rows (1-256).
    /// NOTE(review): stored as `u8` (max 255) — presumably the on-disk
    /// rows_minus_one has 1 added elsewhere; confirm how 256 is represented.
    pub rows: u8,
    /// Number of tile columns (1-256); same caveat as `rows`.
    pub columns: u8,
    /// Output width in pixels (0 = calculate from tiles)
    pub output_width: u32,
    /// Output height in pixels (0 = calculate from tiles)
    pub output_height: u32,
}
737
/// Frame information for animated AVIF (legacy eager API).
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::frame()` which returns `FrameRef` instead")]
#[derive(Debug)]
pub struct AnimationFrame {
    /// AV1 bitstream data for this frame
    pub data: TryVec<u8>,
    /// Duration in milliseconds (0 if unknown)
    pub duration_ms: u32,
}

/// Animation configuration for animated AVIF (avis brand); legacy eager API.
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::animation_info()` and `AvifParser::frames()` instead")]
#[derive(Debug)]
// Allows referring to the deprecated `AnimationFrame` member type below.
#[allow(deprecated)]
pub struct AnimationConfig {
    /// Number of times to loop (0 = infinite)
    pub loop_count: u32,
    /// All frames in the animation
    pub frames: TryVec<AnimationFrame>,
}
760
// Internal structures for animation parsing

/// Subset of the `mvhd` movie header retained while parsing.
/// Fields are currently unread after parsing (note the underscore names).
#[derive(Debug)]
struct MovieHeader {
    _timescale: u32,
    _duration: u64,
}

/// Subset of the `mdhd` media header; `timescale` is the per-track
/// time unit used to convert sample deltas to durations.
#[derive(Debug)]
struct MediaHeader {
    timescale: u32,
    _duration: u64,
}

/// One run-length entry from the `stts` (time-to-sample) box:
/// `sample_count` consecutive samples each lasting `sample_delta` ticks.
#[derive(Debug)]
struct TimeToSampleEntry {
    sample_count: u32,
    sample_delta: u32,
}

/// One entry from the `stsc` (sample-to-chunk) box, mapping chunks
/// starting at `first_chunk` to a per-chunk sample count.
#[derive(Debug)]
struct SampleToChunkEntry {
    first_chunk: u32,
    samples_per_chunk: u32,
    _sample_description_index: u32,
}

/// Flattened sample table for one track.
#[derive(Debug)]
struct SampleTable {
    /// Run-length timing entries from `stts`.
    time_to_sample: TryVec<TimeToSampleEntry>,
    /// Per-sample byte sizes from `stsz`.
    sample_sizes: TryVec<u32>,
    /// Precomputed byte offset for each sample, derived from
    /// sample_to_chunk + chunk_offsets + sample_sizes during parsing.
    sample_offsets: TryVec<u64>,
}
796
/// A track reference entry (e.g., auxl, cdsc) parsed from a `tref` sub-box.
#[derive(Debug)]
struct TrackReference {
    /// Reference kind, e.g. `auxl` (auxiliary) or `cdsc` (content describes).
    reference_type: FourCC,
    /// IDs of the tracks this reference points at.
    track_ids: TryVec<u32>,
}

/// Codec properties extracted from a `stsd` VisualSampleEntry.
#[derive(Debug, Clone, Default)]
struct TrackCodecConfig {
    av1_config: Option<AV1Config>,
    color_info: Option<ColorInformation>,
}

/// Parsed data from a single track box (`trak`).
#[derive(Debug)]
struct ParsedTrack {
    track_id: u32,
    handler_type: FourCC,
    media_timescale: u32,
    sample_table: SampleTable,
    references: TryVec<TrackReference>,
    loop_count: u32,
    codec_config: TrackCodecConfig,
}

/// Paired color + optional alpha animation data after track association.
/// Alpha fields are `None` when the animation has no alpha track.
struct ParsedAnimationData {
    color_timescale: u32,
    color_sample_table: SampleTable,
    alpha_timescale: Option<u32>,
    alpha_sample_table: Option<SampleTable>,
    loop_count: u32,
    color_codec_config: TrackCodecConfig,
}
832
833#[cfg(feature = "eager")]
834#[deprecated(since = "1.5.0", note = "Use `AvifParser` for zero-copy parsing instead")]
835#[derive(Debug, Default)]
836#[allow(deprecated)]
837pub struct AvifData {
838    /// AV1 data for the color channels.
839    ///
840    /// The collected data indicated by the `pitm` box, See ISO 14496-12:2015 § 8.11.4
841    pub primary_item: TryVec<u8>,
842    /// AV1 data for alpha channel.
843    ///
844    /// Associated alpha channel for the primary item, if any
845    pub alpha_item: Option<TryVec<u8>>,
846    /// If true, divide RGB values by the alpha value.
847    ///
848    /// See `prem` in MIAF § 7.3.5.2
849    pub premultiplied_alpha: bool,
850
851    /// Grid configuration for tiled images.
852    ///
853    /// If present, the image is a grid and `grid_tiles` contains the tile data.
854    /// Grid layout is determined either from an explicit ImageGrid property box or
855    /// calculated from ispe (Image Spatial Extents) properties.
856    ///
857    /// ## Example
858    ///
859    /// ```no_run
860    /// #[allow(deprecated)]
861    /// use std::fs::File;
862    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
863    /// #[allow(deprecated)]
864    /// let data = zenavif_parse::read_avif(&mut File::open("image.avif")?)?;
865    ///
866    /// if let Some(grid) = data.grid_config {
867    ///     println!("Grid: {}×{} tiles", grid.rows, grid.columns);
868    ///     println!("Output: {}×{}", grid.output_width, grid.output_height);
869    ///     println!("Tile count: {}", data.grid_tiles.len());
870    /// }
871    /// # Ok(())
872    /// # }
873    /// ```
874    pub grid_config: Option<GridConfig>,
875
876    /// AV1 payloads for grid image tiles.
877    ///
878    /// Empty for non-grid images. For grid images, contains one entry per tile.
879    ///
880    /// **Tile ordering:** Tiles are guaranteed to be in the correct order for grid assembly,
881    /// sorted by their dimgIdx (reference index). This is row-major order: tiles in the first
882    /// row from left to right, then the second row, etc.
883    pub grid_tiles: TryVec<TryVec<u8>>,
884
885    /// Animation configuration (for animated AVIF with avis brand)
886    ///
887    /// When present, primary_item contains the first frame
888    pub animation: Option<AnimationConfig>,
889
890    /// AV1 codec configuration from the container's `av1C` property.
891    pub av1_config: Option<AV1Config>,
892
893    /// Colour information from the container's `colr` property.
894    pub color_info: Option<ColorInformation>,
895
896    /// Image rotation from the container's `irot` property.
897    pub rotation: Option<ImageRotation>,
898
899    /// Image mirror from the container's `imir` property.
900    pub mirror: Option<ImageMirror>,
901
902    /// Clean aperture (crop) from the container's `clap` property.
903    pub clean_aperture: Option<CleanAperture>,
904
905    /// Pixel aspect ratio from the container's `pasp` property.
906    pub pixel_aspect_ratio: Option<PixelAspectRatio>,
907
908    /// Content light level from the container's `clli` property.
909    pub content_light_level: Option<ContentLightLevel>,
910
911    /// Mastering display colour volume from the container's `mdcv` property.
912    pub mastering_display: Option<MasteringDisplayColourVolume>,
913
914    /// Content colour volume from the container's `cclv` property.
915    pub content_colour_volume: Option<ContentColourVolume>,
916
917    /// Ambient viewing environment from the container's `amve` property.
918    pub ambient_viewing: Option<AmbientViewingEnvironment>,
919
920    /// Operating point selector from the container's `a1op` property.
921    pub operating_point: Option<OperatingPointSelector>,
922
923    /// Layer selector from the container's `lsel` property.
924    pub layer_selector: Option<LayerSelector>,
925
926    /// AV1 layered image indexing from the container's `a1lx` property.
927    pub layered_image_indexing: Option<AV1LayeredImageIndexing>,
928
929    /// EXIF metadata from a `cdsc`-linked `Exif` item.
930    ///
931    /// Raw EXIF data (TIFF header onwards), with the 4-byte AVIF offset prefix stripped.
932    pub exif: Option<TryVec<u8>>,
933
934    /// XMP metadata from a `cdsc`-linked `mime` item.
935    ///
936    /// Raw XMP/XML data as UTF-8.
937    pub xmp: Option<TryVec<u8>>,
938
939    /// Gain map metadata from a `tmap` derived image item.
940    pub gain_map_metadata: Option<GainMapMetadata>,
941
942    /// AV1-encoded gain map image data.
943    pub gain_map_item: Option<TryVec<u8>>,
944
945    /// Color information for the alternate (HDR) rendition from the `tmap` item.
946    pub gain_map_color_info: Option<ColorInformation>,
947
948    /// Depth auxiliary image data, if present.
949    pub depth_item: Option<TryVec<u8>>,
950
951    /// Width of the depth auxiliary image (from `ispe`).
952    pub depth_width: u32,
953
954    /// Height of the depth auxiliary image (from `ispe`).
955    pub depth_height: u32,
956
957    /// AV1 codec configuration for the depth auxiliary item.
958    pub depth_av1_config: Option<AV1Config>,
959
960    /// Color information for the depth auxiliary item.
961    pub depth_color_info: Option<ColorInformation>,
962
963    /// Major brand from the `ftyp` box (e.g., `*b"avif"` or `*b"avis"`).
964    pub major_brand: [u8; 4],
965
966    /// Compatible brands from the `ftyp` box.
967    pub compatible_brands: std::vec::Vec<[u8; 4]>,
968}
969
#[cfg(feature = "eager")]
#[allow(deprecated)]
impl AvifData {
    /// Get the full gain map bundle, if present.
    ///
    /// Copies the gain map metadata and image data out of this `AvifData`
    /// (only `&self` is borrowed — the fields are left in place, not consumed)
    /// and returns an [`AvifGainMap`]. Returns `None` if either the gain map
    /// metadata or the gain map image data is missing.
    pub fn gain_map(&self) -> Option<AvifGainMap> {
        let metadata = self.gain_map_metadata.as_ref()?.clone();
        let gain_map_data = self.gain_map_item.as_ref()?.to_vec();
        Some(AvifGainMap {
            metadata,
            gain_map_data,
            // Colour info for the alternate (HDR) rendition travels with the bundle.
            alt_color_info: self.gain_map_color_info.clone(),
        })
    }

    /// Get the depth auxiliary image bundle, if present.
    ///
    /// Returns [`AvifDepthMap`] with a copy of the raw AV1 depth data,
    /// its dimensions, and codec/color info. Returns `None` if no depth
    /// auxiliary item is present.
    pub fn depth_map(&self) -> Option<AvifDepthMap> {
        let data = self.depth_item.as_ref()?.to_vec();
        Some(AvifDepthMap {
            data,
            width: self.depth_width,
            height: self.depth_height,
            av1_config: self.depth_av1_config.clone(),
            color_info: self.depth_color_info.clone(),
        })
    }
}
1002
1003// # Memory Usage
1004//
1005// This implementation loads all image data into owned vectors (`TryVec<u8>`), which has
1006// memory implications depending on the file type:
1007//
1008// - **Static images**: Single copy of compressed data (~5-50KB typical)
1009//   - `primary_item`: compressed AV1 data
1010//   - `alpha_item`: compressed alpha data (if present)
1011//
1012// - **Grid images**: All tiles loaded (~100KB-2MB for large grids)
1013//   - `grid_tiles`: one compressed tile per grid cell
1014//
1015// - **Animated images**: All frames loaded eagerly (⚠️ HIGH MEMORY)
1016//   - Internal mdat boxes: ~500KB for 95-frame video
1017//   - Extracted frames: ~500KB duplicated in `animation.frames[].data`
1018//   - **Total: ~2× file size in memory**
1019//
1020// For large animated files, consider using a streaming approach or processing frames
1021// individually rather than loading the entire `AvifData` structure.
1022
#[cfg(feature = "eager")]
#[allow(deprecated)]
impl AvifData {
    /// Eagerly reads and parses an AVIF file from `reader`.
    #[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader()` instead")]
    pub fn from_reader<R: Read>(reader: &mut R) -> Result<Self> {
        read_avif(reader)
    }

    /// Parses AV1 data to get basic properties of the opaque channel
    pub fn primary_item_metadata(&self) -> Result<AV1Metadata> {
        AV1Metadata::parse_av1_bitstream(&self.primary_item)
    }

    /// Parses AV1 data to get basic properties about the alpha channel, if any
    pub fn alpha_item_metadata(&self) -> Result<Option<AV1Metadata>> {
        // Equivalent to `map(..).transpose()`: absent alpha is not an error.
        match self.alpha_item.as_deref() {
            None => Ok(None),
            Some(payload) => AV1Metadata::parse_av1_bitstream(payload).map(Some),
        }
    }
}
1041
/// Chroma subsampling configuration for AV1/AVIF.
///
/// `(false, false)` = 4:4:4 (no subsampling).
/// `(true, true)` = 4:2:0 (both axes subsampled).
/// `(true, false)` = 4:2:2 (horizontal only).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ChromaSubsampling {
    /// Whether the horizontal (X) axis is subsampled.
    pub horizontal: bool,
    /// Whether the vertical (Y) axis is subsampled.
    pub vertical: bool,
}

impl ChromaSubsampling {
    /// 4:4:4 — full-resolution chroma, neither axis subsampled.
    pub const NONE: Self = Self { horizontal: false, vertical: false };
    /// 4:2:0 — chroma halved on both axes.
    pub const YUV420: Self = Self { horizontal: true, vertical: true };
    /// 4:2:2 — chroma halved horizontally only.
    pub const YUV422: Self = Self { horizontal: true, vertical: false };
}

impl From<(bool, bool)> for ChromaSubsampling {
    /// Builds a configuration from an `(x_subsampled, y_subsampled)` pair.
    fn from(axes: (bool, bool)) -> Self {
        let (horizontal, vertical) = axes;
        Self { horizontal, vertical }
    }
}

impl From<ChromaSubsampling> for (bool, bool) {
    /// Converts back to the `(x_subsampled, y_subsampled)` pair form.
    fn from(cs: ChromaSubsampling) -> Self {
        let ChromaSubsampling { horizontal, vertical } = cs;
        (horizontal, vertical)
    }
}
1075
/// AV1 sequence header metadata parsed from an OBU bitstream.
///
/// See [`AvifParser::primary_metadata()`] and [`AV1Metadata::parse_av1_bitstream()`].
#[non_exhaustive]
#[derive(Debug, Clone)]
pub struct AV1Metadata {
    /// Should be true for non-animated AVIF
    pub still_picture: bool,
    /// Maximum frame width in pixels, from the sequence header.
    pub max_frame_width: NonZeroU32,
    /// Maximum frame height in pixels, from the sequence header.
    pub max_frame_height: NonZeroU32,
    /// 8, 10, or 12
    pub bit_depth: u8,
    /// 0, 1 or 2 for the level of complexity
    pub seq_profile: u8,
    /// Chroma subsampling. Use named fields (`horizontal`, `vertical`) or
    /// constants like [`ChromaSubsampling::YUV420`].
    pub chroma_subsampling: ChromaSubsampling,
    /// Monochrome flag from the sequence header's color config.
    pub monochrome: bool,
    /// AV1 base quantizer index (0-255) from the first frame header.
    /// `None` if the frame header could not be parsed.
    /// 0 = lossless candidate, 255 = worst quality.
    pub base_q_idx: Option<u8>,
    /// Whether the encoding is lossless (all quantization parameters are zero
    /// and chroma is not subsampled).
    /// `None` if the frame header could not be parsed.
    pub lossless: Option<bool>,
}
1103
1104impl AV1Metadata {
1105    /// Parses raw AV1 bitstream (sequence header + optional frame header).
1106    ///
1107    /// Extracts sequence-level metadata and attempts to parse the first frame
1108    /// header for quantization/lossless detection.
1109    ///
1110    /// This is for the bare image payload from an encoder, not an AVIF/HEIF file.
1111    /// To parse AVIF files, see [`AvifParser::from_reader()`].
1112    #[inline(never)]
1113    pub fn parse_av1_bitstream(obu_bitstream: &[u8]) -> Result<Self> {
1114        let (h, frame_quant) = obu::parse_obu_with_frame_info(obu_bitstream)?;
1115        let no_chroma_subsampling = !h.color.chroma_subsampling.horizontal
1116            && !h.color.chroma_subsampling.vertical;
1117        Ok(Self {
1118            still_picture: h.still_picture,
1119            max_frame_width: h.max_frame_width,
1120            max_frame_height: h.max_frame_height,
1121            bit_depth: h.color.bit_depth,
1122            seq_profile: h.seq_profile,
1123            chroma_subsampling: h.color.chroma_subsampling,
1124            monochrome: h.color.monochrome,
1125            base_q_idx: frame_quant.map(|fq| fq.base_q_idx),
1126            lossless: frame_quant.map(|fq| fq.coded_lossless && no_chroma_subsampling),
1127        })
1128    }
1129}
1130
/// A single frame from an animated AVIF, with zero-copy when possible.
///
/// The `data` field is `Cow::Borrowed` when the frame lives in a single
/// contiguous mdat extent, and `Cow::Owned` when extents must be concatenated.
pub struct FrameRef<'a> {
    /// Compressed payload for the color frame.
    pub data: Cow<'a, [u8]>,
    /// Alpha channel data for this frame, if the animation has a separate alpha track.
    pub alpha_data: Option<Cow<'a, [u8]>>,
    /// Display duration of this frame, in milliseconds.
    pub duration_ms: u32,
}
1141
/// Byte range of a media data box within the file.
struct MdatBounds {
    /// Absolute byte offset of the mdat payload within the file.
    offset: u64,
    /// Length of the mdat payload in bytes.
    length: u64,
}
1147
/// Where an item's data lives: construction method + extent ranges.
struct ItemExtents {
    /// How extent offsets are interpreted (e.g. `ConstructionMethod::File`
    /// for file-absolute offsets).
    construction_method: ConstructionMethod,
    /// The item's extents, in declaration order; concatenated to form the payload.
    extents: TryVec<ExtentRange>,
}
1153
1154/// Zero-copy AVIF parser backed by a borrowed or owned byte buffer.
1155///
1156/// `AvifParser` records byte offsets during parsing but does **not** copy
1157/// mdat payload data. Data access methods return `Cow<[u8]>` — borrowed
1158/// when the item is a single contiguous extent, owned when extents must
1159/// be concatenated.
1160///
1161/// # Constructors
1162///
1163/// | Method | Lifetime | Zero-copy? |
1164/// |--------|----------|------------|
1165/// | [`from_bytes`](Self::from_bytes) | `'data` | Yes — borrows the slice |
1166/// | [`from_owned`](Self::from_owned) | `'static` | Within the owned buffer |
1167/// | [`from_reader`](Self::from_reader) | `'static` | Reads all, then owned |
1168///
1169/// # Example
1170///
1171/// ```no_run
1172/// use zenavif_parse::AvifParser;
1173///
1174/// let bytes = std::fs::read("image.avif")?;
1175/// let parser = AvifParser::from_bytes(&bytes)?;
1176/// let primary = parser.primary_data()?; // Cow::Borrowed for single-extent
1177/// # Ok::<(), Box<dyn std::error::Error>>(())
1178/// ```
pub struct AvifParser<'data> {
    /// Full file bytes (borrowed or owned); extents below resolve into this buffer.
    raw: Cow<'data, [u8]>,
    /// Byte ranges of every non-empty top-level mdat box within `raw`.
    mdat_bounds: TryVec<MdatBounds>,
    /// Copy of the item data (`idat`) box contents, if present.
    idat: Option<TryVec<u8>>,
    /// Extent list for the primary item's payload.
    primary: ItemExtents,
    /// Extent list for the alpha auxiliary item, if any.
    alpha: Option<ItemExtents>,
    // Grid (tiled) images: layout plus per-tile extents (empty for non-grids).
    grid_config: Option<GridConfig>,
    tiles: TryVec<ItemExtents>,
    /// Sample tables and timescales for animated (avis) files.
    animation_data: Option<AnimationParserData>,
    /// True when a `prem` reference links the primary item to its alpha item.
    premultiplied_alpha: bool,
    // Container-level properties recorded for the primary item.
    av1_config: Option<AV1Config>,
    color_info: Option<ColorInformation>,
    rotation: Option<ImageRotation>,
    mirror: Option<ImageMirror>,
    clean_aperture: Option<CleanAperture>,
    pixel_aspect_ratio: Option<PixelAspectRatio>,
    content_light_level: Option<ContentLightLevel>,
    mastering_display: Option<MasteringDisplayColourVolume>,
    content_colour_volume: Option<ContentColourVolume>,
    ambient_viewing: Option<AmbientViewingEnvironment>,
    operating_point: Option<OperatingPointSelector>,
    layer_selector: Option<LayerSelector>,
    layered_image_indexing: Option<AV1LayeredImageIndexing>,
    // Metadata items linked to the primary item via `cdsc` references.
    exif_item: Option<ItemExtents>,
    xmp_item: Option<ItemExtents>,
    // Gain map (`tmap` derived image) state.
    gain_map_metadata: Option<GainMapMetadata>,
    gain_map: Option<ItemExtents>,
    gain_map_color_info: Option<ColorInformation>,
    // Depth auxiliary item state: extents, dimensions (from `ispe`), codec/color info.
    depth_item: Option<ItemExtents>,
    depth_width: u32,
    depth_height: u32,
    depth_av1_config: Option<AV1Config>,
    depth_color_info: Option<ColorInformation>,
    // Brands recorded from the `ftyp` box.
    major_brand: [u8; 4],
    compatible_brands: std::vec::Vec<[u8; 4]>,
}
1215
/// Per-track state needed to decode animation frames on demand.
struct AnimationParserData {
    /// Ticks per second for the color track.
    media_timescale: u32,
    /// Sample table for the color track.
    sample_table: SampleTable,
    /// Ticks per second for the alpha track, if present.
    alpha_media_timescale: Option<u32>,
    /// Sample table for the alpha track, if present.
    alpha_sample_table: Option<SampleTable>,
    /// Loop count for the animation.
    loop_count: u32,
    /// Codec configuration from the color track's sample description (stsd).
    codec_config: TrackCodecConfig,
}
1224
/// Animation metadata from [`AvifParser`]
#[derive(Debug, Clone, Copy)]
pub struct AnimationInfo {
    /// Total number of animation frames.
    pub frame_count: usize,
    /// Loop count for the animation.
    pub loop_count: u32,
    /// Whether animation has a separate alpha track.
    pub has_alpha: bool,
    /// Media timescale (ticks per second) for the color track.
    pub timescale: u32,
}
1235
/// Parsed structure from the box-level parse pass (no mdat data).
struct ParsedStructure {
    /// `None` for pure AVIF sequences (`avis` brand) that have only `moov`+`mdat`.
    meta: Option<AvifInternalMeta>,
    /// Byte ranges of every non-empty top-level mdat box.
    mdat_bounds: TryVec<MdatBounds>,
    /// Track data extracted from the `moov` box, when the file is animated.
    animation_data: Option<ParsedAnimationData>,
    /// Major brand from the `ftyp` box.
    major_brand: [u8; 4],
    /// Compatible brands from the `ftyp` box.
    compatible_brands: std::vec::Vec<[u8; 4]>,
}
1245
1246impl<'data> AvifParser<'data> {
1247    // ========================================
1248    // Constructors
1249    // ========================================
1250
1251    /// Parse AVIF from a borrowed byte slice (true zero-copy).
1252    ///
1253    /// The returned parser borrows `data` — single-extent items will be
1254    /// returned as `Cow::Borrowed` slices into this buffer.
1255    pub fn from_bytes(data: &'data [u8]) -> Result<Self> {
1256        Self::from_bytes_with_config(data, &DecodeConfig::default(), &Unstoppable)
1257    }
1258
1259    /// Parse AVIF from a borrowed byte slice with resource limits.
1260    pub fn from_bytes_with_config(
1261        data: &'data [u8],
1262        config: &DecodeConfig,
1263        stop: &dyn Stop,
1264    ) -> Result<Self> {
1265        let parsed = Self::parse_raw(data, config, stop)?;
1266        Self::build(Cow::Borrowed(data), parsed, config)
1267    }
1268
1269    /// Parse AVIF from an owned buffer.
1270    ///
1271    /// The returned parser owns the data — single-extent items will still
1272    /// be returned as `Cow::Borrowed` slices (borrowing from the internal buffer).
1273    pub fn from_owned(data: std::vec::Vec<u8>) -> Result<AvifParser<'static>> {
1274        AvifParser::from_owned_with_config(data, &DecodeConfig::default(), &Unstoppable)
1275    }
1276
1277    /// Parse AVIF from an owned buffer with resource limits.
1278    pub fn from_owned_with_config(
1279        data: std::vec::Vec<u8>,
1280        config: &DecodeConfig,
1281        stop: &dyn Stop,
1282    ) -> Result<AvifParser<'static>> {
1283        let parsed = AvifParser::parse_raw(&data, config, stop)?;
1284        AvifParser::build(Cow::Owned(data), parsed, config)
1285    }
1286
1287    /// Parse AVIF from a reader (reads all bytes, then parses).
1288    pub fn from_reader<R: Read>(reader: &mut R) -> Result<AvifParser<'static>> {
1289        AvifParser::from_reader_with_config(reader, &DecodeConfig::default(), &Unstoppable)
1290    }
1291
1292    /// Parse AVIF from a reader with resource limits.
1293    ///
1294    /// If `config.peak_memory_limit` is set, reading is capped at that many
1295    /// bytes to prevent unbounded allocation from an untrusted reader.
1296    pub fn from_reader_with_config<R: Read>(
1297        reader: &mut R,
1298        config: &DecodeConfig,
1299        stop: &dyn Stop,
1300    ) -> Result<AvifParser<'static>> {
1301        let buf = if let Some(limit) = config.peak_memory_limit {
1302            let mut limited = reader.take(limit.saturating_add(1));
1303            let mut buf = std::vec::Vec::new();
1304            limited.read_to_end(&mut buf)?;
1305            if buf.len() as u64 > limit {
1306                return Err(Error::ResourceLimitExceeded(
1307                    "input exceeds peak_memory_limit",
1308                ));
1309            }
1310            buf
1311        } else {
1312            let mut buf = std::vec::Vec::new();
1313            reader.read_to_end(&mut buf)?;
1314            buf
1315        };
1316        AvifParser::from_owned_with_config(buf, config, stop)
1317    }
1318
1319    // ========================================
1320    // Internal: parse pass (records offsets, no mdat copy)
1321    // ========================================
1322
    /// Parse the AVIF box structure from raw bytes, recording mdat offsets
    /// without copying mdat content.
    ///
    /// Validates that `ftyp` comes first with an `avif`/`avis` major brand,
    /// reads at most one `meta` box, collects track information from `moov`,
    /// and records the byte range of every non-empty `mdat` box.
    ///
    /// # Errors
    ///
    /// Returns `Error::InvalidData` if `ftyp` is absent or not first, the
    /// major brand is unsupported, more than one `meta` box is present, or
    /// neither a `meta` box nor animation tracks were found.
    fn parse_raw(data: &[u8], config: &DecodeConfig, stop: &dyn Stop) -> Result<ParsedStructure> {
        let parse_opts = ParseOptions { lenient: config.lenient };
        let mut cursor = std::io::Cursor::new(data);
        let mut f = OffsetReader::new(&mut cursor);
        let mut iter = BoxIter::with_max_remaining(&mut f, data.len() as u64);

        // 'ftyp' box must occur first; see ISO 14496-12:2015 § 4.3.1
        let (major_brand, compatible_brands) = if let Some(mut b) = iter.next_box()? {
            if b.head.name == BoxType::FileTypeBox {
                let ftyp = read_ftyp(&mut b)?;
                // NOTE(review): this rejects files whose major brand is e.g. 'mif1'
                // even if 'avif' appears in compatible_brands — confirm intended.
                if ftyp.major_brand != b"avif" && ftyp.major_brand != b"avis" {
                    return Err(Error::InvalidData("ftyp must be 'avif' or 'avis'"));
                }
                let major = ftyp.major_brand.value;
                let compat = ftyp.compatible_brands.iter().map(|b| b.value).collect();
                (major, compat)
            } else {
                return Err(Error::InvalidData("'ftyp' box must occur first"));
            }
        } else {
            return Err(Error::InvalidData("'ftyp' box must occur first"));
        };

        let mut meta = None;
        let mut mdat_bounds = TryVec::new();
        let mut animation_data: Option<ParsedAnimationData> = None;

        while let Some(mut b) = iter.next_box()? {
            // Allow the caller to abort a long parse between boxes.
            stop.check()?;

            match b.head.name {
                BoxType::MetadataBox => {
                    if meta.is_some() {
                        return Err(Error::InvalidData(
                            "There should be zero or one meta boxes per ISO 14496-12:2015 § 8.11.1.1",
                        ));
                    }
                    meta = Some(read_avif_meta(&mut b, &parse_opts)?);
                }
                BoxType::MovieBox => {
                    let tracks = read_moov(&mut b)?;
                    if !tracks.is_empty() {
                        animation_data = Some(associate_tracks(tracks)?);
                    }
                }
                BoxType::MediaDataBox => {
                    // Record only the payload range; empty mdat boxes are ignored.
                    if b.bytes_left() > 0 {
                        let offset = b.offset();
                        let length = b.bytes_left();
                        mdat_bounds.push(MdatBounds { offset, length })?;
                    }
                    // Skip the content — we'll slice into raw later
                    skip_box_content(&mut b)?;
                }
                _ => skip_box_content(&mut b)?,
            }

            check_parser_state(&b.head, &b.content)?;
        }

        // meta is required for still images, but pure AVIF sequences (avis brand)
        // can have only moov+mdat with no meta box.
        if meta.is_none() && animation_data.is_none() {
            return Err(Error::InvalidData("missing meta"));
        }

        Ok(ParsedStructure { meta, mdat_bounds, animation_data, major_brand, compatible_brands })
    }
1393
1394    /// Build an AvifParser from raw bytes + parsed structure.
1395    fn build(raw: Cow<'data, [u8]>, parsed: ParsedStructure, config: &DecodeConfig) -> Result<Self> {
1396        let tracker = ResourceTracker::new(config);
1397
1398        // Store animation metadata if present
1399        let animation_data = if let Some(anim) = parsed.animation_data {
1400            tracker.validate_animation_frames(anim.color_sample_table.sample_sizes.len() as u32)?;
1401            Some(AnimationParserData {
1402                media_timescale: anim.color_timescale,
1403                sample_table: anim.color_sample_table,
1404                alpha_media_timescale: anim.alpha_timescale,
1405                alpha_sample_table: anim.alpha_sample_table,
1406                loop_count: anim.loop_count,
1407                codec_config: anim.color_codec_config,
1408            })
1409        } else {
1410            None
1411        };
1412
1413        // Pure sequence (no meta box): only animation methods will work.
1414        // Use codec config from the color track's stsd if available.
1415        let Some(meta) = parsed.meta else {
1416            let track_config = animation_data.as_ref()
1417                .map(|a| a.codec_config.clone())
1418                .unwrap_or_default();
1419            return Ok(Self {
1420                raw,
1421                mdat_bounds: parsed.mdat_bounds,
1422                idat: None,
1423                primary: ItemExtents { construction_method: ConstructionMethod::File, extents: TryVec::new() },
1424                alpha: None,
1425                grid_config: None,
1426                tiles: TryVec::new(),
1427                animation_data,
1428                premultiplied_alpha: false,
1429                av1_config: track_config.av1_config,
1430                color_info: track_config.color_info,
1431                rotation: None,
1432                mirror: None,
1433                clean_aperture: None,
1434                pixel_aspect_ratio: None,
1435                content_light_level: None,
1436                mastering_display: None,
1437                content_colour_volume: None,
1438                ambient_viewing: None,
1439                operating_point: None,
1440                layer_selector: None,
1441                layered_image_indexing: None,
1442                exif_item: None,
1443                xmp_item: None,
1444                gain_map_metadata: None,
1445                gain_map: None,
1446                gain_map_color_info: None,
1447                depth_item: None,
1448                depth_width: 0,
1449                depth_height: 0,
1450                depth_av1_config: None,
1451                depth_color_info: None,
1452                major_brand: parsed.major_brand,
1453                compatible_brands: parsed.compatible_brands,
1454            });
1455        };
1456
1457        // Get primary item extents
1458        let primary = Self::get_item_extents(&meta, meta.primary_item_id)?;
1459
1460        // Find alpha item and get its extents
1461        let alpha_item_id = meta
1462            .item_references
1463            .iter()
1464            .filter(|iref| {
1465                iref.to_item_id == meta.primary_item_id
1466                    && iref.from_item_id != meta.primary_item_id
1467                    && iref.item_type == b"auxl"
1468            })
1469            .map(|iref| iref.from_item_id)
1470            .find(|&item_id| {
1471                meta.properties.iter().any(|prop| {
1472                    prop.item_id == item_id
1473                        && match &prop.property {
1474                            ItemProperty::AuxiliaryType(urn) => {
1475                                urn.type_subtype().0 == b"urn:mpeg:mpegB:cicp:systems:auxiliary:alpha"
1476                            }
1477                            _ => false,
1478                        }
1479                })
1480            });
1481
1482        let alpha = alpha_item_id
1483            .map(|id| Self::get_item_extents(&meta, id))
1484            .transpose()?;
1485
1486        // Check for premultiplied alpha
1487        let premultiplied_alpha = alpha_item_id.is_some_and(|alpha_id| {
1488            meta.item_references.iter().any(|iref| {
1489                iref.from_item_id == meta.primary_item_id
1490                    && iref.to_item_id == alpha_id
1491                    && iref.item_type == b"prem"
1492            })
1493        });
1494
1495        // Find depth auxiliary item (auxl reference with depth auxC type)
1496        let depth_item_id = meta
1497            .item_references
1498            .iter()
1499            .filter(|iref| {
1500                iref.to_item_id == meta.primary_item_id
1501                    && iref.from_item_id != meta.primary_item_id
1502                    && iref.item_type == b"auxl"
1503            })
1504            .map(|iref| iref.from_item_id)
1505            .find(|&item_id| {
1506                // Skip the alpha item if we already found one
1507                if alpha_item_id == Some(item_id) {
1508                    return false;
1509                }
1510                meta.properties.iter().any(|prop| {
1511                    prop.item_id == item_id
1512                        && match &prop.property {
1513                            ItemProperty::AuxiliaryType(urn) => {
1514                                is_depth_auxiliary_urn(urn.type_subtype().0)
1515                            }
1516                            _ => false,
1517                        }
1518                })
1519            });
1520
1521        let (depth_item, depth_width, depth_height, depth_av1_config, depth_color_info) =
1522            if let Some(depth_id) = depth_item_id {
1523                let extents = Self::get_item_extents(&meta, depth_id)?;
1524                // Get dimensions from ispe property
1525                let dims = meta.properties.iter().find_map(|p| {
1526                    if p.item_id == depth_id {
1527                        match &p.property {
1528                            ItemProperty::ImageSpatialExtents(e) => Some((e.width, e.height)),
1529                            _ => None,
1530                        }
1531                    } else {
1532                        None
1533                    }
1534                });
1535                let (w, h) = dims.unwrap_or((0, 0));
1536                // Get av1C property
1537                let av1c = meta.properties.iter().find_map(|p| {
1538                    if p.item_id == depth_id {
1539                        match &p.property {
1540                            ItemProperty::AV1Config(c) => Some(c.clone()),
1541                            _ => None,
1542                        }
1543                    } else {
1544                        None
1545                    }
1546                });
1547                // Get colr property
1548                let colr = meta.properties.iter().find_map(|p| {
1549                    if p.item_id == depth_id {
1550                        match &p.property {
1551                            ItemProperty::ColorInformation(c) => Some(c.clone()),
1552                            _ => None,
1553                        }
1554                    } else {
1555                        None
1556                    }
1557                });
1558                (Some(extents), w, h, av1c, colr)
1559            } else {
1560                (None, 0, 0, None, None)
1561            };
1562
1563        // Find EXIF/XMP items linked via cdsc references to the primary item
1564        let mut exif_item = None;
1565        let mut xmp_item = None;
1566        for iref in meta.item_references.iter() {
1567            if iref.to_item_id != meta.primary_item_id || iref.item_type != b"cdsc" {
1568                continue;
1569            }
1570            let desc_item_id = iref.from_item_id;
1571            let Some(info) = meta.item_infos.iter().find(|i| i.item_id == desc_item_id) else {
1572                continue;
1573            };
1574            if info.item_type == b"Exif" && exif_item.is_none() {
1575                exif_item = Some(Self::get_item_extents(&meta, desc_item_id)?);
1576            } else if info.item_type == b"mime" && xmp_item.is_none() {
1577                xmp_item = Some(Self::get_item_extents(&meta, desc_item_id)?);
1578            }
1579        }
1580
1581        // Check if primary item is a grid (tiled image)
1582        let is_grid = meta
1583            .item_infos
1584            .iter()
1585            .find(|x| x.item_id == meta.primary_item_id)
1586            .is_some_and(|info| info.item_type == b"grid");
1587
1588        // Extract grid configuration and tile extents if this is a grid
1589        let (grid_config, tiles) = if is_grid {
1590            let mut tiles_with_index: TryVec<(u32, u16)> = TryVec::new();
1591            for iref in meta.item_references.iter() {
1592                if iref.from_item_id == meta.primary_item_id && iref.item_type == b"dimg" {
1593                    tiles_with_index.push((iref.to_item_id, iref.reference_index))?;
1594                }
1595            }
1596
1597            tracker.validate_grid_tiles(tiles_with_index.len() as u32)?;
1598            tiles_with_index.sort_by_key(|&(_, idx)| idx);
1599
1600            let mut tile_extents = TryVec::new();
1601            for (tile_id, _) in tiles_with_index.iter() {
1602                tile_extents.push(Self::get_item_extents(&meta, *tile_id)?)?;
1603            }
1604
1605            let mut tile_ids = TryVec::new();
1606            for (tile_id, _) in tiles_with_index.iter() {
1607                tile_ids.push(*tile_id)?;
1608            }
1609
1610            let grid_config = Self::calculate_grid_config(&meta, &tile_ids)?;
1611
1612            // AVIF 1.2: transformative properties SHALL NOT be on grid tile items
1613            for (tile_id, _) in tiles_with_index.iter() {
1614                for prop in meta.properties.iter() {
1615                    if prop.item_id == *tile_id {
1616                        match &prop.property {
1617                            ItemProperty::Rotation(_)
1618                            | ItemProperty::Mirror(_)
1619                            | ItemProperty::CleanAperture(_) => {
1620                                warn!("grid tile {} has a transformative property (irot/imir/clap), violating AVIF spec", tile_id);
1621                            }
1622                            _ => {}
1623                        }
1624                    }
1625                }
1626            }
1627
1628            (Some(grid_config), tile_extents)
1629        } else {
1630            (None, TryVec::new())
1631        };
1632
1633        // Detect gain map (tmap derived image item)
1634        let (gain_map_metadata, gain_map, gain_map_color_info) = {
1635            let tmap_item = meta.item_infos.iter()
1636                .find(|info| info.item_type == b"tmap");
1637
1638            if let Some(tmap_info) = tmap_item {
1639                let tmap_id = tmap_info.item_id;
1640
1641                // Find dimg references FROM tmap TO its inputs
1642                let mut inputs: TryVec<(u32, u16)> = TryVec::new();
1643                for iref in meta.item_references.iter() {
1644                    if iref.from_item_id == tmap_id && iref.item_type == b"dimg" {
1645                        inputs.push((iref.to_item_id, iref.reference_index))?;
1646                    }
1647                }
1648                inputs.sort_by_key(|&(_, idx)| idx);
1649
1650                if inputs.len() >= 2 {
1651                    let base_item_id = inputs[0].0;
1652                    let gmap_item_id = inputs[1].0;
1653
1654                    if base_item_id == meta.primary_item_id {
1655                        // Read tmap item's data payload (ToneMapImage)
1656                        let tmap_extents = Self::get_item_extents(&meta, tmap_id)?;
1657                        let tmap_data = Self::resolve_extents_from_raw(
1658                            raw.as_ref(), &parsed.mdat_bounds, &tmap_extents,
1659                        )?;
1660                        let metadata = parse_tone_map_image(&tmap_data)?;
1661
1662                        // Get gain map image extents
1663                        let gmap_extents = Self::get_item_extents(&meta, gmap_item_id)?;
1664
1665                        // Get alternate color info from tmap item's properties
1666                        let alt_color = meta.properties.iter().find_map(|p| {
1667                            if p.item_id == tmap_id {
1668                                match &p.property {
1669                                    ItemProperty::ColorInformation(c) => Some(c.clone()),
1670                                    _ => None,
1671                                }
1672                            } else {
1673                                None
1674                            }
1675                        });
1676
1677                        (Some(metadata), Some(gmap_extents), alt_color)
1678                    } else {
1679                        (None, None, None)
1680                    }
1681                } else {
1682                    (None, None, None)
1683                }
1684            } else {
1685                (None, None, None)
1686            }
1687        };
1688
1689        // Extract properties for the primary item
1690        macro_rules! find_prop {
1691            ($variant:ident) => {
1692                meta.properties.iter().find_map(|p| {
1693                    if p.item_id == meta.primary_item_id {
1694                        match &p.property {
1695                            ItemProperty::$variant(c) => Some(c.clone()),
1696                            _ => None,
1697                        }
1698                    } else {
1699                        None
1700                    }
1701                })
1702            };
1703        }
1704
1705        let track_config = animation_data.as_ref().map(|a| &a.codec_config);
1706        let av1_config = find_prop!(AV1Config)
1707            .or_else(|| track_config.and_then(|c| c.av1_config.clone()));
1708        let color_info = find_prop!(ColorInformation)
1709            .or_else(|| track_config.and_then(|c| c.color_info.clone()));
1710        let rotation = find_prop!(Rotation);
1711        let mirror = find_prop!(Mirror);
1712        let clean_aperture = find_prop!(CleanAperture);
1713        let pixel_aspect_ratio = find_prop!(PixelAspectRatio);
1714        let content_light_level = find_prop!(ContentLightLevel);
1715        let mastering_display = find_prop!(MasteringDisplayColourVolume);
1716        let content_colour_volume = find_prop!(ContentColourVolume);
1717        let ambient_viewing = find_prop!(AmbientViewingEnvironment);
1718        let operating_point = find_prop!(OperatingPointSelector);
1719        let layer_selector = find_prop!(LayerSelector);
1720        let layered_image_indexing = find_prop!(AV1LayeredImageIndexing);
1721
1722        // Clone idat
1723        let idat = if let Some(ref idat_data) = meta.idat {
1724            let mut cloned = TryVec::new();
1725            cloned.extend_from_slice(idat_data)?;
1726            Some(cloned)
1727        } else {
1728            None
1729        };
1730
1731        Ok(Self {
1732            raw,
1733            mdat_bounds: parsed.mdat_bounds,
1734            idat,
1735            primary,
1736            alpha,
1737            grid_config,
1738            tiles,
1739            animation_data,
1740            premultiplied_alpha,
1741            av1_config,
1742            color_info,
1743            rotation,
1744            mirror,
1745            clean_aperture,
1746            pixel_aspect_ratio,
1747            content_light_level,
1748            mastering_display,
1749            content_colour_volume,
1750            ambient_viewing,
1751            operating_point,
1752            layer_selector,
1753            layered_image_indexing,
1754            exif_item,
1755            xmp_item,
1756            gain_map_metadata,
1757            gain_map,
1758            gain_map_color_info,
1759            depth_item,
1760            depth_width,
1761            depth_height,
1762            depth_av1_config,
1763            depth_color_info,
1764            major_brand: parsed.major_brand,
1765            compatible_brands: parsed.compatible_brands,
1766        })
1767    }
1768
1769    // ========================================
1770    // Internal helpers
1771    // ========================================
1772
1773    /// Get item extents (construction method + ranges) from metadata.
1774    fn get_item_extents(meta: &AvifInternalMeta, item_id: u32) -> Result<ItemExtents> {
1775        let item = meta
1776            .iloc_items
1777            .iter()
1778            .find(|item| item.item_id == item_id)
1779            .ok_or(Error::InvalidData("item not found in iloc"))?;
1780
1781        let mut extents = TryVec::new();
1782        for extent in &item.extents {
1783            extents.push(extent.extent_range.clone())?;
1784        }
1785        Ok(ItemExtents {
1786            construction_method: item.construction_method,
1787            extents,
1788        })
1789    }
1790
1791    /// Resolve file-based item extents from a raw buffer during `build()`,
1792    /// before `self` exists. Returns owned data (small payloads like tmap).
1793    fn resolve_extents_from_raw(
1794        raw: &[u8],
1795        mdat_bounds: &[MdatBounds],
1796        item: &ItemExtents,
1797    ) -> Result<std::vec::Vec<u8>> {
1798        if item.construction_method != ConstructionMethod::File {
1799            return Err(Error::Unsupported("tmap item must use file construction method"));
1800        }
1801        let mut data = std::vec::Vec::new();
1802        for extent in &item.extents {
1803            let file_offset = extent.start();
1804            let start = usize::try_from(file_offset)?;
1805            let end = match extent {
1806                ExtentRange::WithLength(range) => {
1807                    let len = range.end.checked_sub(range.start)
1808                        .ok_or(Error::InvalidData("extent range start > end"))?;
1809                    start.checked_add(usize::try_from(len)?)
1810                        .ok_or(Error::InvalidData("extent end overflow"))?
1811                }
1812                ExtentRange::ToEnd(_) => {
1813                    // Find the mdat that contains this offset
1814                    let mut found_end = raw.len();
1815                    for mdat in mdat_bounds {
1816                        if file_offset >= mdat.offset && file_offset < mdat.offset + mdat.length {
1817                            found_end = usize::try_from(mdat.offset + mdat.length)?;
1818                            break;
1819                        }
1820                    }
1821                    found_end
1822                }
1823            };
1824            let slice = raw.get(start..end)
1825                .ok_or(Error::InvalidData("tmap extent out of bounds"))?;
1826            data.extend_from_slice(slice);
1827        }
1828        Ok(data)
1829    }
1830
1831    /// Resolve an item's data from the raw buffer, returning `Cow::Borrowed`
1832    /// for single-extent file items and `Cow::Owned` for multi-extent or idat.
1833    fn resolve_item(&self, item: &ItemExtents) -> Result<Cow<'_, [u8]>> {
1834        match item.construction_method {
1835            ConstructionMethod::Idat => self.resolve_idat_extents(&item.extents),
1836            ConstructionMethod::File => self.resolve_file_extents(&item.extents),
1837            ConstructionMethod::Item => Err(Error::Unsupported("construction_method 'item' not supported")),
1838        }
1839    }
1840
1841    /// Resolve file-based extents from the raw buffer.
1842    fn resolve_file_extents(&self, extents: &[ExtentRange]) -> Result<Cow<'_, [u8]>> {
1843        let raw = self.raw.as_ref();
1844
1845        // Fast path: single extent → borrow directly from raw
1846        if extents.len() == 1 {
1847            let extent = &extents[0];
1848            let (start, end) = self.extent_byte_range(extent)?;
1849            let slice = raw.get(start..end).ok_or(Error::InvalidData("extent out of bounds in raw buffer"))?;
1850            return Ok(Cow::Borrowed(slice));
1851        }
1852
1853        // Multi-extent: concatenate into owned buffer
1854        let mut data = TryVec::new();
1855        for extent in extents {
1856            let (start, end) = self.extent_byte_range(extent)?;
1857            let slice = raw.get(start..end).ok_or(Error::InvalidData("extent out of bounds in raw buffer"))?;
1858            data.extend_from_slice(slice)?;
1859        }
1860        Ok(Cow::Owned(data.into_iter().collect()))
1861    }
1862
1863    /// Convert an ExtentRange to a (start, end) byte range within the raw buffer.
1864    fn extent_byte_range(&self, extent: &ExtentRange) -> Result<(usize, usize)> {
1865        let file_offset = extent.start();
1866        let start = usize::try_from(file_offset)?;
1867
1868        match extent {
1869            ExtentRange::WithLength(range) => {
1870                let len = range.end.checked_sub(range.start)
1871                    .ok_or(Error::InvalidData("extent range start > end"))?;
1872                let end = start.checked_add(usize::try_from(len)?)
1873                    .ok_or(Error::InvalidData("extent end overflow"))?;
1874                Ok((start, end))
1875            }
1876            ExtentRange::ToEnd(_) => {
1877                // Find the mdat that contains this offset and use its bounds
1878                for mdat in &self.mdat_bounds {
1879                    if file_offset >= mdat.offset && file_offset < mdat.offset + mdat.length {
1880                        let end = usize::try_from(mdat.offset + mdat.length)?;
1881                        return Ok((start, end));
1882                    }
1883                }
1884                // Fall back to end of raw buffer
1885                Ok((start, self.raw.len()))
1886            }
1887        }
1888    }
1889
1890    /// Resolve idat-based extents.
1891    fn resolve_idat_extents(&self, extents: &[ExtentRange]) -> Result<Cow<'_, [u8]>> {
1892        let idat_data = self.idat.as_ref()
1893            .ok_or(Error::InvalidData("idat box missing but construction_method is Idat"))?;
1894
1895        if extents.len() == 1 {
1896            let extent = &extents[0];
1897            let start = usize::try_from(extent.start())?;
1898            let slice = match extent {
1899                ExtentRange::WithLength(range) => {
1900                    let len = usize::try_from(range.end - range.start)?;
1901                    idat_data.get(start..start + len)
1902                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1903                }
1904                ExtentRange::ToEnd(_) => {
1905                    idat_data.get(start..)
1906                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1907                }
1908            };
1909            return Ok(Cow::Borrowed(slice));
1910        }
1911
1912        // Multi-extent idat: concatenate
1913        let mut data = TryVec::new();
1914        for extent in extents {
1915            let start = usize::try_from(extent.start())?;
1916            let slice = match extent {
1917                ExtentRange::WithLength(range) => {
1918                    let len = usize::try_from(range.end - range.start)?;
1919                    idat_data.get(start..start + len)
1920                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1921                }
1922                ExtentRange::ToEnd(_) => {
1923                    idat_data.get(start..)
1924                        .ok_or(Error::InvalidData("idat extent out of bounds"))?
1925                }
1926            };
1927            data.extend_from_slice(slice)?;
1928        }
1929        Ok(Cow::Owned(data.into_iter().collect()))
1930    }
1931
1932    /// Resolve a single animation frame from the raw buffer.
1933    fn resolve_frame(&self, index: usize) -> Result<FrameRef<'_>> {
1934        let anim = self.animation_data.as_ref()
1935            .ok_or(Error::InvalidData("not an animated AVIF"))?;
1936
1937        if index >= anim.sample_table.sample_sizes.len() {
1938            return Err(Error::InvalidData("frame index out of bounds"));
1939        }
1940
1941        let duration_ms = self.calculate_frame_duration(&anim.sample_table, anim.media_timescale, index)?;
1942        let (offset, size) = self.calculate_sample_location(&anim.sample_table, index)?;
1943
1944        let start = usize::try_from(offset)?;
1945        let end = start.checked_add(size as usize)
1946            .ok_or(Error::InvalidData("frame end overflow"))?;
1947
1948        let raw = self.raw.as_ref();
1949        let slice = raw.get(start..end)
1950            .ok_or(Error::InvalidData("frame not found in raw buffer"))?;
1951
1952        // Resolve alpha frame if alpha track exists and has this index
1953        let alpha_data = if let Some(ref alpha_st) = anim.alpha_sample_table {
1954            let alpha_timescale = anim.alpha_media_timescale.unwrap_or(anim.media_timescale);
1955            if index < alpha_st.sample_sizes.len() {
1956                let (a_offset, a_size) = self.calculate_sample_location(alpha_st, index)?;
1957                let a_start = usize::try_from(a_offset)?;
1958                let a_end = a_start.checked_add(a_size as usize)
1959                    .ok_or(Error::InvalidData("alpha frame end overflow"))?;
1960                let a_slice = raw.get(a_start..a_end)
1961                    .ok_or(Error::InvalidData("alpha frame not found in raw buffer"))?;
1962                let _ = alpha_timescale; // timescale used for duration, which comes from color track
1963                Some(Cow::Borrowed(a_slice))
1964            } else {
1965                warn!("alpha track has fewer frames than color track (index {})", index);
1966                None
1967            }
1968        } else {
1969            None
1970        };
1971
1972        Ok(FrameRef {
1973            data: Cow::Borrowed(slice),
1974            alpha_data,
1975            duration_ms,
1976        })
1977    }
1978
1979    /// Calculate grid configuration from metadata.
1980    fn calculate_grid_config(meta: &AvifInternalMeta, tile_ids: &[u32]) -> Result<GridConfig> {
1981        // Try explicit grid property first
1982        for prop in &meta.properties {
1983            if prop.item_id == meta.primary_item_id
1984                && let ItemProperty::ImageGrid(grid) = &prop.property {
1985                    return Ok(grid.clone());
1986                }
1987        }
1988
1989        // Fall back to ispe calculation
1990        let grid_dims = meta
1991            .properties
1992            .iter()
1993            .find(|p| p.item_id == meta.primary_item_id)
1994            .and_then(|p| match &p.property {
1995                ItemProperty::ImageSpatialExtents(e) => Some(e),
1996                _ => None,
1997            });
1998
1999        let tile_dims = tile_ids.first().and_then(|&tile_id| {
2000            meta.properties
2001                .iter()
2002                .find(|p| p.item_id == tile_id)
2003                .and_then(|p| match &p.property {
2004                    ItemProperty::ImageSpatialExtents(e) => Some(e),
2005                    _ => None,
2006                })
2007        });
2008
2009        if let (Some(grid), Some(tile)) = (grid_dims, tile_dims)
2010            && tile.width != 0
2011                && tile.height != 0
2012                && grid.width % tile.width == 0
2013                && grid.height % tile.height == 0
2014            {
2015                let columns = grid.width / tile.width;
2016                let rows = grid.height / tile.height;
2017
2018                if columns <= 255 && rows <= 255 {
2019                    return Ok(GridConfig {
2020                        rows: rows as u8,
2021                        columns: columns as u8,
2022                        output_width: grid.width,
2023                        output_height: grid.height,
2024                    });
2025                }
2026            }
2027
2028        let tile_count = tile_ids.len();
2029        Ok(GridConfig {
2030            rows: tile_count.min(255) as u8,
2031            columns: 1,
2032            output_width: 0,
2033            output_height: 0,
2034        })
2035    }
2036
2037    /// Calculate frame duration from sample table.
2038    fn calculate_frame_duration(
2039        &self,
2040        st: &SampleTable,
2041        timescale: u32,
2042        index: usize,
2043    ) -> Result<u32> {
2044        let mut current_sample = 0;
2045        for entry in &st.time_to_sample {
2046            if current_sample + entry.sample_count as usize > index {
2047                let duration_ms = if timescale > 0 {
2048                    ((entry.sample_delta as u64) * 1000) / (timescale as u64)
2049                } else {
2050                    0
2051                };
2052                return Ok(u32::try_from(duration_ms).unwrap_or(u32::MAX));
2053            }
2054            current_sample += entry.sample_count as usize;
2055        }
2056        Ok(0)
2057    }
2058
2059    /// Look up precomputed sample location (offset and size) from sample table.
2060    fn calculate_sample_location(&self, st: &SampleTable, index: usize) -> Result<(u64, u32)> {
2061        let offset = *st
2062            .sample_offsets
2063            .get(index)
2064            .ok_or(Error::InvalidData("sample index out of bounds"))?;
2065        let size = *st
2066            .sample_sizes
2067            .get(index)
2068            .ok_or(Error::InvalidData("sample index out of bounds"))?;
2069        Ok((offset, size))
2070    }
2071
2072    // ========================================
2073    // Public data access API (one way each)
2074    // ========================================
2075
2076    /// Get primary item data.
2077    ///
2078    /// Returns `Cow::Borrowed` for single-extent items, `Cow::Owned` for multi-extent.
2079    pub fn primary_data(&self) -> Result<Cow<'_, [u8]>> {
2080        self.resolve_item(&self.primary)
2081    }
2082
2083    /// Get alpha item data, if present.
2084    pub fn alpha_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
2085        self.alpha.as_ref().map(|item| self.resolve_item(item))
2086    }
2087
2088    /// Get grid tile data by index.
2089    pub fn tile_data(&self, index: usize) -> Result<Cow<'_, [u8]>> {
2090        let item = self.tiles.get(index)
2091            .ok_or(Error::InvalidData("tile index out of bounds"))?;
2092        self.resolve_item(item)
2093    }
2094
2095    /// Get a single animation frame by index.
2096    pub fn frame(&self, index: usize) -> Result<FrameRef<'_>> {
2097        self.resolve_frame(index)
2098    }
2099
2100    /// Iterate over all animation frames.
2101    pub fn frames(&self) -> FrameIterator<'_> {
2102        let count = self
2103            .animation_info()
2104            .map(|info| info.frame_count)
2105            .unwrap_or(0);
2106        FrameIterator { parser: self, index: 0, count }
2107    }
2108
2109    // ========================================
2110    // Metadata (no data access)
2111    // ========================================
2112
2113    /// Get animation metadata (if animated).
2114    pub fn animation_info(&self) -> Option<AnimationInfo> {
2115        self.animation_data.as_ref().map(|data| AnimationInfo {
2116            frame_count: data.sample_table.sample_sizes.len(),
2117            loop_count: data.loop_count,
2118            has_alpha: data.alpha_sample_table.is_some(),
2119            timescale: data.media_timescale,
2120        })
2121    }
2122
2123    /// Get grid configuration (if grid image).
2124    pub fn grid_config(&self) -> Option<&GridConfig> {
2125        self.grid_config.as_ref()
2126    }
2127
2128    /// Get number of grid tiles.
2129    pub fn grid_tile_count(&self) -> usize {
2130        self.tiles.len()
2131    }
2132
2133    /// Check if alpha channel uses premultiplied alpha.
2134    pub fn premultiplied_alpha(&self) -> bool {
2135        self.premultiplied_alpha
2136    }
2137
2138    /// Get the AV1 codec configuration for the primary item, if present.
2139    ///
2140    /// This is parsed from the `av1C` property box in the container.
2141    pub fn av1_config(&self) -> Option<&AV1Config> {
2142        self.av1_config.as_ref()
2143    }
2144
2145    /// Get colour information for the primary item, if present.
2146    ///
2147    /// This is parsed from the `colr` property box in the container.
2148    /// For CICP/nclx values, this is the authoritative source and may
2149    /// differ from values in the AV1 bitstream sequence header.
2150    pub fn color_info(&self) -> Option<&ColorInformation> {
2151        self.color_info.as_ref()
2152    }
2153
2154    /// Get rotation for the primary item, if present.
2155    pub fn rotation(&self) -> Option<&ImageRotation> {
2156        self.rotation.as_ref()
2157    }
2158
2159    /// Get mirror for the primary item, if present.
2160    pub fn mirror(&self) -> Option<&ImageMirror> {
2161        self.mirror.as_ref()
2162    }
2163
2164    /// Get clean aperture (crop) for the primary item, if present.
2165    pub fn clean_aperture(&self) -> Option<&CleanAperture> {
2166        self.clean_aperture.as_ref()
2167    }
2168
2169    /// Get pixel aspect ratio for the primary item, if present.
2170    pub fn pixel_aspect_ratio(&self) -> Option<&PixelAspectRatio> {
2171        self.pixel_aspect_ratio.as_ref()
2172    }
2173
2174    /// Get content light level info for the primary item, if present.
2175    pub fn content_light_level(&self) -> Option<&ContentLightLevel> {
2176        self.content_light_level.as_ref()
2177    }
2178
2179    /// Get mastering display colour volume for the primary item, if present.
2180    pub fn mastering_display(&self) -> Option<&MasteringDisplayColourVolume> {
2181        self.mastering_display.as_ref()
2182    }
2183
2184    /// Get content colour volume for the primary item, if present.
2185    pub fn content_colour_volume(&self) -> Option<&ContentColourVolume> {
2186        self.content_colour_volume.as_ref()
2187    }
2188
2189    /// Get ambient viewing environment for the primary item, if present.
2190    pub fn ambient_viewing(&self) -> Option<&AmbientViewingEnvironment> {
2191        self.ambient_viewing.as_ref()
2192    }
2193
2194    /// Get operating point selector for the primary item, if present.
2195    pub fn operating_point(&self) -> Option<&OperatingPointSelector> {
2196        self.operating_point.as_ref()
2197    }
2198
2199    /// Get layer selector for the primary item, if present.
2200    pub fn layer_selector(&self) -> Option<&LayerSelector> {
2201        self.layer_selector.as_ref()
2202    }
2203
2204    /// Get AV1 layered image indexing for the primary item, if present.
2205    pub fn layered_image_indexing(&self) -> Option<&AV1LayeredImageIndexing> {
2206        self.layered_image_indexing.as_ref()
2207    }
2208
2209    /// Get EXIF metadata for the primary item, if present.
2210    ///
2211    /// Returns raw EXIF data (TIFF header onwards), with the 4-byte AVIF offset prefix stripped.
2212    pub fn exif(&self) -> Option<Result<Cow<'_, [u8]>>> {
2213        self.exif_item.as_ref().map(|item| {
2214            let raw = self.resolve_item(item)?;
2215            // AVIF EXIF items start with a 4-byte big-endian offset to the TIFF header
2216            if raw.len() <= 4 {
2217                return Err(Error::InvalidData("EXIF item too short"));
2218            }
2219            let offset = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]) as usize;
2220            let start = 4 + offset;
2221            if start >= raw.len() {
2222                return Err(Error::InvalidData("EXIF offset exceeds item size"));
2223            }
2224            match raw {
2225                Cow::Borrowed(slice) => Ok(Cow::Borrowed(&slice[start..])),
2226                Cow::Owned(vec) => Ok(Cow::Owned(vec[start..].to_vec())),
2227            }
2228        })
2229    }
2230
2231    /// Get XMP metadata for the primary item, if present.
2232    ///
2233    /// Returns raw XMP/XML data.
2234    pub fn xmp(&self) -> Option<Result<Cow<'_, [u8]>>> {
2235        self.xmp_item.as_ref().map(|item| self.resolve_item(item))
2236    }
2237
2238    /// Gain map metadata, if a `tmap` derived image item is present.
2239    ///
2240    /// Describes how to apply a gain map to reconstruct an HDR rendition
2241    /// from the SDR base image. See ISO 21496-1.
2242    pub fn gain_map_metadata(&self) -> Option<&GainMapMetadata> {
2243        self.gain_map_metadata.as_ref()
2244    }
2245
2246    /// Gain map image data (AV1-encoded), if present.
2247    pub fn gain_map_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
2248        self.gain_map.as_ref().map(|item| self.resolve_item(item))
2249    }
2250
2251    /// Color information for the alternate (typically HDR) rendition.
2252    ///
2253    /// This comes from the `tmap` item's `colr` property and describes
2254    /// the colour space of the tone-mapped output.
2255    pub fn gain_map_color_info(&self) -> Option<&ColorInformation> {
2256        self.gain_map_color_info.as_ref()
2257    }
2258
2259    /// Get the full gain map bundle, if a `tmap` derived image item is present.
2260    ///
2261    /// Returns [`AvifGainMap`] containing metadata, raw AV1 gain map data,
2262    /// and alternate rendition color info. Returns `None` if no gain map
2263    /// is present, or `Some(Err(..))` if the gain map data cannot be resolved.
2264    pub fn gain_map(&self) -> Option<Result<AvifGainMap>> {
2265        let metadata = self.gain_map_metadata.as_ref()?.clone();
2266        let data_extents = self.gain_map.as_ref()?;
2267        let alt_color_info = self.gain_map_color_info.clone();
2268
2269        Some(self.resolve_item(data_extents).map(|data| AvifGainMap {
2270            metadata,
2271            gain_map_data: data.into_owned(),
2272            alt_color_info,
2273        }))
2274    }
2275
2276    /// Check if a depth auxiliary image is present.
2277    ///
2278    /// Returns `true` if the AVIF container has an `auxl`-linked item with
2279    /// a depth auxiliary type URN.
2280    pub fn has_depth_map(&self) -> bool {
2281        self.depth_item.is_some()
2282    }
2283
2284    /// Get the raw AV1 bitstream of the depth auxiliary image, if present.
2285    pub fn depth_map_data(&self) -> Option<Result<Cow<'_, [u8]>>> {
2286        self.depth_item.as_ref().map(|item| self.resolve_item(item))
2287    }
2288
2289    /// Get the full depth map bundle, if a depth auxiliary image is present.
2290    ///
2291    /// Returns [`AvifDepthMap`] containing the raw AV1 depth image data,
2292    /// dimensions, codec config, and color info. Returns `None` if no depth
2293    /// auxiliary is present, or `Some(Err(..))` if the data cannot be resolved.
2294    ///
2295    /// # Example
2296    ///
2297    /// ```no_run
2298    /// let bytes = std::fs::read("portrait.avif").unwrap();
2299    /// let parser = zenavif_parse::AvifParser::from_bytes(&bytes).unwrap();
2300    /// if let Some(Ok(dm)) = parser.depth_map() {
2301    ///     println!("Depth: {}x{}, {} bytes", dm.width, dm.height, dm.data.len());
2302    /// }
2303    /// ```
2304    pub fn depth_map(&self) -> Option<Result<AvifDepthMap>> {
2305        let data_extents = self.depth_item.as_ref()?;
2306        let av1_config = self.depth_av1_config.clone();
2307        let color_info = self.depth_color_info.clone();
2308        let width = self.depth_width;
2309        let height = self.depth_height;
2310
2311        Some(self.resolve_item(data_extents).map(|data| AvifDepthMap {
2312            data: data.into_owned(),
2313            width,
2314            height,
2315            av1_config,
2316            color_info,
2317        }))
2318    }
2319
2320    /// Get the major brand from the `ftyp` box (e.g., `*b"avif"` or `*b"avis"`).
2321    pub fn major_brand(&self) -> &[u8; 4] {
2322        &self.major_brand
2323    }
2324
2325    /// Get the compatible brands from the `ftyp` box.
2326    pub fn compatible_brands(&self) -> &[[u8; 4]] {
2327        &self.compatible_brands
2328    }
2329
2330    /// Parse AV1 metadata from the primary item.
2331    pub fn primary_metadata(&self) -> Result<AV1Metadata> {
2332        let data = self.primary_data()?;
2333        AV1Metadata::parse_av1_bitstream(&data)
2334    }
2335
2336    /// Parse AV1 metadata from the alpha item, if present.
2337    pub fn alpha_metadata(&self) -> Option<Result<AV1Metadata>> {
2338        self.alpha.as_ref().map(|item| {
2339            let data = self.resolve_item(item)?;
2340            AV1Metadata::parse_av1_bitstream(&data)
2341        })
2342    }
2343
2344    // ========================================
2345    // Conversion
2346    // ========================================
2347
    /// Convert to [`AvifData`] (eagerly loads all frames and tiles).
    ///
    /// Provided for migration from the eager API. Prefer using `AvifParser`
    /// methods directly.
    ///
    /// # Errors
    ///
    /// Propagates failures from resolving the primary item, a present alpha
    /// item, grid tiles, or animation frames. In contrast, the Exif, XMP,
    /// gain-map and depth-map payloads are copied best-effort: resolution
    /// errors and allocation failures while copying them are silently
    /// dropped, leaving the corresponding field `None` or empty.
    #[cfg(feature = "eager")]
    #[deprecated(since = "1.5.0", note = "Use AvifParser methods directly instead of converting to AvifData")]
    #[allow(deprecated)]
    pub fn to_avif_data(&self) -> Result<AvifData> {
        // The primary AV1 payload is mandatory; any failure aborts the conversion.
        let primary_data = self.primary_data()?;
        let mut primary_item = TryVec::new();
        primary_item.extend_from_slice(&primary_data)?;

        // Alpha is optional, but a present-yet-unreadable alpha item is a hard error.
        let alpha_item = match self.alpha_data() {
            Some(Ok(data)) => {
                let mut v = TryVec::new();
                v.extend_from_slice(&data)?;
                Some(v)
            }
            Some(Err(e)) => return Err(e),
            None => None,
        };

        // Copy every grid tile, in tile order.
        let mut grid_tiles = TryVec::new();
        for i in 0..self.grid_tile_count() {
            let data = self.tile_data(i)?;
            let mut v = TryVec::new();
            v.extend_from_slice(&data)?;
            grid_tiles.push(v)?;
        }

        // Materialize all animation frames (with per-frame durations), if any.
        let animation = if let Some(info) = self.animation_info() {
            let mut frames = TryVec::new();
            for i in 0..info.frame_count {
                let frame_ref = self.frame(i)?;
                let mut data = TryVec::new();
                data.extend_from_slice(&frame_ref.data)?;
                frames.push(AnimationFrame { data, duration_ms: frame_ref.duration_ms })?;
            }
            Some(AnimationConfig {
                loop_count: info.loop_count,
                frames,
            })
        } else {
            None
        };

        Ok(AvifData {
            primary_item,
            alpha_item,
            premultiplied_alpha: self.premultiplied_alpha,
            grid_config: self.grid_config.clone(),
            grid_tiles,
            animation,
            av1_config: self.av1_config.clone(),
            color_info: self.color_info.clone(),
            rotation: self.rotation,
            mirror: self.mirror,
            clean_aperture: self.clean_aperture,
            pixel_aspect_ratio: self.pixel_aspect_ratio,
            content_light_level: self.content_light_level,
            mastering_display: self.mastering_display,
            content_colour_volume: self.content_colour_volume,
            ambient_viewing: self.ambient_viewing,
            operating_point: self.operating_point,
            layer_selector: self.layer_selector,
            layered_image_indexing: self.layered_image_indexing,
            // Best-effort copies below: `let _ =` deliberately ignores
            // allocation failures (the field ends up empty instead).
            exif: self.exif().and_then(|r| r.ok()).map(|c| {
                let mut v = TryVec::new();
                let _ = v.extend_from_slice(&c);
                v
            }),
            xmp: self.xmp().and_then(|r| r.ok()).map(|c| {
                let mut v = TryVec::new();
                let _ = v.extend_from_slice(&c);
                v
            }),
            gain_map_metadata: self.gain_map_metadata.clone(),
            gain_map_item: self.gain_map_data().and_then(|r| r.ok()).map(|c| {
                let mut v = TryVec::new();
                let _ = v.extend_from_slice(&c);
                v
            }),
            gain_map_color_info: self.gain_map_color_info.clone(),
            depth_item: self.depth_map_data().and_then(|r| r.ok()).map(|c| {
                let mut v = TryVec::new();
                let _ = v.extend_from_slice(&c);
                v
            }),
            depth_width: self.depth_width,
            depth_height: self.depth_height,
            depth_av1_config: self.depth_av1_config.clone(),
            depth_color_info: self.depth_color_info.clone(),
            major_brand: self.major_brand,
            compatible_brands: self.compatible_brands.clone(),
        })
    }
2444}
2445
/// Iterator over animation frames.
///
/// Created by [`AvifParser::frames()`]. Yields [`FrameRef`] on demand.
pub struct FrameIterator<'a> {
    /// Parser used to resolve each frame's data lazily.
    parser: &'a AvifParser<'a>,
    /// Index of the next frame to yield.
    index: usize,
    /// Total number of frames available.
    count: usize,
}
2454
2455impl<'a> Iterator for FrameIterator<'a> {
2456    type Item = Result<FrameRef<'a>>;
2457
2458    fn next(&mut self) -> Option<Self::Item> {
2459        if self.index >= self.count {
2460            return None;
2461        }
2462        let result = self.parser.frame(self.index);
2463        self.index += 1;
2464        Some(result)
2465    }
2466
2467    fn size_hint(&self) -> (usize, Option<usize>) {
2468        let remaining = self.count.saturating_sub(self.index);
2469        (remaining, Some(remaining))
2470    }
2471}
2472
2473impl ExactSizeIterator for FrameIterator<'_> {
2474    fn len(&self) -> usize {
2475        self.count.saturating_sub(self.index)
2476    }
2477}
2478
/// Parsed contents of an AVIF 'meta' box: everything needed to locate and
/// associate items (primary image, alpha, grid tiles, metadata).
struct AvifInternalMeta {
    /// Single-item references, e.g. "dimg" (grid tiles), "auxl" (alpha), "prem".
    item_references: TryVec<SingleItemTypeReferenceBox>,
    /// Properties associated with items (AV1 config, ispe, transforms, ...).
    properties: TryVec<AssociatedProperty>,
    /// Item ID of the primary item.
    primary_item_id: u32,
    /// Item locations ('iloc').
    iloc_items: TryVec<ItemLocationBoxItem>,
    /// Item info entries ('iinf'/'infe').
    item_infos: TryVec<ItemInfoEntry>,
    /// Raw item-data payload, if present — presumably backs
    /// `ConstructionMethod::Idat` extents; verify against the iloc reader.
    idat: Option<TryVec<u8>>,
    #[allow(dead_code)] // Parsed for future altr group support
    entity_groups: TryVec<EntityGroup>,
}
2489
/// A Media Data Box
/// See ISO 14496-12:2015 § 8.1.1
#[cfg(feature = "eager")]
struct MediaDataBox {
    /// Offset of `data` from the beginning of the file. See `ConstructionMethod::File`
    offset: u64,
    /// The box payload, buffered in full.
    data: TryVec<u8>,
}
2498
#[cfg(feature = "eager")]
impl MediaDataBox {
    /// Check whether the beginning of `extent` is within the bounds of the `MediaDataBox`.
    /// We assume extents to not cross box boundaries. If so, this will cause an error
    /// in `read_extent`.
    fn contains_extent(&self, extent: &ExtentRange) -> bool {
        // `checked_sub` is `None` exactly when the extent starts before this box.
        extent
            .start()
            .checked_sub(self.offset)
            .is_some_and(|relative| relative < self.data.len().to_u64())
    }

    /// Check whether `extent` covers the `MediaDataBox` exactly.
    fn matches_extent(&self, extent: &ExtentRange) -> bool {
        if self.offset != extent.start() {
            return false;
        }
        match extent {
            ExtentRange::ToEnd(_) => true,
            ExtentRange::WithLength(range) => self
                .offset
                .checked_add(self.data.len().to_u64())
                .is_some_and(|end| end == range.end),
        }
    }

    /// Copy the range specified by `extent` to the end of `buf` or return an error if the range
    /// is not fully contained within `MediaDataBox`.
    fn read_extent(&self, extent: &ExtentRange, buf: &mut TryVec<u8>) -> Result<()> {
        // Translate the absolute extent start into an offset within `data`.
        let rel_start = extent
            .start()
            .checked_sub(self.offset)
            .ok_or(Error::InvalidData("mdat does not contain extent"))?;
        let slice = match extent {
            ExtentRange::ToEnd(_) => self.data.get(rel_start.try_into()?..),
            ExtentRange::WithLength(range) => {
                let extent_len = range
                    .end
                    .checked_sub(range.start)
                    .ok_or(Error::InvalidData("range start > end"))?;
                let rel_end = rel_start
                    .checked_add(extent_len)
                    .ok_or(Error::InvalidData("extent end overflow"))?;
                self.data.get(rel_start.try_into()?..rel_end.try_into()?)
            }
        };
        let slice = slice.ok_or(Error::InvalidData("extent crosses box boundary"))?;
        buf.extend_from_slice(slice)?;
        Ok(())
    }
}
2557
/// Used for 'infe' boxes within 'iinf' boxes
/// See ISO 14496-12:2015 § 8.11.6
/// Only versions {2, 3} are supported
#[derive(Debug)]
struct ItemInfoEntry {
    /// Identifier matched against iloc/iref item IDs.
    item_id: u32,
    /// Four-character item type, e.g. b"grid" for derived grid images.
    item_type: FourCC,
}
2566
/// See ISO 14496-12:2015 § 8.11.12
#[derive(Debug)]
struct SingleItemTypeReferenceBox {
    /// Reference type, e.g. b"dimg", b"auxl", b"prem".
    item_type: FourCC,
    /// The referencing item.
    from_item_id: u32,
    /// The referenced item.
    to_item_id: u32,
    /// Index of this reference within the list of references of the same type from the same item
    /// (0-based). This is the dimgIdx for grid tiles.
    reference_index: u16,
}
2577
/// Potential sizes (in bytes) of variable-sized fields of the 'iloc' box
/// See ISO 14496-12:2015 § 8.11.3
#[derive(Debug)]
enum IlocFieldSize {
    /// Field is absent (0 bytes).
    Zero,
    /// 4-byte (32-bit) field.
    Four,
    /// 8-byte (64-bit) field.
    Eight,
}
2586
2587impl IlocFieldSize {
2588    const fn to_bits(&self) -> u8 {
2589        match self {
2590            Self::Zero => 0,
2591            Self::Four => 32,
2592            Self::Eight => 64,
2593        }
2594    }
2595}
2596
2597impl TryFrom<u8> for IlocFieldSize {
2598    type Error = Error;
2599
2600    fn try_from(value: u8) -> Result<Self> {
2601        match value {
2602            0 => Ok(Self::Zero),
2603            4 => Ok(Self::Four),
2604            8 => Ok(Self::Eight),
2605            _ => Err(Error::InvalidData("value must be in the set {0, 4, 8}")),
2606        }
2607    }
2608}
2609
/// Supported versions of the 'iloc' box (0, 1, or 2).
#[derive(PartialEq)]
enum IlocVersion {
    Zero,
    One,
    Two,
}
2616
2617impl TryFrom<u8> for IlocVersion {
2618    type Error = Error;
2619
2620    fn try_from(value: u8) -> Result<Self> {
2621        match value {
2622            0 => Ok(Self::Zero),
2623            1 => Ok(Self::One),
2624            2 => Ok(Self::Two),
2625            _ => Err(Error::Unsupported("unsupported version in 'iloc' box")),
2626        }
2627    }
2628}
2629
/// Used for 'iloc' boxes
/// See ISO 14496-12:2015 § 8.11.3
/// `base_offset` is omitted since it is integrated into the ranges in `extents`
/// `data_reference_index` is omitted, since only 0 (i.e., this file) is supported
#[derive(Debug)]
struct ItemLocationBoxItem {
    /// Item this location entry describes.
    item_id: u32,
    /// How the extents are to be resolved (file offsets vs. idat).
    construction_method: ConstructionMethod,
    /// Unused for `ConstructionMethod::Idat`
    extents: TryVec<ItemLocationBoxExtent>,
}
2641
/// How an 'iloc' item's data is located (ISO 14496-12:2015 § 8.11.3).
#[derive(Clone, Copy, Debug, PartialEq)]
enum ConstructionMethod {
    /// Extents are absolute offsets into the file (typically into 'mdat').
    File,
    /// Extents are offsets into the 'idat' box inside 'meta'.
    Idat,
    #[allow(dead_code)] // TODO: see https://github.com/mozilla/mp4parse-rust/issues/196
    Item,
}
2649
/// `extent_index` is omitted since it's only used for `ConstructionMethod::Item` which
/// is currently not implemented.
#[derive(Clone, Debug)]
struct ItemLocationBoxExtent {
    /// Byte range of this extent (base offset already folded in).
    extent_range: ExtentRange,
}
2656
/// A byte range for an item extent: either bounded, or running to the
/// end of the enclosing data.
#[derive(Clone, Debug)]
enum ExtentRange {
    /// Explicit `[start, end)` range.
    WithLength(Range<u64>),
    /// Open-ended range starting at the given offset.
    ToEnd(RangeFrom<u64>),
}
2662
2663impl ExtentRange {
2664    const fn start(&self) -> u64 {
2665        match self {
2666            Self::WithLength(r) => r.start,
2667            Self::ToEnd(r) => r.start,
2668        }
2669    }
2670}
2671
/// See ISO 14496-12:2015 § 4.2
struct BMFFBox<'a, T> {
    /// Parsed box header (type, size, header length, optional uuid).
    head: BoxHeader,
    /// Box payload, wrapped in a `Take` limited to the content size so
    /// reads cannot run past the box boundary.
    content: Take<&'a mut T>,
}
2677
2678impl<T: Read> BMFFBox<'_, T> {
2679    fn read_into_try_vec(&mut self) -> std::io::Result<TryVec<u8>> {
2680        let limit = self.content.limit();
2681        // For size=0 boxes, size is set to u64::MAX, but after subtracting offset
2682        // (8 or 16 bytes), the limit will be slightly less. Check for values very
2683        // close to u64::MAX to detect these cases.
2684        // Cap pre-allocation to 256 MB — the actual read_to_end will
2685        // grow as needed if the box really is larger, and return early
2686        // if the underlying reader has less data than claimed.
2687        const MAX_PREALLOC: u64 = 256 * 1024 * 1024;
2688        let mut vec = if limit >= u64::MAX - BoxHeader::MIN_LARGE_SIZE {
2689            // Unknown size (size=0 box), read without pre-allocation
2690            std::vec::Vec::new()
2691        } else {
2692            let mut v = std::vec::Vec::new();
2693            v.try_reserve_exact(limit.min(MAX_PREALLOC) as usize)
2694                .map_err(|_| std::io::ErrorKind::OutOfMemory)?;
2695            v
2696        };
2697        self.content.read_to_end(&mut vec)?; // The default impl
2698        Ok(vec.into())
2699    }
2700}
2701
#[test]
fn box_read_to_end() {
    // A 10-byte source wrapped in a box limited to 5 bytes: reads stop at 5.
    let source = &mut b"1234567890".as_slice();
    let mut bmff_box = BMFFBox {
        head: BoxHeader { name: BoxType::FileTypeBox, size: 5, offset: 0, uuid: None },
        content: <_ as Read>::take(source, 5),
    };
    let bytes = bmff_box.read_into_try_vec().unwrap();
    assert_eq!(bytes.len(), 5);
    assert_eq!(bytes, b"12345".as_ref());
}
2713
#[test]
fn box_read_to_end_large_claim() {
    // A box claiming huge size but backed by only 10 bytes should still succeed —
    // read_to_end returns what's actually available, pre-allocation is capped.
    let source = &mut b"1234567890".as_slice();
    let mut bmff_box = BMFFBox {
        head: BoxHeader { name: BoxType::FileTypeBox, size: 5, offset: 0, uuid: None },
        content: <_ as Read>::take(source, u64::MAX / 2),
    };
    let bytes = bmff_box.read_into_try_vec().unwrap();
    assert_eq!(bytes.len(), 10);
}
2726
/// Iterator-like helper that reads successive boxes from a source.
struct BoxIter<'a, T> {
    /// Underlying reader, positioned at the next box header.
    src: &'a mut T,
    /// Upper bound on bytes remaining in the source.
    ///
    /// Used to clamp claimed box sizes so that a malformed header
    /// (e.g. claiming 4 GB when only 26 bytes remain) does not cause
    /// multi-gigabyte allocations based on [`BMFFBox::bytes_left`].
    max_remaining: u64,
}
2736
2737impl<T: Read> BoxIter<'_, T> {
2738    /// Create a BoxIter without a known data bound (used by streaming readers).
2739    #[cfg(feature = "eager")]
2740    fn new(src: &mut T) -> BoxIter<'_, T> {
2741        BoxIter { src, max_remaining: u64::MAX }
2742    }
2743
2744    fn with_max_remaining(src: &mut T, max_remaining: u64) -> BoxIter<'_, T> {
2745        BoxIter { src, max_remaining }
2746    }
2747
2748    fn next_box(&mut self) -> Result<Option<BMFFBox<'_, T>>> {
2749        let r = read_box_header(self.src);
2750        match r {
2751            Ok(h) => {
2752                let claimed = h.size - h.offset;
2753                // Clamp the Take limit so that allocations based on
2754                // bytes_left() cannot exceed the actual data available.
2755                let clamped = claimed.min(self.max_remaining);
2756                // Decrease our remaining budget by the clamped content
2757                // size plus the header bytes already consumed.
2758                self.max_remaining = self.max_remaining.saturating_sub(clamped.saturating_add(h.offset));
2759                Ok(Some(BMFFBox {
2760                    head: h,
2761                    content: self.src.take(clamped),
2762                }))
2763            }
2764            Err(Error::UnexpectedEOF) => Ok(None),
2765            Err(e) => Err(e),
2766        }
2767    }
2768}
2769
2770impl<T: Read> Read for BMFFBox<'_, T> {
2771    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
2772        self.content.read(buf)
2773    }
2774}
2775
// Forward the byte-offset query to the underlying reader so callers can
// record absolute file positions for box payloads.
impl<T: Offset> Offset for BMFFBox<'_, T> {
    fn offset(&self) -> u64 {
        self.content.get_ref().offset()
    }
}
2781
impl<T: Read> BMFFBox<'_, T> {
    /// Number of content bytes not yet read from this box.
    fn bytes_left(&self) -> u64 {
        self.content.limit()
    }

    /// Borrow the parsed box header.
    const fn get_header(&self) -> &BoxHeader {
        &self.head
    }

    /// Iterate over child boxes, bounded by this box's remaining bytes so a
    /// child cannot claim more data than its parent actually contains.
    fn box_iter(&mut self) -> BoxIter<'_, Self> {
        BoxIter::with_max_remaining(self, self.bytes_left())
    }
}
2795
2796impl<T> Drop for BMFFBox<'_, T> {
2797    fn drop(&mut self) {
2798        if self.content.limit() > 0 {
2799            let name: FourCC = From::from(self.head.name);
2800            debug!("Dropping {} bytes in '{}'", self.content.limit(), name);
2801        }
2802    }
2803}
2804
/// Read and parse a box header.
///
/// Call this first to determine the type of a particular mp4 box
/// and its length. Used internally for dispatching to specific
/// parsers for the internal content, or to get the length to
/// skip unknown or uninteresting boxes.
///
/// On success the returned header satisfies `offset <= size`
/// (checked below); `BoxIter::next_box` relies on this invariant.
///
/// See ISO 14496-12:2015 § 4.2
fn read_box_header<T: ReadBytesExt>(src: &mut T) -> Result<BoxHeader> {
    let size32 = be_u32(src)?;
    let name = BoxType::from(be_u32(src)?);
    let size = match size32 {
        // valid only for top-level box and indicates it's the last box in the file.  usually mdat.
        0 => {
            // Size=0 means box extends to EOF (ISOBMFF spec allows this for last box)
            u64::MAX
        },
        1 => {
            // size32 == 1 signals a 64-bit "largesize" field follows the name.
            let size64 = be_u64(src)?;
            if size64 < BoxHeader::MIN_LARGE_SIZE {
                return Err(Error::InvalidData("malformed wide size"));
            }
            size64
        },
        _ => {
            if u64::from(size32) < BoxHeader::MIN_SIZE {
                return Err(Error::InvalidData("malformed size"));
            }
            u64::from(size32)
        },
    };
    // `offset` counts the header bytes consumed so far: 16 for the
    // large-size form, 8 otherwise.
    let mut offset = match size32 {
        1 => BoxHeader::MIN_LARGE_SIZE,
        _ => BoxHeader::MIN_SIZE,
    };
    // 'uuid' boxes carry a 16-byte extended type right after the header.
    // A short read or a size too small for it is treated as a malformed
    // uuid and skipped rather than failing the whole parse.
    let uuid = if name == BoxType::UuidBox {
        if size >= offset + 16 {
            let mut buffer = [0u8; 16];
            let count = src.read(&mut buffer)?;
            // Whatever was read is still accounted as consumed header bytes.
            offset += count.to_u64();
            if count == 16 {
                Some(buffer)
            } else {
                debug!("malformed uuid (short read), skipping");
                None
            }
        } else {
            debug!("malformed uuid, skipping");
            None
        }
    } else {
        None
    };
    if offset > size {
        return Err(Error::InvalidData("box header offset exceeds size"));
    }
    Ok(BoxHeader { name, size, offset, uuid })
}
2863
2864/// Parse the extra header fields for a full box.
2865fn read_fullbox_extra<T: ReadBytesExt>(src: &mut T) -> Result<(u8, u32)> {
2866    let version = src.read_u8()?;
2867    let flags_a = src.read_u8()?;
2868    let flags_b = src.read_u8()?;
2869    let flags_c = src.read_u8()?;
2870    Ok((
2871        version,
2872        u32::from(flags_a) << 16 | u32::from(flags_b) << 8 | u32::from(flags_c),
2873    ))
2874}
2875
2876// Parse the extra fields for a full box whose flag fields must be zero.
2877fn read_fullbox_version_no_flags<T: ReadBytesExt>(src: &mut T, options: &ParseOptions) -> Result<u8> {
2878    let (version, flags) = read_fullbox_extra(src)?;
2879
2880    if flags != 0 && !options.lenient {
2881        return Err(Error::Unsupported("expected flags to be 0"));
2882    }
2883
2884    Ok(version)
2885}
2886
2887/// Skip over the entire contents of a box.
2888fn skip_box_content<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<()> {
2889    // Skip the contents of unknown chunks.
2890    let to_skip = {
2891        let header = src.get_header();
2892        debug!("{header:?} (skipped)");
2893        header
2894            .size
2895            .checked_sub(header.offset)
2896            .ok_or(Error::InvalidData("header offset > size"))?
2897    };
2898    if to_skip != src.bytes_left() {
2899        return Err(Error::InvalidData("box content size mismatch"));
2900    }
2901    skip(src, to_skip)
2902}
2903
2904/// Skip over the remain data of a box.
2905fn skip_box_remain<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<()> {
2906    let remain = {
2907        let header = src.get_header();
2908        let len = src.bytes_left();
2909        debug!("remain {len} (skipped) in {header:?}");
2910        len
2911    };
2912    skip(src, remain)
2913}
2914
/// Tracks resource usage during parsing and enforces the limits
/// configured in `DecodeConfig`.
struct ResourceTracker<'a> {
    /// Limits to enforce (memory, megapixels, frame/tile counts).
    config: &'a DecodeConfig,
    /// Bytes currently reserved (eager parsing only).
    #[cfg(feature = "eager")]
    current_memory: u64,
    /// High-water mark of `current_memory`.
    #[cfg(feature = "eager")]
    peak_memory: u64,
}
2922
2923impl<'a> ResourceTracker<'a> {
2924    fn new(config: &'a DecodeConfig) -> Self {
2925        Self {
2926            config,
2927            #[cfg(feature = "eager")]
2928            current_memory: 0,
2929            #[cfg(feature = "eager")]
2930            peak_memory: 0,
2931        }
2932    }
2933
2934    #[cfg(feature = "eager")]
2935    fn reserve(&mut self, bytes: u64) -> Result<()> {
2936        self.current_memory = self.current_memory.saturating_add(bytes);
2937        self.peak_memory = self.peak_memory.max(self.current_memory);
2938
2939        if let Some(limit) = self.config.peak_memory_limit
2940            && self.peak_memory > limit {
2941                return Err(Error::ResourceLimitExceeded("peak memory limit exceeded"));
2942            }
2943
2944        Ok(())
2945    }
2946
2947    #[cfg(feature = "eager")]
2948    fn release(&mut self, bytes: u64) {
2949        self.current_memory = self.current_memory.saturating_sub(bytes);
2950    }
2951
2952    #[cfg(feature = "eager")]
2953    fn validate_total_megapixels(&self, width: u32, height: u32) -> Result<()> {
2954        if let Some(limit) = self.config.total_megapixels_limit {
2955            let megapixels = (width as u64)
2956                .checked_mul(height as u64)
2957                .ok_or(Error::InvalidData("dimension overflow"))?
2958                / 1_000_000;
2959
2960            if megapixels > limit as u64 {
2961                return Err(Error::ResourceLimitExceeded("total megapixels limit exceeded"));
2962            }
2963        }
2964
2965        Ok(())
2966    }
2967
2968    fn validate_animation_frames(&self, count: u32) -> Result<()> {
2969        if let Some(limit) = self.config.max_animation_frames
2970            && count > limit {
2971                return Err(Error::ResourceLimitExceeded("animation frame count limit exceeded"));
2972            }
2973
2974        Ok(())
2975    }
2976
2977    fn validate_grid_tiles(&self, count: u32) -> Result<()> {
2978        if let Some(limit) = self.config.max_grid_tiles
2979            && count > limit {
2980                return Err(Error::ResourceLimitExceeded("grid tile count limit exceeded"));
2981            }
2982
2983        Ok(())
2984    }
2985}
2986
2987/// Read the contents of an AVIF file with resource limits and cancellation support
2988///
2989/// This is the primary parsing function with full control over resource limits
2990/// and cooperative cancellation via the [`Stop`] trait.
2991///
2992/// # Arguments
2993///
2994/// * `f` - Reader for the AVIF file
2995/// * `config` - Resource limits and parsing options
2996/// * `stop` - Cancellation token (use [`Unstoppable`] if not needed)
2997#[cfg(feature = "eager")]
2998#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader_with_config()` instead")]
2999#[allow(deprecated)]
3000pub fn read_avif_with_config<T: Read>(
3001    f: &mut T,
3002    config: &DecodeConfig,
3003    stop: &dyn Stop,
3004) -> Result<AvifData> {
3005    let mut tracker = ResourceTracker::new(config);
3006    let mut f = OffsetReader::new(f);
3007
3008    let mut iter = BoxIter::new(&mut f);
3009
3010    // 'ftyp' box must occur first; see ISO 14496-12:2015 § 4.3.1
3011    let (major_brand, compatible_brands) = if let Some(mut b) = iter.next_box()? {
3012        if b.head.name == BoxType::FileTypeBox {
3013            let ftyp = read_ftyp(&mut b)?;
3014            // Accept both 'avif' (single-frame) and 'avis' (animated) brands
3015            if ftyp.major_brand != b"avif" && ftyp.major_brand != b"avis" {
3016                warn!("major_brand: {}", ftyp.major_brand);
3017                return Err(Error::InvalidData("ftyp must be 'avif' or 'avis'"));
3018            }
3019            let major = ftyp.major_brand.value;
3020            let compat = ftyp.compatible_brands.iter().map(|b| b.value).collect();
3021            (major, compat)
3022        } else {
3023            return Err(Error::InvalidData("'ftyp' box must occur first"));
3024        }
3025    } else {
3026        return Err(Error::InvalidData("'ftyp' box must occur first"));
3027    };
3028
3029    let mut meta = None;
3030    let mut mdats = TryVec::new();
3031    let mut animation_data: Option<ParsedAnimationData> = None;
3032
3033    let parse_opts = ParseOptions { lenient: config.lenient };
3034
3035    while let Some(mut b) = iter.next_box()? {
3036        stop.check()?;
3037
3038        match b.head.name {
3039            BoxType::MetadataBox => {
3040                if meta.is_some() {
3041                    return Err(Error::InvalidData("There should be zero or one meta boxes per ISO 14496-12:2015 § 8.11.1.1"));
3042                }
3043                meta = Some(read_avif_meta(&mut b, &parse_opts)?);
3044            },
3045            BoxType::MovieBox => {
3046                let tracks = read_moov(&mut b)?;
3047                if !tracks.is_empty() {
3048                    animation_data = Some(associate_tracks(tracks)?);
3049                }
3050            },
3051            BoxType::MediaDataBox => {
3052                if b.bytes_left() > 0 {
3053                    let offset = b.offset();
3054                    let size = b.bytes_left();
3055                    tracker.reserve(size)?;
3056                    let data = b.read_into_try_vec()?;
3057                    tracker.release(size);
3058                    mdats.push(MediaDataBox { offset, data })?;
3059                }
3060            },
3061            _ => skip_box_content(&mut b)?,
3062        }
3063
3064        check_parser_state(&b.head, &b.content)?;
3065    }
3066
3067    // meta is required for still images; pure sequences can have only moov+mdat
3068    if meta.is_none() && animation_data.is_none() {
3069        return Err(Error::InvalidData("missing meta"));
3070    }
3071    let Some(meta) = meta else {
3072        // Pure sequence: return minimal AvifData with no items
3073        return Ok(AvifData {
3074            ..Default::default()
3075        });
3076    };
3077
3078    // Check if primary item is a grid (tiled image)
3079    let is_grid = meta
3080        .item_infos
3081        .iter()
3082        .find(|x| x.item_id == meta.primary_item_id)
3083        .is_some_and(|info| {
3084            let is_g = info.item_type == b"grid";
3085            if is_g {
3086                log::debug!("Grid image detected: primary_item_id={}", meta.primary_item_id);
3087            }
3088            is_g
3089        });
3090
3091    // Extract grid configuration if this is a grid image
3092    let mut grid_config = if is_grid {
3093        meta.properties
3094            .iter()
3095            .find(|prop| {
3096                prop.item_id == meta.primary_item_id
3097                    && matches!(prop.property, ItemProperty::ImageGrid(_))
3098            })
3099            .and_then(|prop| match &prop.property {
3100                ItemProperty::ImageGrid(config) => {
3101                    log::debug!("Grid: found explicit ImageGrid property: {:?}", config);
3102                    Some(config.clone())
3103                },
3104                _ => None,
3105            })
3106    } else {
3107        None
3108    };
3109
3110    // Find tile item IDs if this is a grid
3111    let tile_item_ids: TryVec<u32> = if is_grid {
3112        // Collect tiles with their reference index
3113        let mut tiles_with_index: TryVec<(u32, u16)> = TryVec::new();
3114        for iref in meta.item_references.iter() {
3115            // Grid items reference tiles via "dimg" (derived image) type
3116            if iref.from_item_id == meta.primary_item_id && iref.item_type == b"dimg" {
3117                tiles_with_index.push((iref.to_item_id, iref.reference_index))?;
3118            }
3119        }
3120
3121        // Validate tile count
3122        tracker.validate_grid_tiles(tiles_with_index.len() as u32)?;
3123
3124        // Sort tiles by reference_index to get correct grid order
3125        tiles_with_index.sort_by_key(|&(_, idx)| idx);
3126
3127        // Extract just the IDs in sorted order
3128        let mut ids = TryVec::new();
3129        for (tile_id, _) in tiles_with_index.iter() {
3130            ids.push(*tile_id)?;
3131        }
3132
3133        // No logging here - too verbose for production
3134
3135        // If no ImageGrid property found, calculate grid layout from ispe dimensions
3136        if grid_config.is_none() && !ids.is_empty() {
3137            // Try to calculate grid dimensions from ispe properties
3138            let grid_dims = meta.properties.iter()
3139                .find(|p| p.item_id == meta.primary_item_id)
3140                .and_then(|p| match &p.property {
3141                    ItemProperty::ImageSpatialExtents(e) => Some(e),
3142                    _ => None,
3143                });
3144
3145            let tile_dims = ids.first().and_then(|&tile_id| {
3146                meta.properties.iter()
3147                    .find(|p| p.item_id == tile_id)
3148                    .and_then(|p| match &p.property {
3149                        ItemProperty::ImageSpatialExtents(e) => Some(e),
3150                        _ => None,
3151                    })
3152            });
3153
3154            if let (Some(grid), Some(tile)) = (grid_dims, tile_dims) {
3155                // Validate grid output dimensions
3156                tracker.validate_total_megapixels(grid.width, grid.height)?;
3157
3158                // Validate tile dimensions are non-zero (already validated in read_ispe, but defensive)
3159                if tile.width == 0 || tile.height == 0 {
3160                    log::warn!("Grid: tile has zero dimensions, using fallback");
3161                } else if grid.width % tile.width == 0 && grid.height % tile.height == 0 {
3162                    // Calculate grid layout: grid_dims ÷ tile_dims
3163                    let columns = grid.width / tile.width;
3164                    let rows = grid.height / tile.height;
3165
3166                    // Validate grid dimensions fit in u8 (max 255×255 grid)
3167                    if columns > 255 || rows > 255 {
3168                        log::warn!("Grid: calculated dimensions {}×{} exceed 255, using fallback", rows, columns);
3169                    } else {
3170                        log::debug!("Grid: calculated {}×{} layout from ispe dimensions", rows, columns);
3171                        grid_config = Some(GridConfig {
3172                            rows: rows as u8,
3173                            columns: columns as u8,
3174                            output_width: grid.width,
3175                            output_height: grid.height,
3176                        });
3177                    }
3178                } else {
3179                    log::warn!("Grid: dimension mismatch - grid {}×{} not evenly divisible by tile {}×{}, using fallback",
3180                              grid.width, grid.height, tile.width, tile.height);
3181                }
3182            }
3183
3184            // Fallback: if calculation failed or ispe not available, use N×1 inference
3185            if grid_config.is_none() {
3186                log::debug!("Grid: using fallback {}×1 layout inference", ids.len());
3187                grid_config = Some(GridConfig {
3188                    rows: ids.len() as u8,  // Changed: vertical stack
3189                    columns: 1,              // Changed: single column
3190                    output_width: 0,  // Will be calculated from tiles
3191                    output_height: 0, // Will be calculated from tiles
3192                });
3193            }
3194        }
3195
3196        ids
3197    } else {
3198        TryVec::new()
3199    };
3200
3201    let alpha_item_id = meta
3202        .item_references
3203        .iter()
3204        // Auxiliary image for the primary image
3205        .filter(|iref| {
3206            iref.to_item_id == meta.primary_item_id
3207                && iref.from_item_id != meta.primary_item_id
3208                && iref.item_type == b"auxl"
3209        })
3210        .map(|iref| iref.from_item_id)
3211        // which has the alpha property
3212        .find(|&item_id| {
3213            meta.properties.iter().any(|prop| {
3214                prop.item_id == item_id
3215                    && match &prop.property {
3216                        ItemProperty::AuxiliaryType(urn) => {
3217                            urn.type_subtype().0 == b"urn:mpeg:mpegB:cicp:systems:auxiliary:alpha"
3218                        }
3219                        _ => false,
3220                    }
3221            })
3222        });
3223
3224    // Extract properties for the primary item
3225    macro_rules! find_prop {
3226        ($variant:ident) => {
3227            meta.properties.iter().find_map(|p| {
3228                if p.item_id == meta.primary_item_id {
3229                    match &p.property {
3230                        ItemProperty::$variant(c) => Some(c.clone()),
3231                        _ => None,
3232                    }
3233                } else {
3234                    None
3235                }
3236            })
3237        };
3238    }
3239
3240    let av1_config = find_prop!(AV1Config);
3241    let color_info = find_prop!(ColorInformation);
3242    let rotation = find_prop!(Rotation);
3243    let mirror = find_prop!(Mirror);
3244    let clean_aperture = find_prop!(CleanAperture);
3245    let pixel_aspect_ratio = find_prop!(PixelAspectRatio);
3246    let content_light_level = find_prop!(ContentLightLevel);
3247    let mastering_display = find_prop!(MasteringDisplayColourVolume);
3248    let content_colour_volume = find_prop!(ContentColourVolume);
3249    let ambient_viewing = find_prop!(AmbientViewingEnvironment);
3250    let operating_point = find_prop!(OperatingPointSelector);
3251    let layer_selector = find_prop!(LayerSelector);
3252    let layered_image_indexing = find_prop!(AV1LayeredImageIndexing);
3253
3254    let mut context = AvifData {
3255        premultiplied_alpha: alpha_item_id.is_some_and(|alpha_item_id| {
3256            meta.item_references.iter().any(|iref| {
3257                iref.from_item_id == meta.primary_item_id
3258                    && iref.to_item_id == alpha_item_id
3259                    && iref.item_type == b"prem"
3260            })
3261        }),
3262        av1_config,
3263        color_info,
3264        rotation,
3265        mirror,
3266        clean_aperture,
3267        pixel_aspect_ratio,
3268        content_light_level,
3269        mastering_display,
3270        content_colour_volume,
3271        ambient_viewing,
3272        operating_point,
3273        layer_selector,
3274        layered_image_indexing,
3275        major_brand,
3276        compatible_brands,
3277        ..Default::default()
3278    };
3279
3280    // Helper to extract item data from either mdat or idat
3281    let mut extract_item_data = |loc: &ItemLocationBoxItem, buf: &mut TryVec<u8>| -> Result<()> {
3282        match loc.construction_method {
3283            ConstructionMethod::File => {
3284                for extent in loc.extents.iter() {
3285                    let mut found = false;
3286                    for mdat in mdats.iter_mut() {
3287                        if mdat.matches_extent(&extent.extent_range) {
3288                            buf.append(&mut mdat.data)?;
3289                            found = true;
3290                            break;
3291                        } else if mdat.contains_extent(&extent.extent_range) {
3292                            mdat.read_extent(&extent.extent_range, buf)?;
3293                            found = true;
3294                            break;
3295                        }
3296                    }
3297                    if !found {
3298                        return Err(Error::InvalidData("iloc contains an extent that is not in mdat"));
3299                    }
3300                }
3301                Ok(())
3302            },
3303            ConstructionMethod::Idat => {
3304                let idat_data = meta.idat.as_ref().ok_or(Error::InvalidData("idat box missing but construction_method is Idat"))?;
3305                for extent in loc.extents.iter() {
3306                    match &extent.extent_range {
3307                        ExtentRange::WithLength(range) => {
3308                            let start = usize::try_from(range.start).map_err(|_| Error::InvalidData("extent start too large"))?;
3309                            let end = usize::try_from(range.end).map_err(|_| Error::InvalidData("extent end too large"))?;
3310                            if end > idat_data.len() {
3311                                return Err(Error::InvalidData("extent exceeds idat size"));
3312                            }
3313                            buf.extend_from_slice(&idat_data[start..end]).map_err(|_| Error::OutOfMemory)?;
3314                        },
3315                        ExtentRange::ToEnd(range) => {
3316                            let start = usize::try_from(range.start).map_err(|_| Error::InvalidData("extent start too large"))?;
3317                            if start >= idat_data.len() {
3318                                return Err(Error::InvalidData("extent start exceeds idat size"));
3319                            }
3320                            buf.extend_from_slice(&idat_data[start..]).map_err(|_| Error::OutOfMemory)?;
3321                        },
3322                    }
3323                }
3324                Ok(())
3325            },
3326            ConstructionMethod::Item => {
3327                Err(Error::Unsupported("construction_method 'item' not supported"))
3328            },
3329        }
3330    };
3331
3332    // load data of relevant items
3333    // For grid images, we need to load tiles in the order specified by iref
3334    if is_grid {
3335        // Extract each tile in order
3336        for (idx, &tile_id) in tile_item_ids.iter().enumerate() {
3337            if idx % 16 == 0 {
3338                stop.check()?;
3339            }
3340
3341            let mut tile_data = TryVec::new();
3342
3343            if let Some(loc) = meta.iloc_items.iter().find(|loc| loc.item_id == tile_id) {
3344                extract_item_data(loc, &mut tile_data)?;
3345            } else {
3346                return Err(Error::InvalidData("grid tile not found in iloc"));
3347            }
3348
3349            context.grid_tiles.push(tile_data)?;
3350        }
3351
3352        // Set grid_config in context
3353        context.grid_config = grid_config;
3354    } else {
3355        // Standard single-frame AVIF: load primary_item and optional alpha_item
3356        for loc in meta.iloc_items.iter() {
3357            let item_data = if loc.item_id == meta.primary_item_id {
3358                &mut context.primary_item
3359            } else if Some(loc.item_id) == alpha_item_id {
3360                context.alpha_item.get_or_insert_with(TryVec::new)
3361            } else {
3362                continue;
3363            };
3364
3365            extract_item_data(loc, item_data)?;
3366        }
3367    }
3368
3369    // Extract EXIF and XMP items linked via cdsc references to the primary item
3370    for iref in meta.item_references.iter() {
3371        if iref.to_item_id != meta.primary_item_id || iref.item_type != b"cdsc" {
3372            continue;
3373        }
3374        let desc_item_id = iref.from_item_id;
3375        let Some(info) = meta.item_infos.iter().find(|i| i.item_id == desc_item_id) else {
3376            continue;
3377        };
3378        if info.item_type == b"Exif" {
3379            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == desc_item_id) {
3380                let mut raw = TryVec::new();
3381                extract_item_data(loc, &mut raw)?;
3382                // AVIF EXIF items start with a 4-byte big-endian offset to the TIFF header
3383                if raw.len() > 4 {
3384                    let offset = u32::from_be_bytes([raw[0], raw[1], raw[2], raw[3]]) as usize;
3385                    let start = 4 + offset;
3386                    if start < raw.len() {
3387                        let mut exif = TryVec::new();
3388                        exif.extend_from_slice(&raw[start..])?;
3389                        context.exif = Some(exif);
3390                    }
3391                }
3392            }
3393        } else if info.item_type == b"mime"
3394            && let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == desc_item_id)
3395        {
3396            let mut xmp = TryVec::new();
3397            extract_item_data(loc, &mut xmp)?;
3398            context.xmp = Some(xmp);
3399        }
3400    }
3401
3402    // Extract gain map (tmap derived image item)
3403    if let Some(tmap_info) = meta.item_infos.iter().find(|info| info.item_type == b"tmap") {
3404        let tmap_id = tmap_info.item_id;
3405
3406        let mut inputs: TryVec<(u32, u16)> = TryVec::new();
3407        for iref in meta.item_references.iter() {
3408            if iref.from_item_id == tmap_id && iref.item_type == b"dimg" {
3409                inputs.push((iref.to_item_id, iref.reference_index))?;
3410            }
3411        }
3412        inputs.sort_by_key(|&(_, idx)| idx);
3413
3414        if inputs.len() >= 2 && inputs[0].0 == meta.primary_item_id {
3415            let gmap_item_id = inputs[1].0;
3416
3417            // Read tmap item payload
3418            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == tmap_id) {
3419                let mut tmap_data = TryVec::new();
3420                extract_item_data(loc, &mut tmap_data)?;
3421                if let Ok(metadata) = parse_tone_map_image(&tmap_data) {
3422                    context.gain_map_metadata = Some(metadata);
3423                }
3424            }
3425
3426            // Read gain map image data
3427            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == gmap_item_id) {
3428                let mut gmap_data = TryVec::new();
3429                extract_item_data(loc, &mut gmap_data)?;
3430                context.gain_map_item = Some(gmap_data);
3431            }
3432
3433            // Get alternate color info from tmap item's properties
3434            context.gain_map_color_info = meta.properties.iter().find_map(|p| {
3435                if p.item_id == tmap_id {
3436                    match &p.property {
3437                        ItemProperty::ColorInformation(c) => Some(c.clone()),
3438                        _ => None,
3439                    }
3440                } else {
3441                    None
3442                }
3443            });
3444        }
3445    }
3446
3447    // Extract depth auxiliary image
3448    {
3449        let depth_item_id = meta
3450            .item_references
3451            .iter()
3452            .filter(|iref| {
3453                iref.to_item_id == meta.primary_item_id
3454                    && iref.from_item_id != meta.primary_item_id
3455                    && iref.item_type == b"auxl"
3456            })
3457            .map(|iref| iref.from_item_id)
3458            .find(|&item_id| {
3459                if alpha_item_id == Some(item_id) {
3460                    return false;
3461                }
3462                meta.properties.iter().any(|prop| {
3463                    prop.item_id == item_id
3464                        && match &prop.property {
3465                            ItemProperty::AuxiliaryType(urn) => {
3466                                is_depth_auxiliary_urn(urn.type_subtype().0)
3467                            }
3468                            _ => false,
3469                        }
3470                })
3471            });
3472
3473        if let Some(depth_id) = depth_item_id {
3474            if let Some(loc) = meta.iloc_items.iter().find(|l| l.item_id == depth_id) {
3475                let mut depth_data = TryVec::new();
3476                extract_item_data(loc, &mut depth_data)?;
3477                context.depth_item = Some(depth_data);
3478            }
3479            // Get dimensions from ispe
3480            if let Some((w, h)) = meta.properties.iter().find_map(|p| {
3481                if p.item_id == depth_id {
3482                    match &p.property {
3483                        ItemProperty::ImageSpatialExtents(e) => Some((e.width, e.height)),
3484                        _ => None,
3485                    }
3486                } else {
3487                    None
3488                }
3489            }) {
3490                context.depth_width = w;
3491                context.depth_height = h;
3492            }
3493            // Get av1C
3494            context.depth_av1_config = meta.properties.iter().find_map(|p| {
3495                if p.item_id == depth_id {
3496                    match &p.property {
3497                        ItemProperty::AV1Config(c) => Some(c.clone()),
3498                        _ => None,
3499                    }
3500                } else {
3501                    None
3502                }
3503            });
3504            // Get colr
3505            context.depth_color_info = meta.properties.iter().find_map(|p| {
3506                if p.item_id == depth_id {
3507                    match &p.property {
3508                        ItemProperty::ColorInformation(c) => Some(c.clone()),
3509                        _ => None,
3510                    }
3511                } else {
3512                    None
3513                }
3514            });
3515        }
3516    }
3517
3518    // Extract animation frames if this is an animated AVIF
3519    if let Some(anim) = animation_data {
3520        let frame_count = anim.color_sample_table.sample_sizes.len() as u32;
3521        tracker.validate_animation_frames(frame_count)?;
3522
3523        log::debug!("Animation: extracting frames (media_timescale={})", anim.color_timescale);
3524        match extract_animation_frames(&anim.color_sample_table, anim.color_timescale, &mut mdats) {
3525            Ok(frames) => {
3526                if !frames.is_empty() {
3527                    log::debug!("Animation: extracted {} frames", frames.len());
3528                    context.animation = Some(AnimationConfig {
3529                        loop_count: anim.loop_count,
3530                        frames,
3531                    });
3532                }
3533            }
3534            Err(e) => {
3535                log::warn!("Animation: failed to extract frames: {}", e);
3536            }
3537        }
3538    }
3539
3540    Ok(context)
3541}
3542
/// Read the contents of an AVIF file with custom parsing options
///
/// Uses unlimited resource limits for backwards compatibility.
///
/// # Arguments
///
/// * `f` - Reader for the AVIF file
/// * `options` - Parsing options (e.g., lenient mode)
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader_with_config()` with `DecodeConfig::lenient()` instead")]
#[allow(deprecated)]
pub fn read_avif_with_options<T: Read>(f: &mut T, options: &ParseOptions) -> Result<AvifData> {
    // Translate the legacy options into a DecodeConfig and delegate.
    read_avif_with_config(
        f,
        &DecodeConfig::unlimited().lenient(options.lenient),
        &Unstoppable,
    )
}
3558
/// Read the contents of an AVIF file
///
/// Metadata is accumulated and returned in [`AvifData`] struct.
/// Uses strict validation and unlimited resource limits by default.
///
/// For resource limits, use [`read_avif_with_config`].
/// For lenient parsing, use [`read_avif_with_options`].
#[cfg(feature = "eager")]
#[deprecated(since = "1.5.0", note = "Use `AvifParser::from_reader()` instead")]
#[allow(deprecated)]
pub fn read_avif<T: Read>(f: &mut T) -> Result<AvifData> {
    // Default options: strict (non-lenient) parsing.
    let default_options = ParseOptions::default();
    read_avif_with_options(f, &default_options)
}
3572
/// An entity group from a GroupsListBox (`grpl`).
///
/// See ISO 14496-12:2024 § 8.15.3.
#[allow(dead_code)] // Parsed for future altr group support
struct EntityGroup {
    // Grouping type, taken from the EntityToGroupBox's own box type.
    group_type: FourCC,
    // The group_id field read from the box.
    group_id: u32,
    // Entity ids belonging to this group, in the order they were declared.
    entity_ids: TryVec<u32>,
}
3582
3583/// Parse a GroupsListBox (`grpl`).
3584///
3585/// Each child box is an EntityToGroupBox with a grouping type given by its box type.
3586/// See ISO 14496-12:2024 § 8.15.3.
3587fn read_grpl<T: Read + Offset>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<EntityGroup>> {
3588    let mut groups = TryVec::new();
3589    let mut iter = src.box_iter();
3590    while let Some(mut b) = iter.next_box()? {
3591        let group_type = FourCC::from(u32::from(b.head.name));
3592        // Read version and flags (not validated per spec flexibility)
3593        let _version = b.read_u8()?;
3594        let mut flags_buf = [0u8; 3];
3595        b.read_exact(&mut flags_buf)?;
3596
3597        let group_id = be_u32(&mut b)?;
3598        let num_entities = be_u32(&mut b)?;
3599        // Each entity_id is 4 bytes
3600        if (num_entities as u64) * 4 > b.bytes_left() {
3601            return Err(Error::InvalidData(
3602                "grpl num_entities exceeds remaining box bytes",
3603            ));
3604        }
3605
3606        let mut entity_ids = TryVec::new();
3607        for _ in 0..num_entities {
3608            entity_ids.push(be_u32(&mut b)?)?;
3609        }
3610
3611        groups.push(EntityGroup {
3612            group_type,
3613            group_id,
3614            entity_ids,
3615        })?;
3616
3617        skip_box_remain(&mut b)?;
3618        check_parser_state(&b.head, &b.content)?;
3619    }
3620    Ok(groups)
3621}
3622
3623/// Parse a ToneMapImage (`tmap`) item payload into gain map metadata.
3624///
3625/// See ISO 21496-1:2025 for the payload format.
3626fn parse_tone_map_image(data: &[u8]) -> Result<GainMapMetadata> {
3627    let mut cursor = std::io::Cursor::new(data);
3628
3629    // version (u8) — must be 0
3630    let version = cursor.read_u8()?;
3631    if version != 0 {
3632        return Err(Error::Unsupported("tmap version"));
3633    }
3634
3635    // minimum_version (u16 BE) — must be 0
3636    let minimum_version = be_u16(&mut cursor)?;
3637    if minimum_version > 0 {
3638        return Err(Error::Unsupported("tmap minimum version"));
3639    }
3640
3641    // writer_version (u16 BE) — informational, must be >= minimum_version
3642    let writer_version = be_u16(&mut cursor)?;
3643    if writer_version < minimum_version {
3644        return Err(Error::InvalidData("tmap writer_version < minimum_version"));
3645    }
3646
3647    // Flags byte: is_multichannel (bit 7), use_base_colour_space (bit 6), reserved (bits 0-5)
3648    let flags = cursor.read_u8()?;
3649    let is_multichannel = (flags & 0x80) != 0;
3650    let use_base_colour_space = (flags & 0x40) != 0;
3651
3652    // base_hdr_headroom and alternate_hdr_headroom
3653    let base_hdr_headroom_n = be_u32(&mut cursor)?;
3654    let base_hdr_headroom_d = be_u32(&mut cursor)?;
3655    let alternate_hdr_headroom_n = be_u32(&mut cursor)?;
3656    let alternate_hdr_headroom_d = be_u32(&mut cursor)?;
3657
3658    let channel_count = if is_multichannel { 3 } else { 1 };
3659    let mut channels = [GainMapChannel {
3660        gain_map_min_n: 0, gain_map_min_d: 0,
3661        gain_map_max_n: 0, gain_map_max_d: 0,
3662        gamma_n: 0, gamma_d: 0,
3663        base_offset_n: 0, base_offset_d: 0,
3664        alternate_offset_n: 0, alternate_offset_d: 0,
3665    }; 3];
3666
3667    for ch in channels.iter_mut().take(channel_count) {
3668        ch.gain_map_min_n = be_i32(&mut cursor)?;
3669        ch.gain_map_min_d = be_u32(&mut cursor)?;
3670        ch.gain_map_max_n = be_i32(&mut cursor)?;
3671        ch.gain_map_max_d = be_u32(&mut cursor)?;
3672        ch.gamma_n = be_u32(&mut cursor)?;
3673        ch.gamma_d = be_u32(&mut cursor)?;
3674        ch.base_offset_n = be_i32(&mut cursor)?;
3675        ch.base_offset_d = be_u32(&mut cursor)?;
3676        ch.alternate_offset_n = be_i32(&mut cursor)?;
3677        ch.alternate_offset_d = be_u32(&mut cursor)?;
3678    }
3679
3680    // Copy channel 0 to channels 1 and 2 if single-channel
3681    if !is_multichannel {
3682        channels[1] = channels[0];
3683        channels[2] = channels[0];
3684    }
3685
3686    Ok(GainMapMetadata {
3687        is_multichannel,
3688        use_base_colour_space,
3689        base_hdr_headroom_n,
3690        base_hdr_headroom_d,
3691        alternate_hdr_headroom_n,
3692        alternate_hdr_headroom_d,
3693        channels,
3694    })
3695}
3696
3697/// Parse a metadata box in the context of an AVIF
3698/// Currently requires the primary item to be an av01 item type and generates
3699/// an error otherwise.
3700/// See ISO 14496-12:2015 § 8.11.1
3701fn read_avif_meta<T: Read + Offset>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<AvifInternalMeta> {
3702    let version = read_fullbox_version_no_flags(src, options)?;
3703
3704    if version != 0 {
3705        return Err(Error::Unsupported("unsupported meta version"));
3706    }
3707
3708    let mut primary_item_id = None;
3709    let mut item_infos = None;
3710    let mut iloc_items = None;
3711    let mut item_references = TryVec::new();
3712    let mut properties = TryVec::new();
3713    let mut idat = None;
3714    let mut entity_groups = TryVec::new();
3715
3716    let mut iter = src.box_iter();
3717    while let Some(mut b) = iter.next_box()? {
3718        match b.head.name {
3719            BoxType::ItemInfoBox => {
3720                if item_infos.is_some() {
3721                    return Err(Error::InvalidData("There should be zero or one iinf boxes per ISO 14496-12:2015 § 8.11.6.1"));
3722                }
3723                item_infos = Some(read_iinf(&mut b, options)?);
3724            },
3725            BoxType::ItemLocationBox => {
3726                if iloc_items.is_some() {
3727                    return Err(Error::InvalidData("There should be zero or one iloc boxes per ISO 14496-12:2015 § 8.11.3.1"));
3728                }
3729                iloc_items = Some(read_iloc(&mut b, options)?);
3730            },
3731            BoxType::PrimaryItemBox => {
3732                if primary_item_id.is_some() {
3733                    return Err(Error::InvalidData("There should be zero or one iloc boxes per ISO 14496-12:2015 § 8.11.4.1"));
3734                }
3735                primary_item_id = Some(read_pitm(&mut b, options)?);
3736            },
3737            BoxType::ImageReferenceBox => {
3738                item_references.append(&mut read_iref(&mut b, options)?)?;
3739            },
3740            BoxType::ImagePropertiesBox => {
3741                properties = read_iprp(&mut b, options)?;
3742            },
3743            BoxType::ItemDataBox => {
3744                if idat.is_some() {
3745                    return Err(Error::InvalidData("There should be zero or one idat boxes"));
3746                }
3747                idat = Some(b.read_into_try_vec()?);
3748            },
3749            BoxType::GroupsListBox => {
3750                entity_groups.append(&mut read_grpl(&mut b)?)?;
3751            },
3752            BoxType::HandlerBox => {
3753                let hdlr = read_hdlr(&mut b)?;
3754                if hdlr.handler_type != b"pict" {
3755                    warn!("hdlr handler_type: {}", hdlr.handler_type);
3756                    return Err(Error::InvalidData("meta handler_type must be 'pict' for AVIF"));
3757                }
3758            },
3759            _ => skip_box_content(&mut b)?,
3760        }
3761
3762        check_parser_state(&b.head, &b.content)?;
3763    }
3764
3765    let primary_item_id = primary_item_id.ok_or(Error::InvalidData("Required pitm box not present in meta box"))?;
3766
3767    let item_infos = item_infos.ok_or(Error::InvalidData("iinf missing"))?;
3768
3769    if let Some(item_info) = item_infos.iter().find(|x| x.item_id == primary_item_id) {
3770        // Allow both "av01" (standard single-frame) and "grid" (tiled) types
3771        if item_info.item_type != b"av01" && item_info.item_type != b"grid" {
3772            warn!("primary_item_id type: {}", item_info.item_type);
3773            return Err(Error::InvalidData("primary_item_id type is not av01 or grid"));
3774        }
3775    } else {
3776        return Err(Error::InvalidData("primary_item_id not present in iinf box"));
3777    }
3778
3779    Ok(AvifInternalMeta {
3780        properties,
3781        item_references,
3782        primary_item_id,
3783        iloc_items: iloc_items.ok_or(Error::InvalidData("iloc missing"))?,
3784        item_infos,
3785        idat,
3786        entity_groups,
3787    })
3788}
3789
3790/// Parse a Handler Reference Box
3791/// See ISO 14496-12:2015 § 8.4.3
3792fn read_hdlr<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<HandlerBox> {
3793    let (_version, _flags) = read_fullbox_extra(src)?;
3794    // pre_defined (4 bytes)
3795    skip(src, 4)?;
3796    // handler_type (4 bytes)
3797    let handler_type = be_u32(src)?;
3798    // reserved (3 × 4 bytes) + name (variable) — skip the rest
3799    skip_box_remain(src)?;
3800    Ok(HandlerBox {
3801        handler_type: FourCC::from(handler_type),
3802    })
3803}
3804
3805/// Parse a Primary Item Box
3806/// See ISO 14496-12:2015 § 8.11.4
3807fn read_pitm<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<u32> {
3808    let version = read_fullbox_version_no_flags(src, options)?;
3809
3810    let item_id = match version {
3811        0 => be_u16(src)?.into(),
3812        1 => be_u32(src)?,
3813        _ => return Err(Error::Unsupported("unsupported pitm version")),
3814    };
3815
3816    Ok(item_id)
3817}
3818
3819/// Parse an Item Information Box
3820/// See ISO 14496-12:2015 § 8.11.6
3821fn read_iinf<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<ItemInfoEntry>> {
3822    let version = read_fullbox_version_no_flags(src, options)?;
3823
3824    match version {
3825        0 | 1 => (),
3826        _ => return Err(Error::Unsupported("unsupported iinf version")),
3827    }
3828
3829    let entry_count = if version == 0 {
3830        be_u16(src)?.to_usize()
3831    } else {
3832        be_u32(src)?.to_usize()
3833    };
3834    // Cap pre-allocation: entry_count is untrusted, actual items come from box_iter
3835    let mut item_infos = TryVec::with_capacity(entry_count.min(4096))?;
3836
3837    let mut iter = src.box_iter();
3838    while let Some(mut b) = iter.next_box()? {
3839        if b.head.name != BoxType::ItemInfoEntry {
3840            return Err(Error::InvalidData("iinf box should contain only infe boxes"));
3841        }
3842
3843        item_infos.push(read_infe(&mut b)?)?;
3844
3845        check_parser_state(&b.head, &b.content)?;
3846    }
3847
3848    Ok(item_infos)
3849}
3850
3851/// Parse an Item Info Entry
3852/// See ISO 14496-12:2015 § 8.11.6.2
3853fn read_infe<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ItemInfoEntry> {
3854    // According to the standard, it seems the flags field should be 0, but
3855    // at least one sample AVIF image has a nonzero value.
3856    let (version, _) = read_fullbox_extra(src)?;
3857
3858    // mif1 brand (see ISO 23008-12:2017 § 10.2.1) only requires v2 and 3
3859    let item_id = match version {
3860        2 => be_u16(src)?.into(),
3861        3 => be_u32(src)?,
3862        _ => return Err(Error::Unsupported("unsupported version in 'infe' box")),
3863    };
3864
3865    let item_protection_index = be_u16(src)?;
3866
3867    if item_protection_index != 0 {
3868        return Err(Error::Unsupported("protected items (infe.item_protection_index != 0) are not supported"));
3869    }
3870
3871    let item_type = FourCC::from(be_u32(src)?);
3872    debug!("infe item_id {item_id} item_type: {item_type}");
3873
3874    // There are some additional fields here, but they're not of interest to us
3875    skip_box_remain(src)?;
3876
3877    Ok(ItemInfoEntry { item_id, item_type })
3878}
3879
3880fn read_iref<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<SingleItemTypeReferenceBox>> {
3881    let mut item_references = TryVec::new();
3882    let version = read_fullbox_version_no_flags(src, options)?;
3883    if version > 1 {
3884        return Err(Error::Unsupported("iref version"));
3885    }
3886
3887    let mut iter = src.box_iter();
3888    while let Some(mut b) = iter.next_box()? {
3889        let from_item_id = if version == 0 {
3890            be_u16(&mut b)?.into()
3891        } else {
3892            be_u32(&mut b)?
3893        };
3894        let reference_count = be_u16(&mut b)?;
3895        // Each to_item_id is 2 bytes (version 0) or 4 bytes (version 1)
3896        let bytes_per_ref: u64 = if version == 0 { 2 } else { 4 };
3897        if (reference_count as u64) * bytes_per_ref > b.bytes_left() {
3898            return Err(Error::InvalidData(
3899                "iref reference_count exceeds remaining box bytes",
3900            ));
3901        }
3902        for reference_index in 0..reference_count {
3903            let to_item_id = if version == 0 {
3904                be_u16(&mut b)?.into()
3905            } else {
3906                be_u32(&mut b)?
3907            };
3908            if from_item_id == to_item_id {
3909                return Err(Error::InvalidData("from_item_id and to_item_id must be different"));
3910            }
3911            item_references.push(SingleItemTypeReferenceBox {
3912                item_type: b.head.name.into(),
3913                from_item_id,
3914                to_item_id,
3915                reference_index,
3916            })?;
3917        }
3918        check_parser_state(&b.head, &b.content)?;
3919    }
3920    Ok(item_references)
3921}
3922
/// Properties that MUST be marked essential when associated with an item.
/// Enforced (unless lenient mode is enabled) in `read_iprp`.
/// See AVIF § 2.3.2.1.1 (a1op), HEIF § 6.5.11.1 (lsel), MIAF § 7.3.9 (clap, irot, imir).
const MUST_BE_ESSENTIAL: &[&[u8; 4]] = &[b"a1op", b"lsel", b"clap", b"irot", b"imir"];

/// Properties that MUST NOT be marked essential when associated with an item.
/// Enforced (unless lenient mode is enabled) in `read_iprp`.
/// See AVIF § 2.3.2.3.2 (a1lx).
const MUST_NOT_BE_ESSENTIAL: &[&[u8; 4]] = &[b"a1lx"];
3930
3931fn read_iprp<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<AssociatedProperty>> {
3932    let mut iter = src.box_iter();
3933    let mut properties = TryVec::new();
3934    let mut associations = TryVec::new();
3935
3936    while let Some(mut b) = iter.next_box()? {
3937        match b.head.name {
3938            BoxType::ItemPropertyContainerBox => {
3939                properties = read_ipco(&mut b, options)?;
3940            },
3941            BoxType::ItemPropertyAssociationBox => {
3942                associations = read_ipma(&mut b)?;
3943            },
3944            _ => return Err(Error::InvalidData("unexpected ipco child")),
3945        }
3946    }
3947
3948    let mut associated = TryVec::new();
3949    for a in associations {
3950        let index = match a.property_index {
3951            0 => {
3952                // property_index 0 means no association; essential must also be 0
3953                if a.essential {
3954                    return Err(Error::InvalidData(
3955                        "ipma property_index 0 must not be marked essential",
3956                    ));
3957                }
3958                continue;
3959            }
3960            x => x as usize - 1,
3961        };
3962
3963        let Some(entry) = properties.get(index) else {
3964            continue;
3965        };
3966
3967        let is_supported = entry.property != ItemProperty::Unsupported;
3968        let fourcc_bytes = &entry.fourcc.value;
3969
3970        if is_supported {
3971            // Validate essential flag for known property types
3972            if a.essential && MUST_NOT_BE_ESSENTIAL.contains(&fourcc_bytes) {
3973                warn!("item {} has {} marked essential (spec forbids it)", a.item_id, entry.fourcc);
3974                if !options.lenient {
3975                    return Err(Error::InvalidData(
3976                        "property must not be marked essential",
3977                    ));
3978                }
3979            }
3980            if !a.essential && MUST_BE_ESSENTIAL.contains(&fourcc_bytes) {
3981                warn!("item {} has {} not marked essential (spec requires it)", a.item_id, entry.fourcc);
3982                if !options.lenient {
3983                    return Err(Error::InvalidData(
3984                        "property must be marked essential",
3985                    ));
3986                }
3987            }
3988
3989            associated.push(AssociatedProperty {
3990                item_id: a.item_id,
3991                property: entry.property.try_clone()?,
3992            })?;
3993        } else if a.essential {
3994            // Unknown property marked essential — this item cannot be correctly processed
3995            warn!(
3996                "item {} has unsupported property {} marked essential; item will be unusable",
3997                a.item_id, entry.fourcc
3998            );
3999            if !options.lenient {
4000                return Err(Error::Unsupported(
4001                    "unsupported property marked as essential",
4002                ));
4003            }
4004        }
4005        // Unknown non-essential properties are silently skipped (they're optional)
4006    }
4007    Ok(associated)
4008}
4009
/// Image spatial extents (dimensions)
///
/// Parsed from the `ispe` property box; both values are validated to be
/// non-zero at parse time (see `read_ispe`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct ImageSpatialExtents {
    /// Image width in pixels.
    pub(crate) width: u32,
    /// Image height in pixels.
    pub(crate) height: u32,
}
4016
/// One parsed item property from the `ipco` (Item Property Container) box.
///
/// Each variant corresponds to a property box type handled by `read_ipco`;
/// boxes that cannot be parsed are recorded as [`ItemProperty::Unsupported`]
/// so that `ipma` property indices stay aligned with the container order.
#[derive(Debug, PartialEq)]
pub(crate) enum ItemProperty {
    /// From a Pixel Information box (`pixi`): one byte per channel, at most 16.
    Channels(ArrayVec<u8, 16>),
    /// From an Auxiliary Type box (`auxC`): raw auxiliary-type URN payload.
    AuxiliaryType(AuxiliaryTypeProperty),
    /// From an Image Spatial Extents box (`ispe`): width/height.
    ImageSpatialExtents(ImageSpatialExtents),
    /// From an Image Grid box: tiling configuration for grid-derived images.
    ImageGrid(GridConfig),
    /// From an AV1 Codec Configuration box (`av1C`).
    AV1Config(AV1Config),
    /// From a Colour Information box (`colr`): nclx triplet or ICC profile.
    ColorInformation(ColorInformation),
    /// From an Image Rotation box: rotation in 90-degree steps.
    Rotation(ImageRotation),
    /// From an Image Mirror box: mirror axis.
    Mirror(ImageMirror),
    /// From a Clean Aperture box: cropping rectangle as fractions.
    CleanAperture(CleanAperture),
    /// From a Pixel Aspect Ratio box.
    PixelAspectRatio(PixelAspectRatio),
    /// From a Content Light Level box (HDR metadata).
    ContentLightLevel(ContentLightLevel),
    /// From a Mastering Display Colour Volume box (HDR metadata).
    MasteringDisplayColourVolume(MasteringDisplayColourVolume),
    /// From a Content Colour Volume box (HDR metadata).
    ContentColourVolume(ContentColourVolume),
    /// From an Ambient Viewing Environment box (HDR metadata).
    AmbientViewingEnvironment(AmbientViewingEnvironment),
    /// From an Operating Point Selector box (`a1op`).
    OperatingPointSelector(OperatingPointSelector),
    /// From a Layer Selector box (`lsel`).
    LayerSelector(LayerSelector),
    /// From an AV1 Layered Image Indexing box (`a1lx`).
    AV1LayeredImageIndexing(AV1LayeredImageIndexing),
    /// Placeholder for any box we do not parse; keeps indices aligned.
    Unsupported,
}
4038
/// Fallible clone for [`ItemProperty`].
///
/// `AuxiliaryType` carries a growable buffer whose duplication can fail to
/// allocate, hence `try_clone`. `Copy` variants are duplicated with `*val`;
/// the remaining variants (`Channels`, `ImageGrid`, `AV1Config`,
/// `ColorInformation`) use their (infallible) `Clone` impls.
impl TryClone for ItemProperty {
    fn try_clone(&self) -> Result<Self, TryReserveError> {
        Ok(match self {
            Self::Channels(val) => Self::Channels(val.clone()),
            Self::AuxiliaryType(val) => Self::AuxiliaryType(val.try_clone()?),
            Self::ImageSpatialExtents(val) => Self::ImageSpatialExtents(*val),
            Self::ImageGrid(val) => Self::ImageGrid(val.clone()),
            Self::AV1Config(val) => Self::AV1Config(val.clone()),
            Self::ColorInformation(val) => Self::ColorInformation(val.clone()),
            Self::Rotation(val) => Self::Rotation(*val),
            Self::Mirror(val) => Self::Mirror(*val),
            Self::CleanAperture(val) => Self::CleanAperture(*val),
            Self::PixelAspectRatio(val) => Self::PixelAspectRatio(*val),
            Self::ContentLightLevel(val) => Self::ContentLightLevel(*val),
            Self::MasteringDisplayColourVolume(val) => Self::MasteringDisplayColourVolume(*val),
            Self::ContentColourVolume(val) => Self::ContentColourVolume(*val),
            Self::AmbientViewingEnvironment(val) => Self::AmbientViewingEnvironment(*val),
            Self::OperatingPointSelector(val) => Self::OperatingPointSelector(*val),
            Self::LayerSelector(val) => Self::LayerSelector(*val),
            Self::AV1LayeredImageIndexing(val) => Self::AV1LayeredImageIndexing(*val),
            Self::Unsupported => Self::Unsupported,
        })
    }
}
4063
/// One raw (item, property) association entry parsed from the `ipma` box.
struct Association {
    // ID of the item this property is associated with.
    item_id: u32,
    // True if the property is flagged essential (a reader that does not
    // understand it must not process the item).
    essential: bool,
    // 1-based index into the `ipco` property list; 0 means "no property".
    property_index: u16,
}
4069
/// A resolved item/property pair: an `ipma` association with its
/// `property_index` already looked up in the `ipco` property list.
pub(crate) struct AssociatedProperty {
    // Item the property applies to.
    pub item_id: u32,
    // Owned copy of the resolved property.
    pub property: ItemProperty,
}
4074
4075fn read_ipma<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<Association>> {
4076    let (version, flags) = read_fullbox_extra(src)?;
4077
4078    let mut associations = TryVec::new();
4079
4080    let entry_count = be_u32(src)?;
4081    // Each entry has at minimum: item_id (2 or 4 bytes) + association_count (1 byte)
4082    let min_bytes_per_entry: u64 = if version == 0 { 3 } else { 5 };
4083    if (entry_count as u64) * min_bytes_per_entry > src.bytes_left() {
4084        return Err(Error::InvalidData(
4085            "ipma entry_count exceeds remaining box bytes",
4086        ));
4087    }
4088    for _ in 0..entry_count {
4089        let item_id = if version == 0 {
4090            be_u16(src)?.into()
4091        } else {
4092            be_u32(src)?
4093        };
4094        let association_count = src.read_u8()?;
4095        for _ in 0..association_count {
4096            let num_association_bytes = if flags & 1 == 1 { 2 } else { 1 };
4097            let association = &mut [0; 2][..num_association_bytes];
4098            src.read_exact(association)?;
4099            let mut association = BitReader::new(association);
4100            let essential = association.read_bool()?;
4101            let property_index = association.read_u16(association.remaining().try_into()?)?;
4102            associations.push(Association {
4103                item_id,
4104                essential,
4105                property_index,
4106            })?;
4107        }
4108    }
4109    Ok(associations)
4110}
4111
/// A parsed property with its box FourCC, for essential flag validation.
struct IndexedProperty {
    // FourCC of the box this property was parsed from (e.g. for the
    // MUST_BE_ESSENTIAL / MUST_NOT_BE_ESSENTIAL lookups).
    fourcc: FourCC,
    // The parsed property, or `Unsupported` for unrecognized boxes.
    property: ItemProperty,
}
4117
4118fn read_ipco<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<IndexedProperty>> {
4119    let mut properties = TryVec::new();
4120
4121    let mut iter = src.box_iter();
4122    while let Some(mut b) = iter.next_box()? {
4123        let fourcc: FourCC = b.head.name.into();
4124        // Must push for every property to have correct index for them
4125        let prop = match b.head.name {
4126            BoxType::PixelInformationBox => ItemProperty::Channels(read_pixi(&mut b, options)?),
4127            BoxType::AuxiliaryTypeProperty => ItemProperty::AuxiliaryType(read_auxc(&mut b, options)?),
4128            BoxType::ImageSpatialExtentsBox => ItemProperty::ImageSpatialExtents(read_ispe(&mut b, options)?),
4129            BoxType::ImageGridBox => ItemProperty::ImageGrid(read_grid(&mut b, options)?),
4130            BoxType::AV1CodecConfigurationBox => ItemProperty::AV1Config(read_av1c(&mut b)?),
4131            BoxType::ColorInformationBox => {
4132                match read_colr(&mut b) {
4133                    Ok(colr) => ItemProperty::ColorInformation(colr),
4134                    Err(_) => ItemProperty::Unsupported,
4135                }
4136            },
4137            BoxType::ImageRotationBox => ItemProperty::Rotation(read_irot(&mut b)?),
4138            BoxType::ImageMirrorBox => ItemProperty::Mirror(read_imir(&mut b)?),
4139            BoxType::CleanApertureBox => ItemProperty::CleanAperture(read_clap(&mut b)?),
4140            BoxType::PixelAspectRatioBox => ItemProperty::PixelAspectRatio(read_pasp(&mut b)?),
4141            BoxType::ContentLightLevelBox => ItemProperty::ContentLightLevel(read_clli(&mut b)?),
4142            BoxType::MasteringDisplayColourVolumeBox => ItemProperty::MasteringDisplayColourVolume(read_mdcv(&mut b)?),
4143            BoxType::ContentColourVolumeBox => ItemProperty::ContentColourVolume(read_cclv(&mut b)?),
4144            BoxType::AmbientViewingEnvironmentBox => ItemProperty::AmbientViewingEnvironment(read_amve(&mut b)?),
4145            BoxType::OperatingPointSelectorBox => ItemProperty::OperatingPointSelector(read_a1op(&mut b)?),
4146            BoxType::LayerSelectorBox => ItemProperty::LayerSelector(read_lsel(&mut b)?),
4147            BoxType::AV1LayeredImageIndexingBox => ItemProperty::AV1LayeredImageIndexing(read_a1lx(&mut b)?),
4148            _ => {
4149                skip_box_remain(&mut b)?;
4150                ItemProperty::Unsupported
4151            },
4152        };
4153        properties.push(IndexedProperty { fourcc, property: prop })?;
4154    }
4155    Ok(properties)
4156}
4157
/// Parse a Pixel Information property box (pixi).
///
/// Reads up to 16 per-channel bytes into a fixed-capacity `ArrayVec`.
/// If the declared channel count exceeds 16, only 16 bytes are read;
/// NOTE(review): in that case the leftover bytes will presumably trip
/// `check_parser_state` below unless `options.lenient` skipped them — confirm.
fn read_pixi<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<ArrayVec<u8, 16>> {
    let version = read_fullbox_version_no_flags(src, options)?;
    if version != 0 {
        return Err(Error::Unsupported("pixi version"));
    }

    let num_channels = usize::from(src.read_u8()?);
    let mut channels = ArrayVec::new();
    // Clamp to the ArrayVec capacity (16) so we never over-extend; zero-fill
    // then overwrite via read_exact.
    let clamped = num_channels.min(channels.capacity());
    channels.extend((0..clamped).map(|_| 0));
    src.read_exact(&mut channels).map_err(|_| Error::InvalidData("invalid num_channels"))?;

    // In lenient mode, skip any extra bytes (e.g., extended_pixi.avif has 6 extra bytes)
    if options.lenient && src.bytes_left() > 0 {
        skip(src, src.bytes_left())?;
    }

    // Strict check that the box was fully consumed.
    check_parser_state(&src.head, &src.content)?;
    Ok(channels)
}
4178
/// Payload of an `auxC` (Auxiliary Type) property box.
#[derive(Debug, PartialEq)]
struct AuxiliaryTypeProperty {
    // Raw box payload: an aux-type URN, optionally followed by a NUL byte
    // and subtype bytes (split on demand by `type_subtype`).
    aux_data: TryString,
}
4183
4184impl AuxiliaryTypeProperty {
4185    #[must_use]
4186    fn type_subtype(&self) -> (&[u8], &[u8]) {
4187        let split = self.aux_data.iter().position(|&b| b == b'\0')
4188            .map(|pos| self.aux_data.split_at(pos));
4189        if let Some((aux_type, rest)) = split {
4190            (aux_type, &rest[1..])
4191        } else {
4192            (&self.aux_data, &[])
4193        }
4194    }
4195}
4196
/// Fallible clone: duplicating the `aux_data` buffer may fail to allocate.
impl TryClone for AuxiliaryTypeProperty {
    fn try_clone(&self) -> Result<Self, TryReserveError> {
        Ok(Self {
            aux_data: self.aux_data.try_clone()?,
        })
    }
}
4204
/// Parse an Auxiliary Type property box (auxC).
///
/// The payload is kept raw; [`AuxiliaryTypeProperty::type_subtype`] splits it
/// into the URN and optional subtype bytes on demand.
fn read_auxc<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<AuxiliaryTypeProperty> {
    let version = read_fullbox_version_no_flags(src, options)?;
    if version != 0 {
        return Err(Error::Unsupported("auxC version"));
    }

    // Everything left in the box is the aux-type data.
    let aux_data = src.read_into_try_vec()?;

    Ok(AuxiliaryTypeProperty { aux_data })
}
4215
/// Check if an auxiliary type URN identifies a depth auxiliary image.
///
/// Recognizes two standard URNs:
/// - `urn:mpeg:mpegB:cicp:systems:auxiliary:depth` (MPEG-B Part 23 / ISO 23091-2)
/// - `urn:mpeg:hevc:2015:auxid:2` (HEVC-style, auxid 2 = depth)
fn is_depth_auxiliary_urn(urn: &[u8]) -> bool {
    const DEPTH_URNS: [&[u8]; 2] = [
        b"urn:mpeg:mpegB:cicp:systems:auxiliary:depth",
        b"urn:mpeg:hevc:2015:auxid:2",
    ];
    DEPTH_URNS.iter().any(|&known| known == urn)
}
4225
/// Parse an AV1 Codec Configuration property box
/// See AV1-ISOBMFF § 2.3
///
/// Extracts profile/level/tier and bit-depth/chroma layout from the fixed
/// 4-byte prefix of the AV1CodecConfigurationRecord; trailing configOBUs
/// are skipped.
fn read_av1c<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AV1Config> {
    // av1C is NOT a FullBox — it has no version/flags
    let byte0 = src.read_u8()?;
    // Top bit is a constant marker; low 7 bits are the record version.
    let marker = byte0 >> 7;
    let version = byte0 & 0x7F;

    if marker != 1 {
        return Err(Error::InvalidData("av1C marker must be 1"));
    }
    if version != 1 {
        return Err(Error::Unsupported("av1C version must be 1"));
    }

    // seq_profile (3 bits) + seq_level_idx_0 (5 bits).
    let byte1 = src.read_u8()?;
    let profile = byte1 >> 5;
    let level = byte1 & 0x1F;

    // Packed flags: tier, two bit-depth flags, monochrome, chroma
    // subsampling x/y, and the 2-bit chroma sample position.
    let byte2 = src.read_u8()?;
    let tier = byte2 >> 7;
    let high_bitdepth = (byte2 >> 6) & 1;
    let twelve_bit = (byte2 >> 5) & 1;
    let monochrome = (byte2 >> 4) & 1 != 0;
    let chroma_subsampling_x = (byte2 >> 3) & 1;
    let chroma_subsampling_y = (byte2 >> 2) & 1;
    let chroma_sample_position = byte2 & 0x03;

    let byte3 = src.read_u8()?;
    // byte3: 3 bits reserved, 1 bit initial_presentation_delay_present, 4 bits delay/reserved
    // Not needed for image decoding.
    let _ = byte3;

    // Decode the two bit-depth flags into an actual depth: 8, 10, or 12.
    let bit_depth = if high_bitdepth != 0 {
        if twelve_bit != 0 { 12 } else { 10 }
    } else {
        8
    };

    // Skip any configOBUs (remainder of box)
    skip_box_remain(src)?;

    Ok(AV1Config {
        profile,
        level,
        tier,
        bit_depth,
        monochrome,
        chroma_subsampling_x,
        chroma_subsampling_y,
        chroma_sample_position,
    })
}
4279
/// Parse a Colour Information property box
/// See ISOBMFF § 12.1.5
///
/// Supports `nclx` (CICP code points) and embedded ICC profiles
/// (`rICC`/`prof`); any other colour_type is skipped and reported as
/// unsupported (the caller may demote this to `ItemProperty::Unsupported`).
fn read_colr<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ColorInformation> {
    // colr is NOT a FullBox — no version/flags
    let colour_type = be_u32(src)?;

    match &colour_type.to_be_bytes() {
        b"nclx" => {
            let color_primaries = be_u16(src)?;
            let transfer_characteristics = be_u16(src)?;
            let matrix_coefficients = be_u16(src)?;
            // full_range_flag is the top bit; the remaining 7 bits are reserved.
            let full_range_byte = src.read_u8()?;
            let full_range = (full_range_byte >> 7) != 0;
            // Skip any remaining bytes
            skip_box_remain(src)?;
            Ok(ColorInformation::Nclx {
                color_primaries,
                transfer_characteristics,
                matrix_coefficients,
                full_range,
            })
        }
        b"rICC" | b"prof" => {
            // The rest of the box is the ICC profile bytes.
            let icc_data = src.read_into_try_vec()?;
            Ok(ColorInformation::IccProfile(icc_data.to_vec()))
        }
        _ => {
            // Consume the box so the parent iterator stays aligned, then report.
            skip_box_remain(src)?;
            Err(Error::Unsupported("unsupported colr colour_type"))
        }
    }
}
4312
4313/// Parse an Image Rotation property box.
4314/// See ISOBMFF § 12.1.4. NOT a FullBox.
4315fn read_irot<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ImageRotation> {
4316    let byte = src.read_u8()?;
4317    let angle_code = byte & 0x03;
4318    let angle = match angle_code {
4319        0 => 0,
4320        1 => 90,
4321        2 => 180,
4322        _ => 270, // angle_code & 0x03 can only be 0..=3
4323    };
4324    skip_box_remain(src)?;
4325    Ok(ImageRotation { angle })
4326}
4327
4328/// Parse an Image Mirror property box.
4329/// See ISOBMFF § 12.1.4. NOT a FullBox.
4330fn read_imir<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ImageMirror> {
4331    let byte = src.read_u8()?;
4332    let axis = byte & 0x01;
4333    skip_box_remain(src)?;
4334    Ok(ImageMirror { axis })
4335}
4336
4337/// Parse a Clean Aperture property box.
4338/// See ISOBMFF § 12.1.4. NOT a FullBox.
4339fn read_clap<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<CleanAperture> {
4340    let width_n = be_u32(src)?;
4341    let width_d = be_u32(src)?;
4342    let height_n = be_u32(src)?;
4343    let height_d = be_u32(src)?;
4344    let horiz_off_n = be_i32(src)?;
4345    let horiz_off_d = be_u32(src)?;
4346    let vert_off_n = be_i32(src)?;
4347    let vert_off_d = be_u32(src)?;
4348    // Validate denominators are non-zero
4349    if width_d == 0 || height_d == 0 || horiz_off_d == 0 || vert_off_d == 0 {
4350        return Err(Error::InvalidData("clap denominator cannot be zero"));
4351    }
4352    skip_box_remain(src)?;
4353    Ok(CleanAperture {
4354        width_n, width_d,
4355        height_n, height_d,
4356        horiz_off_n, horiz_off_d,
4357        vert_off_n, vert_off_d,
4358    })
4359}
4360
4361/// Parse a Pixel Aspect Ratio property box.
4362/// See ISOBMFF § 12.1.4. NOT a FullBox.
4363fn read_pasp<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<PixelAspectRatio> {
4364    let h_spacing = be_u32(src)?;
4365    let v_spacing = be_u32(src)?;
4366    skip_box_remain(src)?;
4367    Ok(PixelAspectRatio { h_spacing, v_spacing })
4368}
4369
4370/// Parse a Content Light Level Info property box.
4371/// See ISOBMFF § 12.1.5 / ITU-T H.274. NOT a FullBox.
4372fn read_clli<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ContentLightLevel> {
4373    let max_content_light_level = be_u16(src)?;
4374    let max_pic_average_light_level = be_u16(src)?;
4375    skip_box_remain(src)?;
4376    Ok(ContentLightLevel {
4377        max_content_light_level,
4378        max_pic_average_light_level,
4379    })
4380}
4381
4382/// Parse a Mastering Display Colour Volume property box.
4383/// See ISOBMFF § 12.1.5 / SMPTE ST 2086. NOT a FullBox.
4384fn read_mdcv<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MasteringDisplayColourVolume> {
4385    // 3 primaries, each (x, y) as u16
4386    let primaries = [
4387        (be_u16(src)?, be_u16(src)?),
4388        (be_u16(src)?, be_u16(src)?),
4389        (be_u16(src)?, be_u16(src)?),
4390    ];
4391    let white_point = (be_u16(src)?, be_u16(src)?);
4392    let max_luminance = be_u32(src)?;
4393    let min_luminance = be_u32(src)?;
4394    skip_box_remain(src)?;
4395    Ok(MasteringDisplayColourVolume {
4396        primaries,
4397        white_point,
4398        max_luminance,
4399        min_luminance,
4400    })
4401}
4402
/// Parse a Content Colour Volume property box.
/// See ISOBMFF § 12.1.5 / H.265 D.2.40. NOT a FullBox.
///
/// A leading flags byte selects which of the optional fields follow.
/// NOTE(review): bit positions (0x20/0x10/0x08/0x04) are assumed to match the
/// ccv_* presence flags of the H.265 ccv SEI — confirm against the spec.
fn read_cclv<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<ContentColourVolume> {
    let flags = src.read_u8()?;
    let primaries_present = flags & 0x20 != 0;
    let min_lum_present = flags & 0x10 != 0;
    let max_lum_present = flags & 0x08 != 0;
    let avg_lum_present = flags & 0x04 != 0;

    // Optional fields appear in this fixed order, each only if its flag is set.
    let primaries = if primaries_present {
        Some([
            (be_i32(src)?, be_i32(src)?),
            (be_i32(src)?, be_i32(src)?),
            (be_i32(src)?, be_i32(src)?),
        ])
    } else {
        None
    };

    let min_luminance = if min_lum_present { Some(be_u32(src)?) } else { None };
    let max_luminance = if max_lum_present { Some(be_u32(src)?) } else { None };
    let avg_luminance = if avg_lum_present { Some(be_u32(src)?) } else { None };

    skip_box_remain(src)?;
    Ok(ContentColourVolume {
        primaries,
        min_luminance,
        max_luminance,
        avg_luminance,
    })
}
4434
4435/// Parse an Ambient Viewing Environment property box.
4436/// See ISOBMFF § 12.1.5 / H.265 D.2.39. NOT a FullBox.
4437fn read_amve<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AmbientViewingEnvironment> {
4438    let ambient_illuminance = be_u32(src)?;
4439    let ambient_light_x = be_u16(src)?;
4440    let ambient_light_y = be_u16(src)?;
4441    skip_box_remain(src)?;
4442    Ok(AmbientViewingEnvironment {
4443        ambient_illuminance,
4444        ambient_light_x,
4445        ambient_light_y,
4446    })
4447}
4448
4449/// Parse an Operating Point Selector property box.
4450/// See AVIF § 4.3.4. NOT a FullBox.
4451fn read_a1op<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<OperatingPointSelector> {
4452    let op_index = src.read_u8()?;
4453    if op_index > 31 {
4454        return Err(Error::InvalidData("a1op op_index must be 0..31"));
4455    }
4456    skip_box_remain(src)?;
4457    Ok(OperatingPointSelector { op_index })
4458}
4459
4460/// Parse a Layer Selector property box.
4461/// See HEIF (ISO 23008-12). NOT a FullBox.
4462fn read_lsel<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<LayerSelector> {
4463    let layer_id = be_u16(src)?;
4464    skip_box_remain(src)?;
4465    Ok(LayerSelector { layer_id })
4466}
4467
4468/// Parse an AV1 Layered Image Indexing property box.
4469/// See AVIF § 4.3.6. NOT a FullBox.
4470fn read_a1lx<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<AV1LayeredImageIndexing> {
4471    let flags = src.read_u8()?;
4472    let large_size = flags & 0x01 != 0;
4473    let layer_sizes = if large_size {
4474        [be_u32(src)?, be_u32(src)?, be_u32(src)?]
4475    } else {
4476        [u32::from(be_u16(src)?), u32::from(be_u16(src)?), u32::from(be_u16(src)?)]
4477    };
4478    skip_box_remain(src)?;
4479    Ok(AV1LayeredImageIndexing { layer_sizes })
4480}
4481
4482/// Parse an Image Spatial Extents property box
4483/// See ISO/IEC 23008-12:2017 § 6.5.3
4484fn read_ispe<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<ImageSpatialExtents> {
4485    let _version = read_fullbox_version_no_flags(src, options)?;
4486    // Version is always 0 for ispe
4487
4488    let width = be_u32(src)?;
4489    let height = be_u32(src)?;
4490
4491    // Validate dimensions are non-zero (0×0 images are invalid)
4492    if width == 0 || height == 0 {
4493        return Err(Error::InvalidData("ispe dimensions cannot be zero"));
4494    }
4495
4496    Ok(ImageSpatialExtents { width, height })
4497}
4498
4499/// Parse a Movie Header box (mvhd)
4500/// See ISO/IEC 14496-12:2015 § 8.2.2
4501fn read_mvhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MovieHeader> {
4502    let version = src.read_u8()?;
4503    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4504
4505    let (timescale, duration) = if version == 1 {
4506        let _creation_time = be_u64(src)?;
4507        let _modification_time = be_u64(src)?;
4508        let timescale = be_u32(src)?;
4509        let duration = be_u64(src)?;
4510        (timescale, duration)
4511    } else {
4512        let _creation_time = be_u32(src)?;
4513        let _modification_time = be_u32(src)?;
4514        let timescale = be_u32(src)?;
4515        let duration = be_u32(src)?;
4516        (timescale, duration as u64)
4517    };
4518
4519    // Skip rest of mvhd (rate, volume, matrix, etc.)
4520    skip_box_remain(src)?;
4521
4522    Ok(MovieHeader { _timescale: timescale, _duration: duration })
4523}
4524
4525/// Parse a Media Header box (mdhd)
4526/// See ISO/IEC 14496-12:2015 § 8.4.2
4527fn read_mdhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<MediaHeader> {
4528    let version = src.read_u8()?;
4529    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4530
4531    let (timescale, duration) = if version == 1 {
4532        let _creation_time = be_u64(src)?;
4533        let _modification_time = be_u64(src)?;
4534        let timescale = be_u32(src)?;
4535        let duration = be_u64(src)?;
4536        (timescale, duration)
4537    } else {
4538        let _creation_time = be_u32(src)?;
4539        let _modification_time = be_u32(src)?;
4540        let timescale = be_u32(src)?;
4541        let duration = be_u32(src)?;
4542        (timescale, duration as u64)
4543    };
4544
4545    // Skip language and pre_defined
4546    skip_box_remain(src)?;
4547
4548    Ok(MediaHeader { timescale, _duration: duration })
4549}
4550
4551/// Parse Time To Sample box (stts)
4552/// See ISO/IEC 14496-12:2015 § 8.6.1.2
4553fn read_stts<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<TimeToSampleEntry>> {
4554    let _version = src.read_u8()?;
4555    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4556    let entry_count = be_u32(src)?;
4557    // Each entry: sample_count (4) + sample_delta (4) = 8 bytes
4558    if (entry_count as u64) * 8 > src.bytes_left() {
4559        return Err(Error::InvalidData(
4560            "stts entry_count exceeds remaining box bytes",
4561        ));
4562    }
4563
4564    let mut entries = TryVec::new();
4565    for _ in 0..entry_count {
4566        entries.push(TimeToSampleEntry {
4567            sample_count: be_u32(src)?,
4568            sample_delta: be_u32(src)?,
4569        })?;
4570    }
4571
4572    Ok(entries)
4573}
4574
4575/// Parse Sample To Chunk box (stsc)
4576/// See ISO/IEC 14496-12:2015 § 8.7.4
4577fn read_stsc<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<SampleToChunkEntry>> {
4578    let _version = src.read_u8()?;
4579    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4580    let entry_count = be_u32(src)?;
4581    // Each entry: first_chunk (4) + samples_per_chunk (4) + sample_desc_index (4) = 12 bytes
4582    if (entry_count as u64) * 12 > src.bytes_left() {
4583        return Err(Error::InvalidData(
4584            "stsc entry_count exceeds remaining box bytes",
4585        ));
4586    }
4587
4588    let mut entries = TryVec::new();
4589    for _ in 0..entry_count {
4590        entries.push(SampleToChunkEntry {
4591            first_chunk: be_u32(src)?,
4592            samples_per_chunk: be_u32(src)?,
4593            _sample_description_index: be_u32(src)?,
4594        })?;
4595    }
4596
4597    Ok(entries)
4598}
4599
/// Parse Sample Size box (stsz)
/// See ISO/IEC 14496-12:2015 § 8.7.3
///
/// Returns one size (in bytes) per sample. A non-zero `sample_size` header
/// field means every sample shares that constant size; otherwise each size
/// is read from the per-sample table.
fn read_stsz<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<u32>> {
    let _version = src.read_u8()?;
    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
    let sample_size = be_u32(src)?;
    let sample_count = be_u32(src)?;

    // Cap sample_count to avoid multi-GB allocations from malformed data.
    // 64M entries * 4 bytes = 256 MB, a generous upper bound for real AVIF files.
    // (The constant-size branch below has no payload bytes to bound it, so
    // this cap is what limits its allocation.)
    const MAX_SAMPLE_COUNT: u32 = 64 * 1024 * 1024;
    if sample_count > MAX_SAMPLE_COUNT {
        return Err(Error::InvalidData("stsz sample_count exceeds maximum"));
    }

    let mut sizes = TryVec::new();
    if sample_size == 0 {
        // Variable sizes: each entry is 4 bytes
        if (sample_count as u64) * 4 > src.bytes_left() {
            return Err(Error::InvalidData(
                "stsz sample_count exceeds remaining box bytes",
            ));
        }
        // Variable sizes - read each one
        for _ in 0..sample_count {
            sizes.push(be_u32(src)?)?;
        }
    } else {
        // Constant size for all samples
        for _ in 0..sample_count {
            sizes.push(sample_size)?;
        }
    }

    Ok(sizes)
}
4636
4637/// Parse Chunk Offset box (stco or co64)
4638/// See ISO/IEC 14496-12:2015 § 8.7.5
4639fn read_chunk_offsets<T: Read>(src: &mut BMFFBox<'_, T>, is_64bit: bool) -> Result<TryVec<u64>> {
4640    let _version = src.read_u8()?;
4641    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4642    let entry_count = be_u32(src)?;
4643    let bytes_per_entry: u64 = if is_64bit { 8 } else { 4 };
4644    if (entry_count as u64) * bytes_per_entry > src.bytes_left() {
4645        return Err(Error::InvalidData(
4646            "chunk offset entry_count exceeds remaining box bytes",
4647        ));
4648    }
4649
4650    let mut offsets = TryVec::new();
4651    for _ in 0..entry_count {
4652        let offset = if is_64bit {
4653            be_u64(src)?
4654        } else {
4655            be_u32(src)? as u64
4656        };
4657        offsets.push(offset)?;
4658    }
4659
4660    Ok(offsets)
4661}
4662
/// Parse Sample Description box (stsd) to extract codec config from VisualSampleEntry.
/// See ISO/IEC 14496-12:2015 § 8.5.2
///
/// For AVIF sequences, the VisualSampleEntry is `av01` which contains sub-boxes
/// like `av1C` (codec config) and `colr` (color info), similar to ipco properties.
/// Non-`av01` entries and malformed entries are skipped; parsing stops at the
/// first entry that yields an av1C config.
fn read_stsd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TrackCodecConfig> {
    let _version = src.read_u8()?;
    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
    let entry_count = be_u32(src)?;

    let mut config = TrackCodecConfig::default();

    // Parse first entry only (AVIF tracks have one sample description)
    let mut iter = src.box_iter();
    for _ in 0..entry_count {
        // Tolerate entry_count overstating the actual number of child boxes.
        let Some(mut entry_box) = iter.next_box()? else {
            break;
        };

        // Check if this is an av01 VisualSampleEntry
        if entry_box.head.name != BoxType::AV1SampleEntry {
            skip_box_remain(&mut entry_box)?;
            continue;
        }

        // Skip VisualSampleEntry fixed fields (78 bytes total):
        //   reserved[6] + data_ref_index[2] + pre_defined[2] + reserved[2] +
        //   pre_defined[12] + width[2] + height[2] + horiz_res[4] + vert_res[4] +
        //   reserved[4] + frame_count[2] + compressorname[32] + depth[2] + pre_defined[2]
        const VISUAL_SAMPLE_ENTRY_SIZE: u64 = 78;
        // An entry too small to hold the fixed fields is malformed; skip it
        // rather than erroring out.
        if entry_box.bytes_left() < VISUAL_SAMPLE_ENTRY_SIZE {
            skip_box_remain(&mut entry_box)?;
            continue;
        }
        skip(&mut entry_box, VISUAL_SAMPLE_ENTRY_SIZE)?;

        // Parse sub-boxes within the VisualSampleEntry for av1C and colr
        let mut sub_iter = entry_box.box_iter();
        while let Some(mut sub_box) = sub_iter.next_box()? {
            match sub_box.head.name {
                BoxType::AV1CodecConfigurationBox => {
                    config.av1_config = Some(read_av1c(&mut sub_box)?);
                }
                BoxType::ColorInformationBox => {
                    // An unparsable colr is tolerated; just drain the box.
                    if let Ok(colr) = read_colr(&mut sub_box) {
                        config.color_info = Some(colr);
                    } else {
                        skip_box_remain(&mut sub_box)?;
                    }
                }
                _ => {
                    skip_box_remain(&mut sub_box)?;
                }
            }
        }

        // Only need the first av01 entry
        if config.av1_config.is_some() {
            break;
        }
    }

    Ok(config)
}
4727
/// Parse Sample Table box (stbl)
/// See ISO/IEC 14496-12:2015 § 8.5
///
/// Reads the child boxes (stsd/stts/stsc/stsz/stco/co64), then flattens the
/// chunk-based sample layout into one absolute byte offset per sample so
/// frame lookup is O(1). Returns the flattened table plus the codec config
/// found in stsd.
fn read_stbl<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<(SampleTable, TrackCodecConfig)> {
    let mut time_to_sample = TryVec::new();
    let mut sample_to_chunk = TryVec::new();
    let mut sample_sizes = TryVec::new();
    let mut chunk_offsets = TryVec::new();
    let mut codec_config = TrackCodecConfig::default();

    let mut iter = src.box_iter();
    while let Some(mut b) = iter.next_box()? {
        match b.head.name {
            BoxType::SampleDescriptionBox => {
                codec_config = read_stsd(&mut b)?;
            }
            BoxType::TimeToSampleBox => {
                time_to_sample = read_stts(&mut b)?;
            }
            BoxType::SampleToChunkBox => {
                sample_to_chunk = read_stsc(&mut b)?;
            }
            BoxType::SampleSizeBox => {
                sample_sizes = read_stsz(&mut b)?;
            }
            BoxType::ChunkOffsetBox => {
                chunk_offsets = read_chunk_offsets(&mut b, false)?;
            }
            BoxType::ChunkLargeOffsetBox => {
                chunk_offsets = read_chunk_offsets(&mut b, true)?;
            }
            _ => {
                skip_box_remain(&mut b)?;
            }
        }
    }

    // Precompute per-sample byte offsets from sample_to_chunk + chunk_offsets + sample_sizes.
    // This flattens the ISOBMFF indirection into a simple array for O(1) frame lookup.
    let mut sample_offsets = TryVec::new();
    let mut sample_idx = 0usize;
    for (i, entry) in sample_to_chunk.iter().enumerate() {
        // Each stsc entry applies to chunks [first_chunk, next entry's
        // first_chunk). The last entry runs to the end; the u32::MAX sentinel
        // is bounded in practice by the chunk_offsets lookup below.
        let next_first_chunk = sample_to_chunk
            .get(i + 1)
            .map(|e| e.first_chunk)
            .unwrap_or(u32::MAX);

        for chunk_no in entry.first_chunk..next_first_chunk {
            // Chunk numbers are 1-based; 0 is malformed, so abandon this entry.
            if chunk_no == 0 {
                break;
            }
            let co_idx = (chunk_no - 1) as usize;
            // Running off the end of the offset table ends this entry's run.
            let chunk_offset = match chunk_offsets.get(co_idx) {
                Some(&o) => o,
                None => break,
            };

            // Samples within a chunk are laid out back-to-back starting at
            // the chunk offset.
            let mut offset = chunk_offset;
            for _ in 0..entry.samples_per_chunk {
                if sample_idx >= sample_sizes.len() {
                    break;
                }
                sample_offsets.push(offset)?;
                offset += *sample_sizes.get(sample_idx)
                    .ok_or(Error::InvalidData("sample index mismatch"))? as u64;
                sample_idx += 1;
            }
        }
    }

    Ok((SampleTable {
        time_to_sample,
        sample_sizes,
        sample_offsets,
    }, codec_config))
}
4803
4804/// Parse Track Header box (tkhd)
4805/// See ISO/IEC 14496-12:2015 § 8.3.2
4806fn read_tkhd<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<u32> {
4807    let version = src.read_u8()?;
4808    let _flags = [src.read_u8()?, src.read_u8()?, src.read_u8()?];
4809
4810    let track_id = if version == 1 {
4811        let _creation_time = be_u64(src)?;
4812        let _modification_time = be_u64(src)?;
4813        let track_id = be_u32(src)?;
4814        let _reserved = be_u32(src)?;
4815        let _duration = be_u64(src)?;
4816        track_id
4817    } else {
4818        let _creation_time = be_u32(src)?;
4819        let _modification_time = be_u32(src)?;
4820        let track_id = be_u32(src)?;
4821        let _reserved = be_u32(src)?;
4822        let _duration = be_u32(src)?;
4823        track_id
4824    };
4825
4826    // Skip rest (reserved, layer, alternate_group, volume, matrix, width, height)
4827    skip_box_remain(src)?;
4828    Ok(track_id)
4829}
4830
4831/// Parse Track Reference box (tref)
4832/// See ISO/IEC 14496-12:2015 § 8.3.3
4833///
4834/// Contains sub-boxes typed by FourCC (e.g., `auxl`, `cdsc`), each with a list of track IDs.
4835fn read_tref<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<TrackReference>> {
4836    let mut refs = TryVec::new();
4837    let mut iter = src.box_iter();
4838    while let Some(mut b) = iter.next_box()? {
4839        let reference_type = FourCC::from(u32::from(b.head.name));
4840        let bytes_left = b.bytes_left();
4841        if bytes_left < 4 || bytes_left % 4 != 0 {
4842            skip_box_remain(&mut b)?;
4843            continue;
4844        }
4845        let count = bytes_left / 4;
4846        let mut track_ids = TryVec::new();
4847        for _ in 0..count {
4848            track_ids.push(be_u32(&mut b)?)?;
4849        }
4850        refs.push(TrackReference { reference_type, track_ids })?;
4851    }
4852    Ok(refs)
4853}
4854
4855/// Parse Edit List box (elst) to extract loop count from flags.
4856/// See ISO/IEC 14496-12:2015 § 8.6.6
4857///
4858/// Returns the loop count: flags bit 0 set = infinite looping (0), otherwise 1.
4859fn read_elst<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<u32> {
4860    let (version, flags) = read_fullbox_extra(src)?;
4861
4862    let entry_count = be_u32(src)?;
4863    // Skip all entries
4864    let entry_size: u64 = if version == 1 { 20 } else { 12 };
4865    skip(src, (entry_count as u64).checked_mul(entry_size)
4866        .ok_or(Error::InvalidData("edit list entry count overflow"))?)?;
4867    skip_box_remain(src)?;
4868
4869    // Bit 0 of flags: repeat (1 = infinite loop → loop_count=0, 0 = play once → loop_count=1)
4870    if flags & 1 != 0 {
4871        Ok(0) // infinite
4872    } else {
4873        Ok(1) // play once
4874    }
4875}
4876
4877/// Parse animation from moov box.
4878/// Returns all parsed tracks.
4879fn read_moov<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<TryVec<ParsedTrack>> {
4880    let mut tracks = TryVec::new();
4881
4882    let mut iter = src.box_iter();
4883    while let Some(mut b) = iter.next_box()? {
4884        match b.head.name {
4885            BoxType::MovieHeaderBox => {
4886                let _mvhd = read_mvhd(&mut b)?;
4887            }
4888            BoxType::TrackBox => {
4889                if let Some(track) = read_trak(&mut b)? {
4890                    tracks.push(track)?;
4891                }
4892            }
4893            _ => {
4894                skip_box_remain(&mut b)?;
4895            }
4896        }
4897    }
4898
4899    Ok(tracks)
4900}
4901
4902/// Parse track box (trak).
4903/// Returns a ParsedTrack if this track has a valid sample table.
4904fn read_trak<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<ParsedTrack>> {
4905    let mut track_id = 0u32;
4906    let mut references = TryVec::new();
4907    let mut loop_count = 1u32; // default: play once
4908    let mut mdia_result: Option<(FourCC, u32, SampleTable, TrackCodecConfig)> = None;
4909
4910    let mut iter = src.box_iter();
4911    while let Some(mut b) = iter.next_box()? {
4912        match b.head.name {
4913            BoxType::TrackHeaderBox => {
4914                track_id = read_tkhd(&mut b)?;
4915            }
4916            BoxType::TrackReferenceBox => {
4917                references = read_tref(&mut b)?;
4918            }
4919            BoxType::EditBox => {
4920                // Parse edts to find elst
4921                let mut edts_iter = b.box_iter();
4922                while let Some(mut eb) = edts_iter.next_box()? {
4923                    if eb.head.name == BoxType::EditListBox {
4924                        loop_count = read_elst(&mut eb)?;
4925                    } else {
4926                        skip_box_remain(&mut eb)?;
4927                    }
4928                }
4929            }
4930            BoxType::MediaBox => {
4931                mdia_result = read_mdia(&mut b)?;
4932            }
4933            _ => {
4934                skip_box_remain(&mut b)?;
4935            }
4936        }
4937    }
4938
4939    if let Some((handler_type, media_timescale, sample_table, codec_config)) = mdia_result {
4940        Ok(Some(ParsedTrack {
4941            track_id,
4942            handler_type,
4943            media_timescale,
4944            sample_table,
4945            references,
4946            loop_count,
4947            codec_config,
4948        }))
4949    } else {
4950        Ok(None)
4951    }
4952}
4953
4954/// Parse media box (mdia).
4955/// Returns (handler_type, media_timescale, sample_table, codec_config) if valid.
4956fn read_mdia<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<(FourCC, u32, SampleTable, TrackCodecConfig)>> {
4957    let mut media_timescale = 1000; // default
4958    let mut handler_type = FourCC::default();
4959    let mut stbl_result: Option<(SampleTable, TrackCodecConfig)> = None;
4960
4961    let mut iter = src.box_iter();
4962    while let Some(mut b) = iter.next_box()? {
4963        match b.head.name {
4964            BoxType::MediaHeaderBox => {
4965                let mdhd = read_mdhd(&mut b)?;
4966                media_timescale = mdhd.timescale;
4967            }
4968            BoxType::HandlerBox => {
4969                let hdlr = read_hdlr(&mut b)?;
4970                handler_type = hdlr.handler_type;
4971            }
4972            BoxType::MediaInformationBox => {
4973                stbl_result = read_minf(&mut b)?;
4974            }
4975            _ => {
4976                skip_box_remain(&mut b)?;
4977            }
4978        }
4979    }
4980
4981    if let Some((stbl, codec_config)) = stbl_result {
4982        Ok(Some((handler_type, media_timescale, stbl, codec_config)))
4983    } else {
4984        Ok(None)
4985    }
4986}
4987
4988/// Associate parsed tracks into color + optional alpha animation data.
4989///
4990/// - Color track: first with handler `pict` (fallback: first track with a sample table)
4991/// - Alpha track: handler `auxv` with `tref/auxl` referencing color's track_id
4992/// - Audio tracks (handler `soun`) are skipped
4993fn associate_tracks(tracks: TryVec<ParsedTrack>) -> Result<ParsedAnimationData> {
4994    // Find color track: first with handler_type == "pict"
4995    let color_idx = tracks
4996        .iter()
4997        .position(|t| t.handler_type == b"pict")
4998        .or_else(|| {
4999            // Fallback: first track that isn't audio
5000            tracks.iter().position(|t| t.handler_type != b"soun")
5001        })
5002        .ok_or(Error::InvalidData("no color track found in moov"))?;
5003
5004    let color_track = tracks.get(color_idx)
5005        .ok_or(Error::InvalidData("color track index out of bounds"))?;
5006    let color_track_id = color_track.track_id;
5007
5008    // Find alpha track: handler_type == "auxv" with tref/auxl referencing color track
5009    let alpha_idx = tracks.iter().position(|t| {
5010        t.handler_type == b"auxv"
5011            && t.references.iter().any(|r| {
5012                r.reference_type == b"auxl"
5013                    && r.track_ids.iter().any(|&id| id == color_track_id)
5014            })
5015    });
5016
5017    if let Some(ai) = alpha_idx {
5018        let alpha_track = tracks.get(ai)
5019            .ok_or(Error::InvalidData("alpha track index out of bounds"))?;
5020        let color_track = tracks.get(color_idx)
5021            .ok_or(Error::InvalidData("color track index out of bounds"))?;
5022        let alpha_frames = alpha_track.sample_table.sample_sizes.len();
5023        let color_frames = color_track.sample_table.sample_sizes.len();
5024        if alpha_frames != color_frames {
5025            warn!(
5026                "alpha track has {} frames but color track has {} frames",
5027                alpha_frames, color_frames
5028            );
5029        }
5030    }
5031
5032    // Destructure — we need to consume the vec
5033    // Convert to a std vec so we can remove by index
5034    let mut tracks_vec: std::vec::Vec<ParsedTrack> = tracks.into_iter().collect();
5035
5036    // Remove alpha first if it has a higher index to avoid shifting
5037    let (color_track, alpha_track) = if let Some(ai) = alpha_idx {
5038        if ai > color_idx {
5039            let alpha = tracks_vec.remove(ai);
5040            let color = tracks_vec.remove(color_idx);
5041            (color, Some(alpha))
5042        } else {
5043            let color = tracks_vec.remove(color_idx);
5044            let alpha = tracks_vec.remove(ai);
5045            (color, Some(alpha))
5046        }
5047    } else {
5048        let color = tracks_vec.remove(color_idx);
5049        (color, None)
5050    };
5051
5052    let (alpha_timescale, alpha_sample_table) = match alpha_track {
5053        Some(t) => (Some(t.media_timescale), Some(t.sample_table)),
5054        None => (None, None),
5055    };
5056
5057    Ok(ParsedAnimationData {
5058        color_timescale: color_track.media_timescale,
5059        color_codec_config: color_track.codec_config,
5060        color_sample_table: color_track.sample_table,
5061        alpha_timescale,
5062        alpha_sample_table,
5063        loop_count: color_track.loop_count,
5064    })
5065}
5066
5067/// Parse media information box (minf)
5068fn read_minf<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<Option<(SampleTable, TrackCodecConfig)>> {
5069    let mut iter = src.box_iter();
5070    while let Some(mut b) = iter.next_box()? {
5071        if b.head.name == BoxType::SampleTableBox {
5072            return Ok(Some(read_stbl(&mut b)?));
5073        } else {
5074            skip_box_remain(&mut b)?;
5075        }
5076    }
5077    Ok(None)
5078}
5079
/// Extract animation frames using the sample table.
///
/// Flattens `time_to_sample` into per-frame durations (milliseconds), then
/// copies each sample's bytes out of whichever mdat contains its extent.
/// Frames whose data cannot be located are emitted with empty data and a
/// warning, matching the lenient behavior of the rest of this module.
#[cfg(feature = "eager")]
#[allow(deprecated)]
fn extract_animation_frames(
    sample_table: &SampleTable,
    media_timescale: u32,
    mdats: &mut [MediaDataBox],
) -> Result<TryVec<AnimationFrame>> {
    let mut frames = TryVec::new();

    // Calculate frame durations from time-to-sample. The per-run duration is
    // computed once and repeated sample_count times (it is loop-invariant).
    let mut frame_durations = TryVec::new();
    for entry in &sample_table.time_to_sample {
        let duration_ms = if media_timescale > 0 {
            ((entry.sample_delta as u64) * 1000) / (media_timescale as u64)
        } else {
            0 // degenerate timescale: durations are unknowable
        };
        // Saturate rather than fail on absurdly long durations.
        let duration_ms = u32::try_from(duration_ms).unwrap_or(u32::MAX);
        for _ in 0..entry.sample_count {
            frame_durations.push(duration_ms)?;
        }
    }

    // Extract each frame using precomputed sample offsets.
    for i in 0..sample_table.sample_sizes.len() {
        let sample_offset = *sample_table.sample_offsets.get(i)
            .ok_or(Error::InvalidData("sample offset index out of bounds"))?;
        let sample_size = *sample_table.sample_sizes.get(i)
            .ok_or(Error::InvalidData("sample size index out of bounds"))?;
        let duration_ms = frame_durations.get(i).copied().unwrap_or(0);

        // Offsets and sizes come from untrusted box data; reject extents
        // whose end would overflow instead of silently wrapping. This matches
        // the checked arithmetic used for iloc extents elsewhere in the file.
        let end = sample_offset
            .checked_add(sample_size as u64)
            .ok_or(Error::InvalidData("sample extent overflow"))?;

        let mut frame_data = TryVec::new();
        let mut found = false;

        for mdat in mdats.iter_mut() {
            let range = ExtentRange::WithLength(Range {
                start: sample_offset,
                end,
            });

            if mdat.contains_extent(&range) {
                mdat.read_extent(&range, &mut frame_data)?;
                found = true;
                break;
            }
        }

        if !found {
            log::warn!("Animation frame {} not found in mdat", i);
        }

        frames.push(AnimationFrame {
            data: frame_data,
            duration_ms,
        })?;
    }

    Ok(frames)
}
5139
5140/// Parse an ImageGrid property box
5141/// See ISO/IEC 23008-12:2017 § 6.6.2.3
5142fn read_grid<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<GridConfig> {
5143    let version = read_fullbox_version_no_flags(src, options)?;
5144    if version > 0 {
5145        return Err(Error::Unsupported("grid version > 0"));
5146    }
5147
5148    let flags_byte = src.read_u8()?;
5149    let rows = src.read_u8()?;
5150    let columns = src.read_u8()?;
5151
5152    // flags & 1 determines field size: 0 = 16-bit, 1 = 32-bit
5153    let (output_width, output_height) = if flags_byte & 1 == 0 {
5154        // 16-bit fields
5155        (u32::from(be_u16(src)?), u32::from(be_u16(src)?))
5156    } else {
5157        // 32-bit fields
5158        (be_u32(src)?, be_u32(src)?)
5159    };
5160
5161    Ok(GridConfig {
5162        rows,
5163        columns,
5164        output_width,
5165        output_height,
5166    })
5167}
5168
/// Parse an item location box inside a meta box
/// See ISO 14496-12:2015 § 8.11.3
///
/// Returns one `ItemLocationBoxItem` per item, each carrying the byte
/// extent(s) where that item's data lives, resolved against its base offset
/// and tagged with the construction method (file- or idat-relative).
fn read_iloc<T: Read>(src: &mut BMFFBox<'_, T>, options: &ParseOptions) -> Result<TryVec<ItemLocationBoxItem>> {
    let version: IlocVersion = read_fullbox_version_no_flags(src, options)?.try_into()?;

    // The rest of the box is bit-packed, so buffer it and parse with a BitReader.
    let iloc = src.read_into_try_vec()?;
    let mut iloc = BitReader::new(&iloc);

    // 4-bit size codes giving the width of the offset, length, and
    // base_offset fields used by every item that follows.
    let offset_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;
    let length_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;
    let base_offset_size: IlocFieldSize = iloc.read_u8(4)?.try_into()?;

    // index_size only exists in versions 1 and 2; version 0 has 4 reserved bits.
    let index_size: Option<IlocFieldSize> = match version {
        IlocVersion::One | IlocVersion::Two => Some(iloc.read_u8(4)?.try_into()?),
        IlocVersion::Zero => {
            let _reserved = iloc.read_u8(4)?;
            None
        },
    };

    // item_count (and item_id below) widen from 16 to 32 bits in version 2.
    let item_count = match version {
        IlocVersion::Zero | IlocVersion::One => iloc.read_u32(16)?,
        IlocVersion::Two => iloc.read_u32(32)?,
    };

    // Cap pre-allocation: item_count is untrusted, actual data is bounded by bitstream
    let mut items = TryVec::with_capacity(item_count.to_usize().min(4096))?;

    for _ in 0..item_count {
        let item_id = match version {
            IlocVersion::Zero | IlocVersion::One => iloc.read_u32(16)?,
            IlocVersion::Two => iloc.read_u32(32)?,
        };

        // The spec isn't entirely clear how an `iloc` should be interpreted for version 0,
        // which has no `construction_method` field. It does say:
        // "For maximum compatibility, version 0 of this box should be used in preference to
        //  version 1 with `construction_method==0`, or version 2 when possible."
        // We take this to imply version 0 can be interpreted as using file offsets.
        let construction_method = match version {
            IlocVersion::Zero => ConstructionMethod::File,
            IlocVersion::One | IlocVersion::Two => {
                let _reserved = iloc.read_u16(12)?;
                match iloc.read_u16(4)? {
                    0 => ConstructionMethod::File,
                    1 => ConstructionMethod::Idat,
                    2 => return Err(Error::Unsupported("construction_method 'item_offset' is not supported")),
                    _ => return Err(Error::InvalidData("construction_method is taken from the set 0, 1 or 2 per ISO 14496-12:2015 § 8.11.3.3")),
                }
            },
        };

        // A nonzero index would refer to a dref entry, i.e. data in another file.
        let data_reference_index = iloc.read_u16(16)?;

        if data_reference_index != 0 {
            return Err(Error::Unsupported("external file references (iloc.data_reference_index != 0) are not supported"));
        }

        let base_offset = iloc.read_u64(base_offset_size.to_bits())?;
        let extent_count = iloc.read_u16(16)?;

        if extent_count < 1 {
            return Err(Error::InvalidData("extent_count must have a value 1 or greater per ISO 14496-12:2015 § 8.11.3.3"));
        }

        // extent_count is a u16 here, so this pre-allocation is bounded.
        let mut extents = TryVec::with_capacity(extent_count.to_usize())?;

        for _ in 0..extent_count {
            // Parsed but currently ignored, see `ItemLocationBoxExtent`
            let _extent_index = match &index_size {
                None | Some(IlocFieldSize::Zero) => None,
                Some(index_size) => {
                    debug_assert!(version == IlocVersion::One || version == IlocVersion::Two);
                    Some(iloc.read_u64(index_size.to_bits())?)
                },
            };

            // Per ISO 14496-12:2015 § 8.11.3.1:
            // "If the offset is not identified (the field has a length of zero), then the
            //  beginning of the source (offset 0) is implied"
            // This behavior will follow from BitReader::read_u64(0) -> 0.
            let extent_offset = iloc.read_u64(offset_size.to_bits())?;
            let extent_length = iloc.read_u64(length_size.to_bits())?;

            // "If the length is not specified, or specified as zero, then the entire length of
            //  the source is implied" (ibid)
            // Offsets come from untrusted input, so the additions are checked.
            let start = base_offset
                .checked_add(extent_offset)
                .ok_or(Error::InvalidData("offset calculation overflow"))?;
            let extent_range = if extent_length == 0 {
                ExtentRange::ToEnd(RangeFrom { start })
            } else {
                let end = start
                    .checked_add(extent_length)
                    .ok_or(Error::InvalidData("end calculation overflow"))?;
                ExtentRange::WithLength(Range { start, end })
            };

            extents.push(ItemLocationBoxExtent { extent_range })?;
        }

        items.push(ItemLocationBoxItem { item_id, construction_method, extents })?;
    }

    // Every bit of the box must have been consumed; trailing data indicates
    // a malformed iloc.
    if iloc.remaining() == 0 {
        Ok(items)
    } else {
        Err(Error::InvalidData("invalid iloc size"))
    }
}
5279
5280/// Parse an ftyp box.
5281/// See ISO 14496-12:2015 § 4.3
5282fn read_ftyp<T: Read>(src: &mut BMFFBox<'_, T>) -> Result<FileTypeBox> {
5283    let major = be_u32(src)?;
5284    let minor = be_u32(src)?;
5285    let bytes_left = src.bytes_left();
5286    if !bytes_left.is_multiple_of(4) {
5287        return Err(Error::InvalidData("invalid ftyp size"));
5288    }
5289    // Is a brand_count of zero valid?
5290    let brand_count = bytes_left / 4;
5291    let mut brands = TryVec::with_capacity(brand_count.try_into()?)?;
5292    for _ in 0..brand_count {
5293        brands.push(be_u32(src)?.into())?;
5294    }
5295    Ok(FileTypeBox {
5296        major_brand: From::from(major),
5297        minor_version: minor,
5298        compatible_brands: brands,
5299    })
5300}
5301
5302#[cfg_attr(debug_assertions, track_caller)]
5303fn check_parser_state<T>(header: &BoxHeader, left: &Take<T>) -> Result<(), Error> {
5304    let limit = left.limit();
5305    // Allow fully consumed boxes, or size=0 boxes (where original size was u64::MAX)
5306    if limit == 0 || header.size == u64::MAX {
5307        Ok(())
5308    } else {
5309        Err(Error::InvalidData("unread box content or bad parser sync"))
5310    }
5311}
5312
5313/// Skip a number of bytes that we don't care to parse.
5314fn skip<T: Read>(src: &mut T, bytes: u64) -> Result<()> {
5315    std::io::copy(&mut src.take(bytes), &mut std::io::sink())?;
5316    Ok(())
5317}
5318
5319fn be_u16<T: ReadBytesExt>(src: &mut T) -> Result<u16> {
5320    src.read_u16::<byteorder::BigEndian>().map_err(From::from)
5321}
5322
5323fn be_u32<T: ReadBytesExt>(src: &mut T) -> Result<u32> {
5324    src.read_u32::<byteorder::BigEndian>().map_err(From::from)
5325}
5326
5327fn be_i32<T: ReadBytesExt>(src: &mut T) -> Result<i32> {
5328    src.read_i32::<byteorder::BigEndian>().map_err(From::from)
5329}
5330
5331fn be_u64<T: ReadBytesExt>(src: &mut T) -> Result<u64> {
5332    src.read_u64::<byteorder::BigEndian>().map_err(From::from)
5333}