// zenwebp/decoder/api.rs — public decoding API for the zenwebp WebP decoder.
1use alloc::string::String;
2use thiserror::Error;
3
/// Errors that can occur when attempting to decode a WebP image
///
/// Marked `#[non_exhaustive]`: new variants may be added in minor releases,
/// so downstream `match`es should keep a wildcard arm.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DecodeError {
    /// An IO error occurred while reading the file
    #[cfg(feature = "std")]
    #[error("IO Error: {0}")]
    IoError(#[from] std::io::Error),

    /// RIFF's "RIFF" signature not found or invalid
    #[error("Invalid RIFF signature: {0:x?}")]
    RiffSignatureInvalid([u8; 4]),

    /// WebP's "WEBP" signature not found or invalid
    #[error("Invalid WebP signature: {0:x?}")]
    WebpSignatureInvalid([u8; 4]),

    /// An expected chunk was missing
    #[error("An expected chunk was missing")]
    ChunkMissing,

    /// Chunk Header was incorrect or invalid in its usage
    #[error("Invalid Chunk header: {0:x?}")]
    ChunkHeaderInvalid([u8; 4]),

    /// The ALPH chunk preprocessing info flag was invalid
    #[error("Alpha chunk preprocessing flag invalid")]
    InvalidAlphaPreprocessing,

    /// Invalid compression method
    #[error("Invalid compression method")]
    InvalidCompressionMethod,

    /// Alpha chunk doesn't match the frame's size
    #[error("Alpha chunk size mismatch")]
    AlphaChunkSizeMismatch,

    /// Image is too large, either for the platform's pointer size or generally
    #[error("Image too large")]
    ImageTooLarge,

    /// Frame would go out of the canvas
    #[error("Frame outside image")]
    FrameOutsideImage,

    /// Signature of 0x2f not found
    #[error("Invalid lossless signature: {0:x?}")]
    LosslessSignatureInvalid(u8),

    /// Version Number was not zero
    #[error("Invalid lossless version number: {0}")]
    VersionNumberInvalid(u8),

    /// Invalid color cache bits
    #[error("Invalid color cache bits: {0}")]
    InvalidColorCacheBits(u8),

    /// An invalid Huffman code was encountered
    #[error("Invalid Huffman code")]
    HuffmanError,

    /// The bitstream was somehow corrupt
    #[error("Corrupt bitstream")]
    BitStreamError,

    /// The transforms specified were invalid
    #[error("Invalid transform")]
    TransformError,

    /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid
    #[error("Invalid VP8 magic: {0:x?}")]
    Vp8MagicInvalid([u8; 3]),

    /// VP8 Decoder initialisation wasn't provided with enough data
    #[error("Not enough VP8 init data")]
    NotEnoughInitData,

    /// At time of writing, only the YUV colour-space encoded as `0` is specified
    #[error("Invalid VP8 color space: {0}")]
    ColorSpaceInvalid(u8),

    /// LUMA prediction mode was not recognised
    #[error("Invalid VP8 luma prediction mode: {0}")]
    LumaPredictionModeInvalid(i8),

    /// Intra-prediction mode was not recognised
    #[error("Invalid VP8 intra prediction mode: {0}")]
    IntraPredictionModeInvalid(i8),

    /// Chroma prediction mode was not recognised
    #[error("Invalid VP8 chroma prediction mode: {0}")]
    ChromaPredictionModeInvalid(i8),

    /// Inconsistent image sizes
    #[error("Inconsistent image sizes")]
    InconsistentImageSizes,

    /// The file may be valid, but this crate doesn't support decoding it.
    #[error("Unsupported feature: {0}")]
    UnsupportedFeature(String),

    /// Invalid function call or parameter
    #[error("Invalid parameter: {0}")]
    InvalidParameter(String),

    /// Memory limit exceeded
    #[error("Memory limit exceeded")]
    MemoryLimitExceeded,

    /// Invalid chunk size
    #[error("Invalid chunk size")]
    InvalidChunkSize,

    /// No more frames in image
    #[error("No more frames")]
    NoMoreFrames,

    /// Decoding was cancelled via a [`enough::Stop`] token.
    #[error("Decoding cancelled: {0}")]
    Cancelled(enough::StopReason),
}
125
/// Result type alias using `At<DecodeError>` for automatic location tracking.
///
/// Errors wrapped in `At<>` automatically capture file and line information,
/// making debugging easier in production environments.
///
/// NOTE(review): `whereat::At` wraps the error value; confirm how callers are
/// expected to reach the inner [`DecodeError`] through the wrapper's API.
pub type DecodeResult<T> = core::result::Result<T, whereat::At<DecodeError>>;
131
132impl From<enough::StopReason> for DecodeError {
133    fn from(reason: enough::StopReason) -> Self {
134        Self::Cancelled(reason)
135    }
136}
137
138// Core decoder implementation using SliceReader for no_std compatibility
139use alloc::format;
140use alloc::vec;
141use alloc::vec::Vec;
142use core::num::NonZeroU16;
143use core::ops::Range;
144
145use hashbrown::HashMap;
146
147use super::extended::{self, get_alpha_predictor, read_alpha_chunk, WebPExtendedInfo};
148use super::lossless::LosslessDecoder;
149use super::vp8::Vp8Decoder;
150use crate::slice_reader::SliceReader;
151
/// All possible RIFF chunks in a WebP image file
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    RIFF,
    WEBP,
    VP8,
    VP8L,
    VP8X,
    ANIM,
    ANMF,
    ALPH,
    ICCP,
    EXIF,
    XMP,
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    /// Map a four-byte chunk identifier onto the matching variant.
    ///
    /// Identifiers that are not part of the WebP container format are kept
    /// verbatim inside [`WebPRiffChunk::Unknown`].
    pub(crate) const fn from_fourcc(fourcc: [u8; 4]) -> Self {
        match fourcc {
            [b'R', b'I', b'F', b'F'] => Self::RIFF,
            [b'W', b'E', b'B', b'P'] => Self::WEBP,
            [b'V', b'P', b'8', b' '] => Self::VP8,
            [b'V', b'P', b'8', b'L'] => Self::VP8L,
            [b'V', b'P', b'8', b'X'] => Self::VP8X,
            [b'A', b'N', b'I', b'M'] => Self::ANIM,
            [b'A', b'N', b'M', b'F'] => Self::ANMF,
            [b'A', b'L', b'P', b'H'] => Self::ALPH,
            [b'I', b'C', b'C', b'P'] => Self::ICCP,
            [b'E', b'X', b'I', b'F'] => Self::EXIF,
            [b'X', b'M', b'P', b' '] => Self::XMP,
            other => Self::Unknown(other),
        }
    }

    /// Inverse of [`Self::from_fourcc`]: recover the four-byte identifier.
    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            Self::Unknown(fourcc) => fourcc,
            Self::RIFF => *b"RIFF",
            Self::WEBP => *b"WEBP",
            Self::VP8 => *b"VP8 ",
            Self::VP8L => *b"VP8L",
            Self::VP8X => *b"VP8X",
            Self::ANIM => *b"ANIM",
            Self::ANMF => *b"ANMF",
            Self::ALPH => *b"ALPH",
            Self::ICCP => *b"ICCP",
            Self::EXIF => *b"EXIF",
            Self::XMP => *b"XMP ",
        }
    }

    /// True when the identifier is not one this decoder understands.
    pub(crate) const fn is_unknown(self) -> bool {
        matches!(self, Self::Unknown(_))
    }
}
209
210// enum WebPImage {
211//     Lossy(VP8Frame),
212//     Lossless(LosslessFrame),
213//     Extended(ExtendedImage),
214// }
215
/// Which flavour of WebP bitstream the file's primary image data uses.
enum ImageKind {
    /// VP8 (lossy) bitstream.
    Lossy,
    /// VP8L (lossless) bitstream.
    Lossless,
    /// VP8X extended file, carrying the parsed extended-header info.
    Extended(WebPExtendedInfo),
}
221
/// Mutable bookkeeping used while stepping through an animated WebP.
struct AnimationState {
    // Index of the frame that will be decoded next.
    next_frame: u32,
    // Byte offset in the stream where the next frame's ANMF chunk starts.
    next_frame_start: u64,
    // Whether the previous frame's region should be disposed before the
    // next frame is composited.
    dispose_next_frame: bool,
    // Geometry of the previously decoded frame, kept for disposal.
    previous_frame_width: u32,
    previous_frame_height: u32,
    previous_frame_x_offset: u32,
    previous_frame_y_offset: u32,
    // Lazily allocated canvas that frames are composited onto.
    canvas: Option<Vec<u8>>,
}
impl Default for AnimationState {
    fn default() -> Self {
        AnimationState {
            // The very first frame always starts from a cleared canvas.
            dispose_next_frame: true,
            canvas: None,
            next_frame: 0,
            next_frame_start: 0,
            previous_frame_width: 0,
            previous_frame_height: 0,
            previous_frame_x_offset: 0,
            previous_frame_y_offset: 0,
        }
    }
}
246
/// Number of times that an animation loops.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// The animation loops forever.
    Forever,
    /// Each frame of the animation is displayed the specified number of times.
    Times(NonZeroU16),
}

impl core::fmt::Display for LoopCount {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            LoopCount::Forever => f.write_str("infinite"),
            LoopCount::Times(n) => {
                // Singular for exactly one repetition, plural otherwise.
                let suffix = if n.get() == 1 { "" } else { "s" };
                write!(f, "{} time{}", n, suffix)
            }
        }
    }
}

impl From<u16> for LoopCount {
    /// Zero is the container's convention for "loop forever"; anything else
    /// is an explicit repetition count.
    fn from(n: u16) -> Self {
        NonZeroU16::new(n).map_or(LoopCount::Forever, LoopCount::Times)
    }
}
273
/// WebP decoder configuration. Reusable across requests.
///
/// Construct with [`DecodeConfig::default`], customise via the builder
/// methods, then pass by reference to `DecodeRequest::new`.
#[derive(Clone, Debug, PartialEq)]
pub struct DecodeConfig {
    /// Upsampling method for lossy chroma reconstruction. Default: `Bilinear`.
    pub upsampling: UpsamplingMethod,

    /// Decode limits for dimensions, memory, frame count, etc.
    pub limits: super::limits::Limits,
}
283
284impl Default for DecodeConfig {
285    fn default() -> Self {
286        Self {
287            upsampling: UpsamplingMethod::Bilinear,
288            limits: super::limits::Limits::default(),
289        }
290    }
291}
292
293impl DecodeConfig {
294    /// Set the upsampling method.
295    #[must_use]
296    pub fn upsampling(mut self, method: UpsamplingMethod) -> Self {
297        self.upsampling = method;
298        self
299    }
300
301    /// Set decode limits.
302    #[must_use]
303    pub fn limits(mut self, limits: super::limits::Limits) -> Self {
304        self.limits = limits;
305        self
306    }
307
308    /// Set maximum dimensions.
309    #[must_use]
310    pub fn max_dimensions(mut self, width: u32, height: u32) -> Self {
311        self.limits = self.limits.max_dimensions(width, height);
312        self
313    }
314
315    /// Set maximum memory usage.
316    #[must_use]
317    pub fn max_memory(mut self, bytes: u64) -> Self {
318        self.limits = self.limits.max_memory(bytes);
319        self
320    }
321
322    /// Disable fancy upsampling.
323    #[must_use]
324    pub fn no_fancy_upsampling(mut self) -> Self {
325        self.upsampling = UpsamplingMethod::Simple;
326        self
327    }
328
329    pub(crate) fn to_options(&self) -> WebPDecodeOptions {
330        WebPDecodeOptions {
331            lossy_upsampling: self.upsampling,
332        }
333    }
334}
335
/// Decoding request that borrows configuration and input data.
///
/// # Example
///
/// ```rust,no_run
/// use zenwebp::{DecodeConfig, DecodeRequest};
///
/// let config = DecodeConfig::default();
/// let webp_data: &[u8] = &[]; // your WebP data
/// let (pixels, w, h) = DecodeRequest::new(&config, webp_data).decode_rgba()?;
/// # Ok::<(), zenwebp::DecodeError>(())
/// ```
pub struct DecodeRequest<'a> {
    // Shared configuration (upsampling method + limits).
    config: &'a DecodeConfig,
    // Raw WebP container bytes to decode.
    data: &'a [u8],
    // Optional cooperative-cancellation token forwarded to the decoder.
    stop: Option<&'a dyn enough::Stop>,
    // Row stride in pixels for the `_into` methods; `None` means packed rows.
    stride_pixels: Option<u32>,
}
354
355impl<'a> DecodeRequest<'a> {
356    /// Create a new decoding request.
357    #[must_use]
358    pub fn new(config: &'a DecodeConfig, data: &'a [u8]) -> Self {
359        Self {
360            config,
361            data,
362            stop: None,
363            stride_pixels: None,
364        }
365    }
366
367    /// Set a cooperative cancellation token.
368    #[must_use]
369    pub fn stop(mut self, stop: &'a dyn enough::Stop) -> Self {
370        self.stop = Some(stop);
371        self
372    }
373
374    /// Set row stride in pixels for `_into` methods. Must be >= image width.
375    #[must_use]
376    pub fn stride(mut self, stride_pixels: u32) -> Self {
377        self.stride_pixels = Some(stride_pixels);
378        self
379    }
380
381    /// Decode to the image's native pixel format (RGB or RGBA).
382    ///
383    /// Returns RGBA if the image has alpha, RGB otherwise. This avoids
384    /// both unnecessary alpha expansion and alpha stripping.
385    ///
386    /// The returned [`PixelLayout`](crate::PixelLayout) indicates the format.
387    pub fn decode(self) -> Result<(Vec<u8>, u32, u32, crate::PixelLayout), DecodeError> {
388        let mut decoder = WebPDecoder::new_with_options(self.data, self.config.to_options())?;
389        decoder.set_limits(self.config.limits.clone());
390        decoder.set_stop(self.stop);
391        let (w, h) = decoder.dimensions();
392        let output_size = decoder
393            .output_buffer_size()
394            .ok_or(DecodeError::ImageTooLarge)?;
395        let mut pixels = alloc::vec![0u8; output_size];
396        decoder.read_image(&mut pixels)?;
397        let layout = if decoder.has_alpha() {
398            crate::PixelLayout::Rgba8
399        } else {
400            crate::PixelLayout::Rgb8
401        };
402        Ok((pixels, w, h, layout))
403    }
404
405    /// Decode to RGBA pixels. If the image has no alpha channel, alpha is set to 255.
406    pub fn decode_rgba(self) -> Result<(Vec<u8>, u32, u32), DecodeError> {
407        let mut decoder = WebPDecoder::new_with_options(self.data, self.config.to_options())?;
408        decoder.set_limits(self.config.limits.clone());
409        decoder.set_stop(self.stop);
410        let (w, h) = decoder.dimensions();
411        let output_size = decoder
412            .output_buffer_size()
413            .ok_or(DecodeError::ImageTooLarge)?;
414        let mut native = alloc::vec![0u8; output_size];
415        decoder.read_image(&mut native)?;
416
417        if decoder.has_alpha() {
418            Ok((native, w, h))
419        } else {
420            // Expand RGB to RGBA
421            let pixel_count = (w as usize) * (h as usize);
422            let mut rgba = Vec::with_capacity(pixel_count * 4);
423            for chunk in native.chunks_exact(3) {
424                rgba.extend_from_slice(chunk);
425                rgba.push(255);
426            }
427            Ok((rgba, w, h))
428        }
429    }
430
431    /// Decode to RGB pixels (no alpha). If the image has alpha, it is discarded.
432    pub fn decode_rgb(self) -> Result<(Vec<u8>, u32, u32), DecodeError> {
433        let mut decoder = WebPDecoder::new_with_options(self.data, self.config.to_options())?;
434        decoder.set_limits(self.config.limits.clone());
435        decoder.set_stop(self.stop);
436        let (w, h) = decoder.dimensions();
437        let output_size = decoder
438            .output_buffer_size()
439            .ok_or(DecodeError::ImageTooLarge)?;
440        let mut native = alloc::vec![0u8; output_size];
441        decoder.read_image(&mut native)?;
442
443        if !decoder.has_alpha() {
444            Ok((native, w, h))
445        } else {
446            // Strip alpha from RGBA
447            let pixel_count = (w as usize) * (h as usize);
448            let mut rgb = Vec::with_capacity(pixel_count * 3);
449            for chunk in native.chunks_exact(4) {
450                rgb.extend_from_slice(&chunk[..3]);
451            }
452            Ok((rgb, w, h))
453        }
454    }
455
456    /// Decode to RGBA, writing into a pre-allocated buffer.
457    ///
458    /// If [`stride`](Self::stride) is set, rows are written with that pixel stride.
459    /// Otherwise rows are packed (stride == width).
460    pub fn decode_rgba_into(self, output: &mut [u8]) -> Result<(u32, u32), DecodeError> {
461        let mut decoder = WebPDecoder::new_with_options(self.data, self.config.to_options())?;
462        decoder.set_limits(self.config.limits.clone());
463        decoder.set_stop(self.stop);
464        let (w, h) = decoder.dimensions();
465
466        if let Some(stride_px) = self.stride_pixels {
467            if stride_px < w {
468                return Err(DecodeError::InvalidParameter(format!(
469                    "stride_pixels {} < width {}",
470                    stride_px, w
471                )));
472            }
473            let stride_bytes = stride_px as usize * 4;
474            let required = stride_bytes * h as usize;
475            if output.len() < required {
476                return Err(DecodeError::InvalidParameter(format!(
477                    "output buffer too small: got {}, need {}",
478                    output.len(),
479                    required
480                )));
481            }
482            // Decode to temp, then scatter with stride
483            let buf_size = decoder
484                .output_buffer_size()
485                .ok_or(DecodeError::ImageTooLarge)?;
486            let mut temp = alloc::vec![0u8; buf_size];
487            decoder.read_image(&mut temp)?;
488            let has_alpha = decoder.has_alpha();
489            let src_bpp = if has_alpha { 4 } else { 3 };
490            for y in 0..(h as usize) {
491                let dst_row = &mut output[y * stride_bytes..][..w as usize * 4];
492                let src_row = &temp[y * (w as usize) * src_bpp..][..w as usize * src_bpp];
493                if has_alpha {
494                    dst_row.copy_from_slice(src_row);
495                } else {
496                    for (dst, src) in dst_row.chunks_exact_mut(4).zip(src_row.chunks_exact(3)) {
497                        dst[..3].copy_from_slice(src);
498                        dst[3] = 255;
499                    }
500                }
501            }
502        } else {
503            decoder.read_image(output)?;
504        }
505        Ok((w, h))
506    }
507
508    /// Decode to RGB, writing into a pre-allocated buffer.
509    ///
510    /// If [`stride`](Self::stride) is set, rows are written with that pixel stride.
511    /// Otherwise rows are packed (stride == width).
512    pub fn decode_rgb_into(self, output: &mut [u8]) -> Result<(u32, u32), DecodeError> {
513        let (w, h) = {
514            let mut decoder = WebPDecoder::new_with_options(self.data, self.config.to_options())?;
515            decoder.set_limits(self.config.limits.clone());
516            decoder.set_stop(self.stop);
517            let dims = decoder.dimensions();
518            let buf_size = decoder
519                .output_buffer_size()
520                .ok_or(DecodeError::ImageTooLarge)?;
521            let mut rgba = alloc::vec![0u8; buf_size];
522            decoder.read_image(&mut rgba)?;
523
524            let stride_px = self.stride_pixels.unwrap_or(dims.0) as usize;
525            if stride_px < dims.0 as usize {
526                return Err(DecodeError::InvalidParameter(alloc::format!(
527                    "stride_pixels {} < width {}",
528                    stride_px,
529                    dims.0
530                )));
531            }
532            let stride_bytes = stride_px * 3;
533            let required = stride_bytes * dims.1 as usize;
534            if output.len() < required {
535                return Err(DecodeError::InvalidParameter(alloc::format!(
536                    "output buffer too small: got {}, need {}",
537                    output.len(),
538                    required
539                )));
540            }
541            for y in 0..(dims.1 as usize) {
542                let dst_row = &mut output[y * stride_bytes..][..dims.0 as usize * 3];
543                for (i, chunk) in rgba[y * dims.0 as usize * 4..(y + 1) * dims.0 as usize * 4]
544                    .chunks_exact(4)
545                    .enumerate()
546                {
547                    dst_row[i * 3] = chunk[0];
548                    dst_row[i * 3 + 1] = chunk[1];
549                    dst_row[i * 3 + 2] = chunk[2];
550                }
551            }
552            dims
553        };
554        Ok((w, h))
555    }
556
557    /// Read image info without decoding pixel data.
558    pub fn info(self) -> Result<ImageInfo, DecodeError> {
559        ImageInfo::from_webp(self.data)
560    }
561
562    /// Decode to YUV 4:2:0 planes (lossy only).
563    pub fn decode_yuv420(self) -> Result<YuvPlanes, DecodeError> {
564        decode_yuv420(self.data)
565    }
566}
567
568/// WebP decoder configuration options (internal, used by AnimationDecoder)
569#[derive(Clone)]
570#[non_exhaustive]
571pub(crate) struct WebPDecodeOptions {
572    /// The upsampling method used in conversion from lossy yuv to rgb
573    pub lossy_upsampling: UpsamplingMethod,
574}
575
576impl Default for WebPDecodeOptions {
577    fn default() -> Self {
578        Self {
579            lossy_upsampling: UpsamplingMethod::Bilinear,
580        }
581    }
582}
583
/// Methods for upsampling the chroma values in lossy decoding
///
/// The chroma red and blue planes are encoded in VP8 at half the size of the
/// luma plane, so the decoder must upsample them to give every output pixel
/// a chroma value.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum UpsamplingMethod {
    /// Fancy upsampling
    ///
    /// Does bilinear interpolation using the 4 values nearest to the pixel, weighting based on the distance
    /// from the pixel.
    #[default]
    Bilinear,
    /// Simple upsampling, just uses the closest u/v value to the pixel when upsampling
    ///
    /// Matches the -nofancy option in dwebp.
    /// Should be faster but may lead to slightly jagged edges.
    Simple,
}
602
/// WebP image format decoder.
pub struct WebPDecoder<'a> {
    // Cursor over the raw WebP container bytes.
    r: SliceReader<'a>,
    // Byte cap applied when materialising individual chunks.
    memory_limit: usize,
    // User-configurable decode limits (dimensions, memory, ...).
    limits: super::limits::Limits,

    // Canvas dimensions in pixels, parsed from the bitstream header.
    width: u32,
    height: u32,

    // Which bitstream flavour the file's primary image data uses.
    kind: ImageKind,
    // Bookkeeping for stepping through animation frames.
    animation: AnimationState,

    // True when a VP8 chunk was found, or (for animations) a VP8/ALPH
    // subchunk inside an ANMF frame.
    is_lossy: bool,
    has_alpha: bool,
    // Number of ANMF chunks found (0 for still images).
    num_frames: u32,
    loop_count: LoopCount,
    // Sum of all ANMF frame durations; units per the WebP spec are
    // milliseconds — TODO confirm.
    loop_duration: u64,

    // Byte ranges of the chunks discovered while parsing the container.
    chunks: HashMap<WebPRiffChunk, Range<u64>>,

    webp_decode_options: WebPDecodeOptions,

    // Optional cooperative-cancellation token checked during decoding.
    stop: Option<&'a dyn enough::Stop>,
}
627
628impl<'a> WebPDecoder<'a> {
629    /// Create a new `WebPDecoder` from the data slice (alias for [`new`](Self::new)).
630    ///
631    /// This method parses the WebP headers and prepares for decoding. Use [`info()`](Self::info)
632    /// to inspect metadata before calling decode methods.
633    ///
634    /// # Example - Two-phase decoding
635    ///
636    /// ```rust,no_run
637    /// use zenwebp::WebPDecoder;
638    ///
639    /// # let webp_data: &[u8] = &[]; // your WebP data
640    /// // Phase 1: Parse headers
641    /// let mut decoder = WebPDecoder::build(webp_data)?;
642    ///
643    /// // Phase 2: Inspect metadata
644    /// let info = decoder.info();
645    /// println!("{}x{}, alpha={}", info.width, info.height, info.has_alpha);
646    ///
647    /// // Phase 3: Decode (no re-parsing)
648    /// let mut output = vec![0u8; decoder.output_buffer_size().unwrap()];
649    /// decoder.read_image(&mut output)?;
650    /// # Ok::<(), zenwebp::DecodeError>(())
651    /// ```
652    pub fn build(data: &'a [u8]) -> Result<Self, DecodeError> {
653        Self::new(data)
654    }
655
656    /// Create a new `WebPDecoder` from the data slice.
657    pub fn new(data: &'a [u8]) -> Result<Self, DecodeError> {
658        Self::new_with_options(data, WebPDecodeOptions::default())
659    }
660
661    /// Get image information without decoding the full image.
662    ///
663    /// Returns metadata that was parsed during construction. This is a zero-cost
664    /// operation that doesn't re-parse or decode any data.
665    ///
666    /// # Example
667    ///
668    /// ```rust,no_run
669    /// use zenwebp::WebPDecoder;
670    ///
671    /// let webp_data: &[u8] = &[]; // your WebP data
672    /// let decoder = WebPDecoder::new(webp_data)?;
673    /// let info = decoder.info();
674    /// println!("Format: {:?}, {}x{}", info.format, info.width, info.height);
675    /// # Ok::<(), zenwebp::DecodeError>(())
676    /// ```
677    pub fn info(&self) -> ImageInfo {
678        let icc_profile = self
679            .read_chunk_direct(WebPRiffChunk::ICCP, self.memory_limit)
680            .unwrap_or(None);
681        let exif = self
682            .read_chunk_direct(WebPRiffChunk::EXIF, self.memory_limit)
683            .unwrap_or(None);
684        let xmp = self
685            .read_chunk_direct(WebPRiffChunk::XMP, self.memory_limit)
686            .unwrap_or(None);
687        ImageInfo {
688            width: self.width,
689            height: self.height,
690            has_alpha: self.has_alpha,
691            is_lossy: self.is_lossy,
692            has_animation: self.is_animated(),
693            frame_count: self.num_frames,
694            format: if self.is_lossy {
695                BitstreamFormat::Lossy
696            } else {
697                BitstreamFormat::Lossless
698            },
699            icc_profile,
700            exif,
701            xmp,
702        }
703    }
704
705    /// Create a new `WebPDecoder` from the data slice with the given options.
706    pub(crate) fn new_with_options(
707        data: &'a [u8],
708        webp_decode_options: WebPDecodeOptions,
709    ) -> Result<Self, DecodeError> {
710        let mut decoder = Self {
711            r: SliceReader::new(data),
712            width: 0,
713            height: 0,
714            num_frames: 0,
715            kind: ImageKind::Lossy,
716            chunks: HashMap::new(),
717            animation: Default::default(),
718            memory_limit: usize::MAX,
719            limits: super::limits::Limits::none(), // No limits by default
720            is_lossy: false,
721            has_alpha: false,
722            loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
723            loop_duration: 0,
724            webp_decode_options,
725            stop: None,
726        };
727        decoder.read_data()?;
728        Ok(decoder)
729    }
730
731    fn read_data(&mut self) -> Result<(), DecodeError> {
732        let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
733            return Err(DecodeError::ChunkHeaderInvalid(*b"RIFF"));
734        };
735
736        match &read_fourcc(&mut self.r)? {
737            WebPRiffChunk::WEBP => {}
738            fourcc => return Err(DecodeError::WebpSignatureInvalid(fourcc.to_fourcc())),
739        }
740
741        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
742        let start = self.r.stream_position();
743
744        match chunk {
745            WebPRiffChunk::VP8 => {
746                let tag = self.r.read_u24_le()?;
747
748                let keyframe = tag & 1 == 0;
749                if !keyframe {
750                    return Err(DecodeError::UnsupportedFeature(
751                        "Non-keyframe frames".into(),
752                    ));
753                }
754
755                let mut tag = [0u8; 3];
756                self.r.read_exact(&mut tag)?;
757                if tag != [0x9d, 0x01, 0x2a] {
758                    return Err(DecodeError::Vp8MagicInvalid(tag));
759                }
760
761                let w = self.r.read_u16_le()?;
762                let h = self.r.read_u16_le()?;
763
764                self.width = u32::from(w & 0x3FFF);
765                self.height = u32::from(h & 0x3FFF);
766                if self.width == 0 || self.height == 0 {
767                    return Err(DecodeError::InconsistentImageSizes);
768                }
769
770                self.limits.check_dimensions(self.width, self.height)?;
771
772                self.chunks
773                    .insert(WebPRiffChunk::VP8, start..start + chunk_size);
774                self.kind = ImageKind::Lossy;
775                self.is_lossy = true;
776            }
777            WebPRiffChunk::VP8L => {
778                let signature = self.r.read_u8()?;
779                if signature != 0x2f {
780                    return Err(DecodeError::LosslessSignatureInvalid(signature));
781                }
782
783                let header = self.r.read_u32_le()?;
784                let version = header >> 29;
785                if version != 0 {
786                    return Err(DecodeError::VersionNumberInvalid(version as u8));
787                }
788
789                self.width = (1 + header) & 0x3FFF;
790                self.height = (1 + (header >> 14)) & 0x3FFF;
791                self.limits.check_dimensions(self.width, self.height)?;
792                self.chunks
793                    .insert(WebPRiffChunk::VP8L, start..start + chunk_size);
794                self.kind = ImageKind::Lossless;
795                self.has_alpha = (header >> 28) & 1 != 0;
796            }
797            WebPRiffChunk::VP8X => {
798                let mut info = extended::read_extended_header(&mut self.r)?;
799                self.width = info.canvas_width;
800                self.height = info.canvas_height;
801                self.limits.check_dimensions(self.width, self.height)?;
802
803                let mut position = start + chunk_size_rounded;
804                let max_position = position + riff_size.saturating_sub(12);
805                self.r.seek_from_start(position)?;
806
807                while position < max_position {
808                    match read_chunk_header(&mut self.r) {
809                        Ok((chunk, chunk_size, chunk_size_rounded)) => {
810                            let range = position + 8..position + 8 + chunk_size;
811                            position += 8 + chunk_size_rounded;
812
813                            if !chunk.is_unknown() {
814                                self.chunks.entry(chunk).or_insert(range);
815                            }
816
817                            if chunk == WebPRiffChunk::ANMF {
818                                self.num_frames += 1;
819                                if chunk_size < 24 {
820                                    return Err(DecodeError::InvalidChunkSize);
821                                }
822
823                                self.r.seek_relative(12)?;
824                                let duration = self.r.read_u32_le()? & 0xffffff;
825                                self.loop_duration =
826                                    self.loop_duration.wrapping_add(u64::from(duration));
827
828                                // If the image is animated, the image data chunk will be inside the
829                                // ANMF chunks, so we must inspect them to determine whether the
830                                // image contains any lossy image data. VP8 chunks store lossy data
831                                // and the spec says that lossless images SHOULD NOT contain ALPH
832                                // chunks, so we treat both as indicators of lossy images.
833                                if !self.is_lossy {
834                                    let (subchunk, ..) = read_chunk_header(&mut self.r)?;
835                                    if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
836                                        self.is_lossy = true;
837                                    }
838                                    self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
839                                } else {
840                                    self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
841                                }
842
843                                continue;
844                            }
845
846                            self.r.seek_relative(chunk_size_rounded as i64)?;
847                        }
848                        Err(DecodeError::BitStreamError) => {
849                            break;
850                        }
851                        Err(e) => return Err(e),
852                    }
853                }
854                self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);
855
856                // NOTE: We allow malformed images that have `info.icc_profile` set without a ICCP chunk,
857                // because this is relatively common.
858                if info.animation
859                    && (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
860                        || !self.chunks.contains_key(&WebPRiffChunk::ANMF))
861                    || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
862                    || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
863                    || !info.animation
864                        && self.chunks.contains_key(&WebPRiffChunk::VP8)
865                            == self.chunks.contains_key(&WebPRiffChunk::VP8L)
866                {
867                    return Err(DecodeError::ChunkMissing);
868                }
869
870                // Decode ANIM chunk.
871                if info.animation {
872                    match self.read_chunk(WebPRiffChunk::ANIM, 6) {
873                        Ok(Some(chunk)) => {
874                            let mut cursor = SliceReader::new(&chunk);
875                            cursor.read_exact(&mut info.background_color_hint)?;
876                            self.loop_count = match cursor.read_u16_le()? {
877                                0 => LoopCount::Forever,
878                                n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
879                            };
880                            self.animation.next_frame_start =
881                                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
882                        }
883                        Ok(None) => return Err(DecodeError::ChunkMissing),
884                        Err(DecodeError::MemoryLimitExceeded) => {
885                            return Err(DecodeError::InvalidChunkSize)
886                        }
887                        Err(e) => return Err(e),
888                    }
889                }
890
891                // If the image is animated, the image data chunk will be inside the ANMF chunks. We
892                // store the ALPH, VP8, and VP8L chunks (as applicable) of the first frame in the
893                // hashmap so that we can read them later.
894                if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
895                    let mut position = range.start + 16;
896                    self.r.seek_from_start(position)?;
897                    for _ in 0..2 {
898                        let (subchunk, subchunk_size, subchunk_size_rounded) =
899                            read_chunk_header(&mut self.r)?;
900                        let subrange = position + 8..position + 8 + subchunk_size;
901                        self.chunks.entry(subchunk).or_insert(subrange.clone());
902
903                        position += 8 + subchunk_size_rounded;
904                        if position + 8 > range.end {
905                            break;
906                        }
907                    }
908                }
909
910                self.has_alpha = info.alpha;
911                self.kind = ImageKind::Extended(info);
912            }
913            _ => return Err(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())),
914        };
915
916        Ok(())
917    }
918
    /// Set a cooperative cancellation token for decoding.
    ///
    /// The token is forwarded to the underlying VP8 and lossless frame
    /// decoders so that long-running decodes can be aborted cooperatively.
    pub fn set_stop(&mut self, stop: Option<&'a dyn enough::Stop>) {
        self.stop = stop;
    }
926
    /// Sets the memory limit in bytes for decoded image buffers.
    ///
    /// The limit is used as the size cap when extracting metadata chunks
    /// (ICC / EXIF / XMP).
    ///
    /// TODO: Some allocations currently ignore this limit.
    pub fn set_memory_limit(&mut self, limit: usize) {
        self.memory_limit = limit;
    }
931
    /// Set decode limits for validation.
    ///
    /// NOTE(review): the limits are only stored here; enforcement happens
    /// elsewhere in the decoder — see `super::limits::Limits`.
    pub fn set_limits(&mut self, limits: super::limits::Limits) {
        self.limits = limits;
    }
936
937    /// Get the background color specified in the image file if the image is extended and animated webp.
938    pub fn background_color_hint(&self) -> Option<[u8; 4]> {
939        if let ImageKind::Extended(info) = &self.kind {
940            Some(info.background_color_hint)
941        } else {
942            None
943        }
944    }
945
946    /// Sets the background color if the image is an extended and animated webp.
947    pub fn set_background_color(&mut self, color: [u8; 4]) -> Result<(), DecodeError> {
948        if let ImageKind::Extended(info) = &mut self.kind {
949            info.background_color = Some(color);
950            Ok(())
951        } else {
952            Err(DecodeError::InvalidParameter(
953                "Background color can only be set on animated webp".into(),
954            ))
955        }
956    }
957
    /// Returns the (width, height) of the image in pixels.
    ///
    /// For animated images this is the size of the whole canvas that frames
    /// are composited onto, not of an individual frame.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }
962
    /// Returns whether the image has an alpha channel. If so, the pixel format is Rgba8 and
    /// otherwise Rgb8.
    ///
    /// This determines the layout expected by `read_image` / `read_frame` and
    /// the value of `output_buffer_size`.
    pub fn has_alpha(&self) -> bool {
        self.has_alpha
    }
968
969    /// Returns true if the image is animated.
970    pub fn is_animated(&self) -> bool {
971        match &self.kind {
972            ImageKind::Lossy | ImageKind::Lossless => false,
973            ImageKind::Extended(extended) => extended.animation,
974        }
975    }
976
    /// Returns whether the image is lossy. For animated images, this is true if any frame is lossy.
    pub fn is_lossy(&self) -> bool {
        self.is_lossy
    }
981
    /// Returns the number of frames of a single loop of the animation, or zero if the image is not
    /// animated.
    pub fn num_frames(&self) -> u32 {
        self.num_frames
    }
987
    /// Returns the number of times the animation should loop.
    ///
    /// A stored loop count of 0 is reported as `LoopCount::Forever`.
    pub fn loop_count(&self) -> LoopCount {
        self.loop_count
    }
992
    /// Returns the total duration of one loop through the animation in milliseconds, or zero if the
    /// image is not animated.
    ///
    /// This is the sum of the durations of all individual frames of the image.
    pub fn loop_duration(&self) -> u64 {
        self.loop_duration
    }
1000
    /// Reads the raw contents of `chunk`, failing with `MemoryLimitExceeded`
    /// if it is larger than `max_size`.
    ///
    /// Thin `&mut self` wrapper around [`Self::read_chunk_direct`].
    fn read_chunk(
        &mut self,
        chunk: WebPRiffChunk,
        max_size: usize,
    ) -> Result<Option<Vec<u8>>, DecodeError> {
        self.read_chunk_direct(chunk, max_size)
    }
1008
1009    fn read_chunk_direct(
1010        &self,
1011        chunk: WebPRiffChunk,
1012        max_size: usize,
1013    ) -> Result<Option<Vec<u8>>, DecodeError> {
1014        match self.chunks.get(&chunk) {
1015            Some(range) => {
1016                let len = (range.end - range.start) as usize;
1017                if len > max_size {
1018                    return Err(DecodeError::MemoryLimitExceeded);
1019                }
1020                let start = range.start as usize;
1021                let end = range.end as usize;
1022                Ok(Some(self.r.get_ref()[start..end].to_vec()))
1023            }
1024            None => Ok(None),
1025        }
1026    }
1027
    /// Returns the raw bytes of the ICC profile, or None if there is no ICC profile.
    ///
    /// Subject to the configured memory limit (see `set_memory_limit`).
    pub fn icc_profile(&mut self) -> Result<Option<Vec<u8>>, DecodeError> {
        self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
    }
1032
    /// Returns the raw bytes of the EXIF metadata, or None if there is no EXIF metadata.
    ///
    /// Subject to the configured memory limit (see `set_memory_limit`).
    pub fn exif_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodeError> {
        self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
    }
1037
    /// Returns the raw bytes of the XMP metadata, or None if there is no XMP metadata.
    ///
    /// Subject to the configured memory limit (see `set_memory_limit`).
    pub fn xmp_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodeError> {
        self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
    }
1042
1043    /// Returns the number of bytes required to store the image or a single frame, or None if that
1044    /// would take more than `usize::MAX` bytes.
1045    pub fn output_buffer_size(&self) -> Option<usize> {
1046        let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
1047        (self.width as usize)
1048            .checked_mul(self.height as usize)?
1049            .checked_mul(bytes_per_pixel)
1050    }
1051
    /// Returns the raw bytes of the image. For animated images, this is the first frame.
    ///
    /// The output layout is RGBA8 when `has_alpha()` is true and RGB8 otherwise.
    ///
    /// Fails with `ImageTooLarge` if `buf` has length different than `output_buffer_size()`
    pub fn read_image(&mut self, buf: &mut [u8]) -> Result<(), DecodeError> {
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(DecodeError::ImageTooLarge);
        }

        if self.is_animated() {
            // Temporarily rewind the animation cursor to the first ANMF chunk,
            // decode that frame, then restore the caller's animation position.
            let saved = core::mem::take(&mut self.animation);
            self.animation.next_frame_start =
                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
            let result = self.read_frame(buf);
            self.animation = saved;
            result?;
        } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
            // Lossless bitstream: decodes natively to RGBA.
            let data_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
            let mut decoder = LosslessDecoder::new(data_slice);
            decoder.set_stop(self.stop);

            if self.has_alpha {
                decoder.decode_frame(self.width, self.height, false, buf)?;
            } else {
                // Output is RGB: decode to a temporary RGBA buffer and drop
                // the alpha byte of every pixel.
                let mut data = vec![0; self.width as usize * self.height as usize * 4];
                decoder.decode_frame(self.width, self.height, false, &mut data)?;
                for (rgba_val, chunk) in data.chunks_exact(4).zip(buf.chunks_exact_mut(3)) {
                    chunk.copy_from_slice(&rgba_val[..3]);
                }
            }
        } else {
            // Lossy (VP8) bitstream, possibly with a separate ALPH alpha plane.
            let range = self
                .chunks
                .get(&WebPRiffChunk::VP8)
                .ok_or(DecodeError::ChunkMissing)?;
            let data_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
            let frame = Vp8Decoder::decode_frame_with_stop(data_slice, self.stop)?;
            if u32::from(frame.width) != self.width || u32::from(frame.height) != self.height {
                return Err(DecodeError::InconsistentImageSizes);
            }

            if self.has_alpha() {
                frame.fill_rgba(buf, self.webp_decode_options.lossy_upsampling);

                let range = self
                    .chunks
                    .get(&WebPRiffChunk::ALPH)
                    .ok_or(DecodeError::ChunkMissing)?
                    .clone();
                let alpha_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
                let alpha_chunk =
                    read_alpha_chunk(alpha_slice, self.width as u16, self.height as u16)?;

                // Undo the alpha plane's prediction filter and write the
                // reconstructed alpha values into the A channel of `buf`.
                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            buf,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        // A channel lives at byte 3 of each 4-byte pixel.
                        let buffer_index = alpha_index * 4 + 3;

                        buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }
            } else {
                frame.fill_rgb(buf, self.webp_decode_options.lossy_upsampling);
            }
        }

        Ok(())
    }
1128
    /// Reads the next frame of the animation.
    ///
    /// The frame contents are written into `buf` and the method returns the duration of the frame
    /// in milliseconds. If there are no more frames, the method returns
    /// `DecodeError::NoMoreFrames` and `buf` is left unchanged.
    pub fn read_frame(&mut self, buf: &mut [u8]) -> Result<u32, DecodeError> {
        if !self.is_animated() {
            return Err(DecodeError::InvalidParameter(String::from(
                "not an animated WebP",
            )));
        }
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(DecodeError::ImageTooLarge);
        }

        if self.animation.next_frame == self.num_frames {
            return Err(DecodeError::NoMoreFrames);
        }

        // `is_animated()` returned true above, so the kind must be Extended.
        let ImageKind::Extended(info) = &self.kind else {
            unreachable!()
        };

        self.r.seek_from_start(self.animation.next_frame_start)?;

        // Reject undersized ANMF payloads: 16 bytes of frame header plus a
        // nested chunk header and bitstream data must fit inside.
        let anmf_size = match read_chunk_header(&mut self.r)? {
            (WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
            _ => return Err(DecodeError::ChunkHeaderInvalid(*b"ANMF")),
        };

        // Read ANMF chunk. Offsets are stored halved and dimensions minus one,
        // hence the `* 2` and `+ 1`.
        let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
        let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
        if frame_width > 16384 || frame_height > 16384 {
            return Err(DecodeError::ImageTooLarge);
        }
        if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
            return Err(DecodeError::FrameOutsideImage);
        }
        let duration = extended::read_3_bytes(&mut self.r)?;
        let frame_info = self.r.read_u8()?;
        // Bit 1 selects the blending method (0 = alpha-blend over the canvas),
        // bit 0 requests disposal to the background color after this frame.
        let use_alpha_blending = frame_info & 0b00000010 == 0;
        let dispose = frame_info & 0b00000001 != 0;

        // If the previous frame requested disposal, its area is cleared to the
        // background color before compositing this frame.
        let clear_color = if self.animation.dispose_next_frame {
            info.background_color
        } else {
            None
        };

        // Read normal bitstream now
        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        if chunk_size_rounded + 24 > anmf_size {
            return Err(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()));
        }

        let (frame, frame_has_alpha): (Vec<u8>, bool) = match chunk {
            WebPRiffChunk::VP8 => {
                // Lossy frame without an alpha plane: decode and expand to RGB.
                let data_slice = self.r.take_slice(chunk_size as usize)?;
                let raw_frame = Vp8Decoder::decode_frame_with_stop(data_slice, self.stop)?;
                if u32::from(raw_frame.width) != frame_width
                    || u32::from(raw_frame.height) != frame_height
                {
                    return Err(DecodeError::InconsistentImageSizes);
                }
                let mut rgb_frame = vec![0; frame_width as usize * frame_height as usize * 3];
                raw_frame.fill_rgb(&mut rgb_frame, self.webp_decode_options.lossy_upsampling);
                (rgb_frame, false)
            }
            WebPRiffChunk::VP8L => {
                // Lossless frame: decodes natively to RGBA.
                let data_slice = self.r.take_slice(chunk_size as usize)?;
                let mut lossless_decoder = LosslessDecoder::new(data_slice);
                lossless_decoder.set_stop(self.stop);
                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                lossless_decoder.decode_frame(frame_width, frame_height, false, &mut rgba_frame)?;
                (rgba_frame, true)
            }
            WebPRiffChunk::ALPH => {
                // Lossy frame with a separate alpha plane: an ALPH chunk
                // immediately followed by the opaque VP8 bitstream chunk.
                if chunk_size_rounded + 32 > anmf_size {
                    return Err(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()));
                }

                // read alpha
                let alpha_slice = self.r.take_slice(chunk_size as usize)?;
                // Skip padding if chunk_size is odd
                if chunk_size_rounded > chunk_size {
                    self.r
                        .seek_relative((chunk_size_rounded - chunk_size) as i64)?;
                }
                let alpha_chunk =
                    read_alpha_chunk(alpha_slice, frame_width as u16, frame_height as u16)?;

                // read opaque
                let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
                if chunk_size + next_chunk_size + 32 > anmf_size {
                    return Err(DecodeError::ChunkHeaderInvalid(next_chunk.to_fourcc()));
                }

                let vp8_slice = self.r.take_slice(next_chunk_size as usize)?;
                let frame = Vp8Decoder::decode_frame_with_stop(vp8_slice, self.stop)?;

                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
                frame.fill_rgba(&mut rgba_frame, self.webp_decode_options.lossy_upsampling);

                // Undo the alpha plane's prediction filter and write the
                // reconstructed alpha values into the A channel.
                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            &rgba_frame,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        let buffer_index = alpha_index * 4 + 3;

                        rgba_frame[buffer_index] =
                            predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }

                (rgba_frame, true)
            }
            _ => return Err(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())),
        };

        // fill starting canvas with clear color
        if self.animation.canvas.is_none() {
            self.animation.canvas = {
                let mut canvas = vec![0; (self.width * self.height * 4) as usize];
                if let Some(color) = info.background_color.as_ref() {
                    canvas
                        .chunks_exact_mut(4)
                        .for_each(|c| c.copy_from_slice(color))
                }
                Some(canvas)
            }
        }
        extended::composite_frame(
            self.animation.canvas.as_mut().unwrap(),
            self.width,
            self.height,
            clear_color,
            &frame,
            frame_x,
            frame_y,
            frame_width,
            frame_height,
            frame_has_alpha,
            use_alpha_blending,
            self.animation.previous_frame_width,
            self.animation.previous_frame_height,
            self.animation.previous_frame_x_offset,
            self.animation.previous_frame_y_offset,
        );

        // Remember this frame's placement for the next call's disposal logic.
        self.animation.previous_frame_width = frame_width;
        self.animation.previous_frame_height = frame_height;
        self.animation.previous_frame_x_offset = frame_x;
        self.animation.previous_frame_y_offset = frame_y;

        // Advance the animation cursor: disposal flag, byte offset of the next
        // ANMF chunk (payload + 8-byte chunk header), and frame counter.
        self.animation.dispose_next_frame = dispose;
        self.animation.next_frame_start += anmf_size + 8;
        self.animation.next_frame += 1;

        // Copy the composited canvas into the caller's buffer, dropping the
        // alpha channel when the output format is RGB.
        if self.has_alpha() {
            buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
        } else {
            for (b, c) in buf
                .chunks_exact_mut(3)
                .zip(self.animation.canvas.as_ref().unwrap().chunks_exact(4))
            {
                b.copy_from_slice(&c[..3]);
            }
        }

        Ok(duration)
    }
1312
1313    /// Resets the animation to the first frame.
1314    ///
1315    pub fn reset_animation(&mut self) -> Result<(), DecodeError> {
1316        if !self.is_animated() {
1317            return Err(DecodeError::InvalidParameter(String::from(
1318                "not an animated WebP",
1319            )));
1320        }
1321        self.animation.next_frame = 0;
1322        self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
1323        self.animation.dispose_next_frame = true;
1324        Ok(())
1325    }
1326
    /// Sets the upsampling method that is used in lossy decoding
    /// (passed through to the VP8 frame's RGB/RGBA conversion).
    pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) {
        self.webp_decode_options.lossy_upsampling = upsampling_method;
    }
1331}
1332
1333pub(crate) fn read_fourcc(r: &mut SliceReader) -> Result<WebPRiffChunk, DecodeError> {
1334    let mut chunk_fourcc = [0; 4];
1335    r.read_exact(&mut chunk_fourcc)?;
1336    Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
1337}
1338
1339pub(crate) fn read_chunk_header(
1340    r: &mut SliceReader,
1341) -> Result<(WebPRiffChunk, u64, u64), DecodeError> {
1342    let chunk = read_fourcc(r)?;
1343    let chunk_size = r.read_u32_le()?;
1344    let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1);
1345    Ok((chunk, chunk_size.into(), chunk_size_rounded.into()))
1346}
1347
1348// ============================================================================
1349// Convenience decode functions (webpx-compatible API)
1350// ============================================================================
1351
1352/// Decode WebP data to RGBA pixels.
1353///
1354/// Returns the decoded pixels and dimensions.
1355///
1356/// # Example
1357///
1358/// ```rust,no_run
1359/// let webp_data: &[u8] = &[]; // your WebP data
1360/// let (pixels, width, height) = zenwebp::decode_rgba(webp_data)?;
1361/// # Ok::<(), zenwebp::DecodeError>(())
1362/// ```
1363pub fn decode_rgba(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodeError> {
1364    let mut decoder = WebPDecoder::new(data)?;
1365    let (width, height) = decoder.dimensions();
1366    let output_size = decoder
1367        .output_buffer_size()
1368        .ok_or(DecodeError::ImageTooLarge)?;
1369
1370    // Get output in native format (RGB or RGBA)
1371    let mut output = vec![0u8; output_size];
1372    decoder.read_image(&mut output)?;
1373
1374    // If the decoder outputs RGB, convert to RGBA
1375    if !decoder.has_alpha() {
1376        let mut rgba = Vec::with_capacity((width * height * 4) as usize);
1377        for chunk in output.chunks_exact(3) {
1378            rgba.extend_from_slice(chunk);
1379            rgba.push(255);
1380        }
1381        return Ok((rgba, width, height));
1382    }
1383
1384    Ok((output, width, height))
1385}
1386
1387/// Decode WebP data to RGB pixels (no alpha).
1388///
1389/// Returns the decoded pixels and dimensions.
1390///
1391/// # Example
1392///
1393/// ```rust,no_run
1394/// let webp_data: &[u8] = &[]; // your WebP data
1395/// let (pixels, width, height) = zenwebp::decode_rgb(webp_data)?;
1396/// # Ok::<(), zenwebp::DecodeError>(())
1397/// ```
1398pub fn decode_rgb(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodeError> {
1399    let mut decoder = WebPDecoder::new(data)?;
1400    let (width, height) = decoder.dimensions();
1401    let output_size = decoder
1402        .output_buffer_size()
1403        .ok_or(DecodeError::ImageTooLarge)?;
1404
1405    let mut output = vec![0u8; output_size];
1406    decoder.read_image(&mut output)?;
1407
1408    // If the decoder outputs RGBA, convert to RGB
1409    if decoder.has_alpha() {
1410        let mut rgb = Vec::with_capacity((width * height * 3) as usize);
1411        for chunk in output.chunks_exact(4) {
1412            rgb.extend_from_slice(&chunk[..3]);
1413        }
1414        return Ok((rgb, width, height));
1415    }
1416
1417    Ok((output, width, height))
1418}
1419
1420/// Decode WebP data directly into a pre-allocated RGBA buffer.
1421///
1422/// # Arguments
1423/// * `data` - WebP encoded data
1424/// * `output` - Pre-allocated output buffer (must be at least `stride_pixels * height * 4` bytes)
1425/// * `stride_pixels` - Row stride in pixels (must be >= width)
1426///
1427/// # Returns
1428/// Width and height of the decoded image.
1429pub fn decode_rgba_into(
1430    data: &[u8],
1431    output: &mut [u8],
1432    stride_pixels: u32,
1433) -> Result<(u32, u32), DecodeError> {
1434    let mut decoder = WebPDecoder::new(data)?;
1435    let (width, height) = decoder.dimensions();
1436
1437    if stride_pixels < width {
1438        return Err(DecodeError::InvalidParameter(format!(
1439            "stride_pixels {} < width {}",
1440            stride_pixels, width
1441        )));
1442    }
1443
1444    let stride_bytes = stride_pixels as usize * 4;
1445    let required = stride_bytes * (height as usize);
1446    if output.len() < required {
1447        return Err(DecodeError::InvalidParameter(format!(
1448            "output buffer too small: got {}, need {}",
1449            output.len(),
1450            required
1451        )));
1452    }
1453
1454    // Decode into temporary buffer
1455    let output_size = decoder
1456        .output_buffer_size()
1457        .ok_or(DecodeError::ImageTooLarge)?;
1458    let mut temp = vec![0u8; output_size];
1459    decoder.read_image(&mut temp)?;
1460
1461    // Copy to output with stride
1462    let has_alpha = decoder.has_alpha();
1463    let src_bpp = if has_alpha { 4 } else { 3 };
1464
1465    for y in 0..(height as usize) {
1466        let dst_row = &mut output[y * stride_bytes..][..width as usize * 4];
1467        let src_row = &temp[y * (width as usize) * src_bpp..][..width as usize * src_bpp];
1468
1469        if has_alpha {
1470            dst_row.copy_from_slice(src_row);
1471        } else {
1472            for (dst, src) in dst_row.chunks_exact_mut(4).zip(src_row.chunks_exact(3)) {
1473                dst[..3].copy_from_slice(src);
1474                dst[3] = 255;
1475            }
1476        }
1477    }
1478
1479    Ok((width, height))
1480}
1481
1482/// Decode WebP data directly into a pre-allocated RGB buffer.
1483///
1484/// # Arguments
1485/// * `data` - WebP encoded data
1486/// * `output` - Pre-allocated output buffer (must be at least `stride_pixels * height * 3` bytes)
1487/// * `stride_pixels` - Row stride in pixels (must be >= width)
1488///
1489/// # Returns
1490/// Width and height of the decoded image.
1491pub fn decode_rgb_into(
1492    data: &[u8],
1493    output: &mut [u8],
1494    stride_pixels: u32,
1495) -> Result<(u32, u32), DecodeError> {
1496    let mut decoder = WebPDecoder::new(data)?;
1497    let (width, height) = decoder.dimensions();
1498
1499    if stride_pixels < width {
1500        return Err(DecodeError::InvalidParameter(format!(
1501            "stride_pixels {} < width {}",
1502            stride_pixels, width
1503        )));
1504    }
1505
1506    let stride_bytes = stride_pixels as usize * 3;
1507    let required = stride_bytes * (height as usize);
1508    if output.len() < required {
1509        return Err(DecodeError::InvalidParameter(format!(
1510            "output buffer too small: got {}, need {}",
1511            output.len(),
1512            required
1513        )));
1514    }
1515
1516    // Decode into temporary buffer
1517    let output_size = decoder
1518        .output_buffer_size()
1519        .ok_or(DecodeError::ImageTooLarge)?;
1520    let mut temp = vec![0u8; output_size];
1521    decoder.read_image(&mut temp)?;
1522
1523    // Copy to output with stride
1524    let has_alpha = decoder.has_alpha();
1525    let src_bpp = if has_alpha { 4 } else { 3 };
1526
1527    for y in 0..(height as usize) {
1528        let dst_row = &mut output[y * stride_bytes..][..width as usize * 3];
1529        let src_row = &temp[y * (width as usize) * src_bpp..][..width as usize * src_bpp];
1530
1531        if has_alpha {
1532            for (dst, src) in dst_row.chunks_exact_mut(3).zip(src_row.chunks_exact(4)) {
1533                dst.copy_from_slice(&src[..3]);
1534            }
1535        } else {
1536            dst_row.copy_from_slice(src_row);
1537        }
1538    }
1539
1540    Ok((width, height))
1541}
1542
/// Image information obtained from WebP data header.
///
/// Produced by [`ImageInfo::from_webp`] / [`ImageInfo::from_bytes`] without
/// decoding any pixel data.
#[derive(Debug, Clone)]
pub struct ImageInfo {
    /// Image width in pixels.
    pub width: u32,
    /// Image height in pixels.
    pub height: u32,
    /// Whether the image has an alpha channel.
    pub has_alpha: bool,
    /// Whether the image uses lossy compression.
    pub is_lossy: bool,
    /// Whether the image is animated.
    pub has_animation: bool,
    /// Number of frames (1 for static images).
    pub frame_count: u32,
    /// Bitstream format (lossy or lossless).
    pub format: BitstreamFormat,
    /// ICC color profile, if present.
    pub icc_profile: Option<Vec<u8>>,
    /// EXIF metadata, if present.
    pub exif: Option<Vec<u8>>,
    /// XMP metadata, if present.
    pub xmp: Option<Vec<u8>>,
}
1567
1568impl ImageInfo {
1569    /// Minimum bytes needed to probe WebP metadata.
1570    ///
1571    /// This is a conservative estimate that covers the RIFF header, VP8/VP8L chunk
1572    /// header, and enough data to read basic image metadata. Actual images may be
1573    /// larger, but this is sufficient for probing.
1574    pub const PROBE_BYTES: usize = 64;
1575
1576    /// Parse image information from WebP data (alias for [`from_webp`](Self::from_webp)).
1577    ///
1578    /// This is a fast probing operation that only parses headers without decoding
1579    /// the full image data.
1580    ///
1581    /// # Example
1582    ///
1583    /// ```rust,no_run
1584    /// use zenwebp::ImageInfo;
1585    ///
1586    /// let webp_data: &[u8] = &[]; // your WebP data
1587    /// let info = ImageInfo::from_bytes(webp_data)?;
1588    /// println!("{}x{}, alpha={}", info.width, info.height, info.has_alpha);
1589    /// # Ok::<(), zenwebp::DecodeError>(())
1590    /// ```
1591    pub fn from_bytes(data: &[u8]) -> Result<Self, DecodeError> {
1592        Self::from_webp(data)
1593    }
1594
1595    /// Parse image information from WebP data.
1596    ///
1597    /// Extracts dimensions, format info, and metadata (ICC, EXIF, XMP) in a single
1598    /// pass. This replaces the need to use both [`WebPDecoder`] and
1599    /// [`WebPDemuxer`](crate::WebPDemuxer) for probing.
1600    pub fn from_webp(data: &[u8]) -> Result<Self, DecodeError> {
1601        let mut decoder = WebPDecoder::new(data)?;
1602        let (width, height) = decoder.dimensions();
1603        let is_lossy = decoder.is_lossy();
1604        let is_animated = decoder.is_animated();
1605        let frame_count = if is_animated { decoder.num_frames() } else { 1 };
1606        let format = if is_lossy {
1607            BitstreamFormat::Lossy
1608        } else {
1609            BitstreamFormat::Lossless
1610        };
1611        let icc_profile = decoder.icc_profile().unwrap_or(None);
1612        let exif = decoder.exif_metadata().unwrap_or(None);
1613        let xmp = decoder.xmp_metadata().unwrap_or(None);
1614        Ok(Self {
1615            width,
1616            height,
1617            has_alpha: decoder.has_alpha(),
1618            is_lossy,
1619            has_animation: is_animated,
1620            frame_count,
1621            format,
1622            icc_profile,
1623            exif,
1624            xmp,
1625        })
1626    }
1627
1628    /// Estimate resource consumption for decoding this image.
1629    ///
1630    /// Returns memory, time, and output size estimates. See
1631    /// [`heuristics::estimate_decode`](crate::heuristics::estimate_decode) for details.
1632    #[must_use]
1633    pub fn estimate_decode(&self, output_bpp: u8) -> crate::heuristics::DecodeEstimate {
1634        if self.has_animation {
1635            crate::heuristics::estimate_animation_decode(self.width, self.height, self.frame_count)
1636        } else {
1637            crate::heuristics::estimate_decode(self.width, self.height, output_bpp)
1638        }
1639    }
1640}
1641
/// Bitstream compression format.
///
/// A fieldless, copyable enum; `Hash` is derived alongside the existing
/// comparison traits so the format can be used as a map/set key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[non_exhaustive]
pub enum BitstreamFormat {
    /// Lossy compression (VP8).
    #[default]
    Lossy,
    /// Lossless compression (VP8L).
    Lossless,
}
1652
1653impl core::fmt::Display for BitstreamFormat {
1654    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1655        match self {
1656            BitstreamFormat::Lossy => f.write_str("lossy"),
1657            BitstreamFormat::Lossless => f.write_str("lossless"),
1658        }
1659    }
1660}
1661
/// Decoded YUV 4:2:0 planar image data.
///
/// Contains separate Y, U, and V planes at their native resolutions.
/// Y is full resolution, U and V are half resolution in each dimension
/// (rounded up). All fields are plain data, so equality comparison is
/// derived for convenience in tests and callers.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct YuvPlanes {
    /// Luma plane (full resolution).
    pub y: Vec<u8>,
    /// Chroma blue plane (half resolution in each dimension).
    pub u: Vec<u8>,
    /// Chroma red plane (half resolution in each dimension).
    pub v: Vec<u8>,
    /// Width of the luma plane in pixels.
    pub y_width: u32,
    /// Height of the luma plane in pixels.
    pub y_height: u32,
    /// Width of each chroma plane in pixels.
    pub uv_width: u32,
    /// Height of each chroma plane in pixels.
    pub uv_height: u32,
}
1683
1684/// Decode WebP data to BGRA pixels (blue, green, red, alpha order).
1685///
1686/// Returns the decoded pixels and dimensions.
1687pub fn decode_bgra(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodeError> {
1688    let (mut pixels, width, height) = decode_rgba(data)?;
1689    // Swap R and B channels in-place
1690    for chunk in pixels.chunks_exact_mut(4) {
1691        chunk.swap(0, 2);
1692    }
1693    Ok((pixels, width, height))
1694}
1695
1696/// Decode WebP data to BGR pixels (blue, green, red order, no alpha).
1697///
1698/// Returns the decoded pixels and dimensions.
1699pub fn decode_bgr(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodeError> {
1700    let (mut pixels, width, height) = decode_rgb(data)?;
1701    // Swap R and B channels in-place
1702    for chunk in pixels.chunks_exact_mut(3) {
1703        chunk.swap(0, 2);
1704    }
1705    Ok((pixels, width, height))
1706}
1707
1708/// Decode WebP data directly into a pre-allocated BGRA buffer.
1709///
1710/// # Arguments
1711/// * `data` - WebP encoded data
1712/// * `output` - Pre-allocated output buffer (must be at least `stride_pixels * height * 4` bytes)
1713/// * `stride_pixels` - Row stride in pixels (must be >= width)
1714///
1715/// # Returns
1716/// Width and height of the decoded image.
1717pub fn decode_bgra_into(
1718    data: &[u8],
1719    output: &mut [u8],
1720    stride_pixels: u32,
1721) -> Result<(u32, u32), DecodeError> {
1722    let (width, height) = decode_rgba_into(data, output, stride_pixels)?;
1723    let stride_bytes = stride_pixels as usize * 4;
1724    for y in 0..(height as usize) {
1725        let row = &mut output[y * stride_bytes..][..width as usize * 4];
1726        for chunk in row.chunks_exact_mut(4) {
1727            chunk.swap(0, 2);
1728        }
1729    }
1730    Ok((width, height))
1731}
1732
1733/// Decode WebP data directly into a pre-allocated BGR buffer.
1734///
1735/// # Arguments
1736/// * `data` - WebP encoded data
1737/// * `output` - Pre-allocated output buffer (must be at least `stride_pixels * height * 3` bytes)
1738/// * `stride_pixels` - Row stride in pixels (must be >= width)
1739///
1740/// # Returns
1741/// Width and height of the decoded image.
1742pub fn decode_bgr_into(
1743    data: &[u8],
1744    output: &mut [u8],
1745    stride_pixels: u32,
1746) -> Result<(u32, u32), DecodeError> {
1747    let (width, height) = decode_rgb_into(data, output, stride_pixels)?;
1748    let stride_bytes = stride_pixels as usize * 3;
1749    for y in 0..(height as usize) {
1750        let row = &mut output[y * stride_bytes..][..width as usize * 3];
1751        for chunk in row.chunks_exact_mut(3) {
1752            chunk.swap(0, 2);
1753        }
1754    }
1755    Ok((width, height))
1756}
1757
1758/// Decode WebP data to raw YUV 4:2:0 planes.
1759///
1760/// For VP8 lossy images, returns the native YUV planes without upsampling.
1761/// For VP8L lossless images, decodes to RGBA then converts to YUV.
1762///
1763/// # Returns
1764/// [`YuvPlanes`] containing separate Y, U, and V buffers.
1765pub fn decode_yuv420(data: &[u8]) -> Result<YuvPlanes, DecodeError> {
1766    let decoder = WebPDecoder::new(data)?;
1767
1768    if decoder.is_lossy() && !decoder.is_animated() {
1769        // For lossy images, extract the native YUV planes from the VP8 frame
1770        if let Some(range) = decoder.chunks.get(&WebPRiffChunk::VP8) {
1771            let data_slice = &decoder.r.get_ref()[range.start as usize..range.end as usize];
1772            let frame = Vp8Decoder::decode_frame(data_slice)?;
1773
1774            let w = u32::from(frame.width);
1775            let h = u32::from(frame.height);
1776            let uv_w = w.div_ceil(2);
1777            let uv_h = h.div_ceil(2);
1778
1779            // Macroblock-aligned buffer width (same as frame.buffer_width())
1780            let buffer_width = {
1781                let diff = w % 16;
1782                if diff > 0 {
1783                    (w + 16 - diff) as usize
1784                } else {
1785                    w as usize
1786                }
1787            };
1788            let chroma_bw = buffer_width / 2;
1789
1790            // Crop from macroblock-aligned buffers to actual dimensions
1791            let mut y = Vec::with_capacity((w * h) as usize);
1792            for row in 0..h as usize {
1793                y.extend_from_slice(
1794                    &frame.ybuf[row * buffer_width..row * buffer_width + w as usize],
1795                );
1796            }
1797
1798            let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
1799            let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
1800            for row in 0..uv_h as usize {
1801                u.extend_from_slice(&frame.ubuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
1802                v.extend_from_slice(&frame.vbuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
1803            }
1804
1805            return Ok(YuvPlanes {
1806                y,
1807                u,
1808                v,
1809                y_width: w,
1810                y_height: h,
1811                uv_width: uv_w,
1812                uv_height: uv_h,
1813            });
1814        }
1815    }
1816
1817    // For lossless or animated images, decode to RGBA then convert to YUV
1818    let (rgba, w, h) = decode_rgba(data)?;
1819    let (y_bytes, u_bytes, v_bytes) = super::yuv::convert_image_yuv::<4>(&rgba, w as u16, h as u16);
1820
1821    let uv_w = w.div_ceil(2);
1822    let uv_h = h.div_ceil(2);
1823    let mb_width = (w as usize).div_ceil(16);
1824
1825    let luma_width = 16 * mb_width;
1826    let chroma_width = 8 * mb_width;
1827
1828    // Crop from macroblock-aligned buffers
1829    let mut y = Vec::with_capacity((w * h) as usize);
1830    for row in 0..h as usize {
1831        y.extend_from_slice(&y_bytes[row * luma_width..row * luma_width + w as usize]);
1832    }
1833
1834    let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
1835    let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
1836    for row in 0..uv_h as usize {
1837        u.extend_from_slice(&u_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
1838        v.extend_from_slice(&v_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
1839    }
1840
1841    Ok(YuvPlanes {
1842        y,
1843        u,
1844        v,
1845        y_width: w,
1846        y_height: h,
1847        uv_width: uv_w,
1848        uv_height: uv_h,
1849    })
1850}
1851
#[cfg(test)]
mod tests {
    use super::*;
    // Bytes per pixel for packed RGB output.
    const RGB_BPP: usize = 3;

    // Regression test: a RIFF container with a huge declared chunk size that
    // would overflow when combined with offsets must not panic during header
    // parsing. The decoder may return Ok or Err; it just has to fail
    // gracefully, hence the discarded result.
    #[test]
    fn add_with_overflow_size() {
        let bytes = vec![
            0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64,
            0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49,
            0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46,
        ];

        let _ = WebPDecoder::new(&bytes);
    }

    // Decodes a 2x2 solid-color lossy image and checks every pixel matches
    // the first one (uniform output).
    #[test]
    fn decode_2x2_single_color_image() {
        // Image data created from imagemagick and output of xxd:
        // $ convert -size 2x2 xc:#f00 red.webp
        // $ xxd -g 1 red.webp | head

        const NUM_PIXELS: usize = 2 * 2 * RGB_BPP;
        // 2x2 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00,
            0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        let first_pixel = &data[..RGB_BPP];
        assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel)));
    }

    // Same as above but with odd (3x3) dimensions, so the partial macroblock
    // "tail" rows/columns also go through the crop path.
    #[test]
    fn decode_3x3_single_color_image() {
        // Test that any odd pixel "tail" is decoded properly

        const NUM_PIXELS: usize = 3 * 3 * RGB_BPP;
        // 3x3 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00,
            0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        let first_pixel = &data[..RGB_BPP];
        assert!(data.chunks_exact(3).all(|ch| ch.iter().eq(first_pixel)));
    }
}