//! zenwebp/decoder/api.rs — public decoding API and error types for WebP images.

1use alloc::string::String;
2use thiserror::Error;
3
/// Errors that can occur when attempting to decode a WebP image
///
/// Marked `#[non_exhaustive]`: new variants may be added without a breaking
/// release, so downstream `match`es need a catch-all arm.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DecodingError {
    /// An IO error occurred while reading the file
    ///
    /// Only present with the `std` feature, since `std::io::Error` does not
    /// exist in `no_std` builds.
    #[cfg(feature = "std")]
    #[error("IO Error: {0}")]
    IoError(#[from] std::io::Error),

    /// RIFF's "RIFF" signature not found or invalid
    #[error("Invalid RIFF signature: {0:x?}")]
    RiffSignatureInvalid([u8; 4]),

    /// WebP's "WEBP" signature not found or invalid
    #[error("Invalid WebP signature: {0:x?}")]
    WebpSignatureInvalid([u8; 4]),

    /// An expected chunk was missing
    #[error("An expected chunk was missing")]
    ChunkMissing,

    /// Chunk Header was incorrect or invalid in its usage
    #[error("Invalid Chunk header: {0:x?}")]
    ChunkHeaderInvalid([u8; 4]),

    /// Some bits were invalid
    // NOTE(review): deprecated and apparently no longer constructed in this
    // module — kept only for API compatibility; confirm before removing.
    #[deprecated]
    #[error("Reserved bits set")]
    ReservedBitSet,

    /// The ALPH chunk preprocessing info flag was invalid
    #[error("Alpha chunk preprocessing flag invalid")]
    InvalidAlphaPreprocessing,

    /// Invalid compression method
    #[error("Invalid compression method")]
    InvalidCompressionMethod,

    /// Alpha chunk doesn't match the frame's size
    #[error("Alpha chunk size mismatch")]
    AlphaChunkSizeMismatch,

    /// Image is too large, either for the platform's pointer size or generally
    #[error("Image too large")]
    ImageTooLarge,

    /// Frame would go out of the canvas
    #[error("Frame outside image")]
    FrameOutsideImage,

    /// Signature of 0x2f not found
    #[error("Invalid lossless signature: {0:x?}")]
    LosslessSignatureInvalid(u8),

    /// Version Number was not zero
    #[error("Invalid lossless version number: {0}")]
    VersionNumberInvalid(u8),

    /// Invalid color cache bits
    #[error("Invalid color cache bits: {0}")]
    InvalidColorCacheBits(u8),

    /// An invalid Huffman code was encountered
    #[error("Invalid Huffman code")]
    HuffmanError,

    /// The bitstream was somehow corrupt
    #[error("Corrupt bitstream")]
    BitStreamError,

    /// The transforms specified were invalid
    #[error("Invalid transform")]
    TransformError,

    /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid
    #[error("Invalid VP8 magic: {0:x?}")]
    Vp8MagicInvalid([u8; 3]),

    /// VP8 Decoder initialisation wasn't provided with enough data
    #[error("Not enough VP8 init data")]
    NotEnoughInitData,

    /// At time of writing, only the YUV colour-space encoded as `0` is specified
    #[error("Invalid VP8 color space: {0}")]
    ColorSpaceInvalid(u8),

    /// LUMA prediction mode was not recognised
    #[error("Invalid VP8 luma prediction mode: {0}")]
    LumaPredictionModeInvalid(i8),

    /// Intra-prediction mode was not recognised
    #[error("Invalid VP8 intra prediction mode: {0}")]
    IntraPredictionModeInvalid(i8),

    /// Chroma prediction mode was not recognised
    #[error("Invalid VP8 chroma prediction mode: {0}")]
    ChromaPredictionModeInvalid(i8),

    /// Inconsistent image sizes
    #[error("Inconsistent image sizes")]
    InconsistentImageSizes,

    /// The file may be valid, but this crate doesn't support decoding it.
    #[error("Unsupported feature: {0}")]
    UnsupportedFeature(String),

    /// Invalid function call or parameter
    #[error("Invalid parameter: {0}")]
    InvalidParameter(String),

    /// Memory limit exceeded
    #[error("Memory limit exceeded")]
    MemoryLimitExceeded,

    /// Invalid chunk size
    #[error("Invalid chunk size")]
    InvalidChunkSize,

    /// No more frames in image
    #[error("No more frames")]
    NoMoreFrames,
}
126
127// Core decoder implementation using SliceReader for no_std compatibility
128use alloc::format;
129use alloc::vec;
130use alloc::vec::Vec;
131use core::num::NonZeroU16;
132use core::ops::Range;
133
134use hashbrown::HashMap;
135
136use super::extended::{self, get_alpha_predictor, read_alpha_chunk, WebPExtendedInfo};
137use super::lossless::LosslessDecoder;
138use super::vp8::Vp8Decoder;
139use crate::slice_reader::SliceReader;
140
/// All possible RIFF chunks in a WebP image file
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    /// Top-level RIFF container chunk.
    RIFF,
    /// "WEBP" form identifier.
    WEBP,
    /// Lossy image data (note the trailing space in the fourcc, `"VP8 "`).
    VP8,
    /// Lossless image data.
    VP8L,
    /// Extended-format header.
    VP8X,
    /// Global animation parameters.
    ANIM,
    /// A single animation frame.
    ANMF,
    /// Alpha plane for a lossy image.
    ALPH,
    /// Embedded ICC color profile.
    ICCP,
    /// EXIF metadata.
    EXIF,
    /// XMP metadata (fourcc `"XMP "`).
    XMP,
    /// Any chunk identifier this crate does not recognize.
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    /// Maps a four-byte chunk identifier onto the matching variant; identifiers
    /// this crate does not know are preserved verbatim inside `Unknown`.
    pub(crate) const fn from_fourcc(fourcc: [u8; 4]) -> Self {
        match &fourcc {
            b"RIFF" => WebPRiffChunk::RIFF,
            b"WEBP" => WebPRiffChunk::WEBP,
            b"VP8 " => WebPRiffChunk::VP8,
            b"VP8L" => WebPRiffChunk::VP8L,
            b"VP8X" => WebPRiffChunk::VP8X,
            b"ANIM" => WebPRiffChunk::ANIM,
            b"ANMF" => WebPRiffChunk::ANMF,
            b"ALPH" => WebPRiffChunk::ALPH,
            b"ICCP" => WebPRiffChunk::ICCP,
            b"EXIF" => WebPRiffChunk::EXIF,
            b"XMP " => WebPRiffChunk::XMP,
            _ => WebPRiffChunk::Unknown(fourcc),
        }
    }

    /// Inverse of [`Self::from_fourcc`]: yields the four-byte identifier that
    /// this variant is serialized as in the RIFF container.
    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            WebPRiffChunk::RIFF => *b"RIFF",
            WebPRiffChunk::WEBP => *b"WEBP",
            WebPRiffChunk::VP8 => *b"VP8 ",
            WebPRiffChunk::VP8L => *b"VP8L",
            WebPRiffChunk::VP8X => *b"VP8X",
            WebPRiffChunk::ANIM => *b"ANIM",
            WebPRiffChunk::ANMF => *b"ANMF",
            WebPRiffChunk::ALPH => *b"ALPH",
            WebPRiffChunk::ICCP => *b"ICCP",
            WebPRiffChunk::EXIF => *b"EXIF",
            WebPRiffChunk::XMP => *b"XMP ",
            WebPRiffChunk::Unknown(fourcc) => fourcc,
        }
    }

    /// True only for the `Unknown` catch-all variant.
    pub(crate) const fn is_unknown(self) -> bool {
        matches!(self, WebPRiffChunk::Unknown(_))
    }
}
198
199// enum WebPImage {
200//     Lossy(VP8Frame),
201//     Lossless(LosslessFrame),
202//     Extended(ExtendedImage),
203// }
204
/// Which of the three WebP container flavors the file uses.
enum ImageKind {
    /// A bare "VP8 " file: lossy bitstream only.
    Lossy,
    /// A bare "VP8L" file: lossless bitstream only.
    Lossless,
    /// A "VP8X" extended file, carrying the parsed extended header
    /// (alpha/animation flags, metadata flags, canvas size, ...).
    Extended(WebPExtendedInfo),
}
210
/// Mutable bookkeeping used while stepping through the frames of an animation.
struct AnimationState {
    /// Index of the frame that will be decoded next.
    next_frame: u32,
    /// Byte offset of the next frame's ANMF chunk header in the stream.
    next_frame_start: u64,
    /// Whether the previous frame requested a dispose-to-background before
    /// the next frame is drawn.
    dispose_next_frame: bool,
    /// Geometry of the previously decoded frame (pixels).
    previous_frame_width: u32,
    previous_frame_height: u32,
    previous_frame_x_offset: u32,
    previous_frame_y_offset: u32,
    /// Composited RGBA canvas; lazily allocated on the first decoded frame.
    canvas: Option<Vec<u8>>,
}

impl Default for AnimationState {
    fn default() -> Self {
        Self {
            // The very first frame always starts from a disposed canvas.
            dispose_next_frame: true,
            canvas: None,
            next_frame: 0,
            next_frame_start: 0,
            previous_frame_width: 0,
            previous_frame_height: 0,
            previous_frame_x_offset: 0,
            previous_frame_y_offset: 0,
        }
    }
}
235
/// Number of times that an animation loops.
///
/// Parsed from the ANIM chunk: a stored loop count of zero maps to
/// [`LoopCount::Forever`], any other value to [`LoopCount::Times`].
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// The animation loops forever.
    Forever,
    /// Each frame of the animation is displayed the specified number of times.
    Times(NonZeroU16),
}
244
245/// WebP decoder configuration options
246#[derive(Clone)]
247#[non_exhaustive]
248pub struct WebPDecodeOptions {
249    /// The upsampling method used in conversion from lossy yuv to rgb
250    ///
251    /// Defaults to `Bilinear`.
252    pub lossy_upsampling: UpsamplingMethod,
253}
254
255impl Default for WebPDecodeOptions {
256    fn default() -> Self {
257        Self {
258            lossy_upsampling: UpsamplingMethod::Bilinear,
259        }
260    }
261}
262
/// Methods for upsampling the chroma values in lossy decoding
///
/// The chroma red and blue planes are encoded in VP8 as half the size of the luma plane
/// Therefore we need to upsample these values up to fit each pixel in the image.
//
// `Debug`, `PartialEq` and `Eq` are derived so callers can log and compare
// configured options; this public enum previously had neither.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum UpsamplingMethod {
    /// Fancy upsampling
    ///
    /// Does bilinear interpolation using the 4 values nearest to the pixel, weighting based on the distance
    /// from the pixel.
    #[default]
    Bilinear,
    /// Simple upsampling, just uses the closest u/v value to the pixel when upsampling
    ///
    /// Matches the -nofancy option in dwebp.
    /// Should be faster but may lead to slightly jagged edges.
    Simple,
}
281
/// WebP image format decoder.
pub struct WebPDecoder<'a> {
    /// Reader over the raw WebP byte stream.
    r: SliceReader<'a>,
    /// Largest chunk `read_chunk` may copy out at once; defaults to `usize::MAX`.
    memory_limit: usize,

    /// Canvas width in pixels (from the VP8/VP8L header or the VP8X canvas size).
    width: u32,
    /// Canvas height in pixels.
    height: u32,

    /// Which container flavor was detected while parsing the headers.
    kind: ImageKind,
    /// Animation playback bookkeeping; only meaningful for animated images.
    animation: AnimationState,

    /// True if the image stores any lossy (VP8 / ALPH) data.
    is_lossy: bool,
    /// True if decoded pixels are Rgba8; false means Rgb8.
    has_alpha: bool,
    /// Number of ANMF frames found; zero for still images.
    num_frames: u32,
    /// How many times the animation should loop (Times(1) for still images).
    loop_count: LoopCount,
    /// Sum of all frame durations in milliseconds.
    loop_duration: u64,

    /// Byte ranges of the recognized RIFF chunks within the stream.
    chunks: HashMap<WebPRiffChunk, Range<u64>>,

    /// Configuration supplied at construction time.
    webp_decode_options: WebPDecodeOptions,
}
303
304impl<'a> WebPDecoder<'a> {
    /// Create a new `WebPDecoder` from the data slice.
    ///
    /// Parses the container headers immediately using the default
    /// [`WebPDecodeOptions`]; returns an error if they are malformed.
    pub fn new(data: &'a [u8]) -> Result<Self, DecodingError> {
        Self::new_with_options(data, WebPDecodeOptions::default())
    }
309
310    /// Create a new `WebPDecoder` from the data slice with the given options.
311    pub fn new_with_options(
312        data: &'a [u8],
313        webp_decode_options: WebPDecodeOptions,
314    ) -> Result<Self, DecodingError> {
315        let mut decoder = Self {
316            r: SliceReader::new(data),
317            width: 0,
318            height: 0,
319            num_frames: 0,
320            kind: ImageKind::Lossy,
321            chunks: HashMap::new(),
322            animation: Default::default(),
323            memory_limit: usize::MAX,
324            is_lossy: false,
325            has_alpha: false,
326            loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
327            loop_duration: 0,
328            webp_decode_options,
329        };
330        decoder.read_data()?;
331        Ok(decoder)
332    }
333
334    fn read_data(&mut self) -> Result<(), DecodingError> {
335        let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
336            return Err(DecodingError::ChunkHeaderInvalid(*b"RIFF"));
337        };
338
339        match &read_fourcc(&mut self.r)? {
340            WebPRiffChunk::WEBP => {}
341            fourcc => return Err(DecodingError::WebpSignatureInvalid(fourcc.to_fourcc())),
342        }
343
344        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
345        let start = self.r.stream_position();
346
347        match chunk {
348            WebPRiffChunk::VP8 => {
349                let tag = self.r.read_u24_le()?;
350
351                let keyframe = tag & 1 == 0;
352                if !keyframe {
353                    return Err(DecodingError::UnsupportedFeature(
354                        "Non-keyframe frames".into(),
355                    ));
356                }
357
358                let mut tag = [0u8; 3];
359                self.r.read_exact(&mut tag)?;
360                if tag != [0x9d, 0x01, 0x2a] {
361                    return Err(DecodingError::Vp8MagicInvalid(tag));
362                }
363
364                let w = self.r.read_u16_le()?;
365                let h = self.r.read_u16_le()?;
366
367                self.width = u32::from(w & 0x3FFF);
368                self.height = u32::from(h & 0x3FFF);
369                if self.width == 0 || self.height == 0 {
370                    return Err(DecodingError::InconsistentImageSizes);
371                }
372
373                self.chunks
374                    .insert(WebPRiffChunk::VP8, start..start + chunk_size);
375                self.kind = ImageKind::Lossy;
376                self.is_lossy = true;
377            }
378            WebPRiffChunk::VP8L => {
379                let signature = self.r.read_u8()?;
380                if signature != 0x2f {
381                    return Err(DecodingError::LosslessSignatureInvalid(signature));
382                }
383
384                let header = self.r.read_u32_le()?;
385                let version = header >> 29;
386                if version != 0 {
387                    return Err(DecodingError::VersionNumberInvalid(version as u8));
388                }
389
390                self.width = (1 + header) & 0x3FFF;
391                self.height = (1 + (header >> 14)) & 0x3FFF;
392                self.chunks
393                    .insert(WebPRiffChunk::VP8L, start..start + chunk_size);
394                self.kind = ImageKind::Lossless;
395                self.has_alpha = (header >> 28) & 1 != 0;
396            }
397            WebPRiffChunk::VP8X => {
398                let mut info = extended::read_extended_header(&mut self.r)?;
399                self.width = info.canvas_width;
400                self.height = info.canvas_height;
401
402                let mut position = start + chunk_size_rounded;
403                let max_position = position + riff_size.saturating_sub(12);
404                self.r.seek_from_start(position)?;
405
406                while position < max_position {
407                    match read_chunk_header(&mut self.r) {
408                        Ok((chunk, chunk_size, chunk_size_rounded)) => {
409                            let range = position + 8..position + 8 + chunk_size;
410                            position += 8 + chunk_size_rounded;
411
412                            if !chunk.is_unknown() {
413                                self.chunks.entry(chunk).or_insert(range);
414                            }
415
416                            if chunk == WebPRiffChunk::ANMF {
417                                self.num_frames += 1;
418                                if chunk_size < 24 {
419                                    return Err(DecodingError::InvalidChunkSize);
420                                }
421
422                                self.r.seek_relative(12)?;
423                                let duration = self.r.read_u32_le()? & 0xffffff;
424                                self.loop_duration =
425                                    self.loop_duration.wrapping_add(u64::from(duration));
426
427                                // If the image is animated, the image data chunk will be inside the
428                                // ANMF chunks, so we must inspect them to determine whether the
429                                // image contains any lossy image data. VP8 chunks store lossy data
430                                // and the spec says that lossless images SHOULD NOT contain ALPH
431                                // chunks, so we treat both as indicators of lossy images.
432                                if !self.is_lossy {
433                                    let (subchunk, ..) = read_chunk_header(&mut self.r)?;
434                                    if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
435                                        self.is_lossy = true;
436                                    }
437                                    self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
438                                } else {
439                                    self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
440                                }
441
442                                continue;
443                            }
444
445                            self.r.seek_relative(chunk_size_rounded as i64)?;
446                        }
447                        Err(DecodingError::BitStreamError) => {
448                            break;
449                        }
450                        Err(e) => return Err(e),
451                    }
452                }
453                self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);
454
455                // NOTE: We allow malformed images that have `info.icc_profile` set without a ICCP chunk,
456                // because this is relatively common.
457                if info.animation
458                    && (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
459                        || !self.chunks.contains_key(&WebPRiffChunk::ANMF))
460                    || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
461                    || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
462                    || !info.animation
463                        && self.chunks.contains_key(&WebPRiffChunk::VP8)
464                            == self.chunks.contains_key(&WebPRiffChunk::VP8L)
465                {
466                    return Err(DecodingError::ChunkMissing);
467                }
468
469                // Decode ANIM chunk.
470                if info.animation {
471                    match self.read_chunk(WebPRiffChunk::ANIM, 6) {
472                        Ok(Some(chunk)) => {
473                            let mut cursor = SliceReader::new(&chunk);
474                            cursor.read_exact(&mut info.background_color_hint)?;
475                            self.loop_count = match cursor.read_u16_le()? {
476                                0 => LoopCount::Forever,
477                                n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
478                            };
479                            self.animation.next_frame_start =
480                                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
481                        }
482                        Ok(None) => return Err(DecodingError::ChunkMissing),
483                        Err(DecodingError::MemoryLimitExceeded) => {
484                            return Err(DecodingError::InvalidChunkSize)
485                        }
486                        Err(e) => return Err(e),
487                    }
488                }
489
490                // If the image is animated, the image data chunk will be inside the ANMF chunks. We
491                // store the ALPH, VP8, and VP8L chunks (as applicable) of the first frame in the
492                // hashmap so that we can read them later.
493                if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
494                    let mut position = range.start + 16;
495                    self.r.seek_from_start(position)?;
496                    for _ in 0..2 {
497                        let (subchunk, subchunk_size, subchunk_size_rounded) =
498                            read_chunk_header(&mut self.r)?;
499                        let subrange = position + 8..position + 8 + subchunk_size;
500                        self.chunks.entry(subchunk).or_insert(subrange.clone());
501
502                        position += 8 + subchunk_size_rounded;
503                        if position + 8 > range.end {
504                            break;
505                        }
506                    }
507                }
508
509                self.has_alpha = info.alpha;
510                self.kind = ImageKind::Extended(info);
511            }
512            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
513        };
514
515        Ok(())
516    }
517
    /// Sets the maximum amount of memory that the decoder is allowed to allocate at once.
    ///
    /// Currently enforced by the chunk-copying accessors (`icc_profile`,
    /// `exif_metadata`, `xmp_metadata`), which fail with `MemoryLimitExceeded`
    /// when the chunk is larger than this limit.
    ///
    /// TODO: Some allocations currently ignore this limit.
    pub fn set_memory_limit(&mut self, limit: usize) {
        self.memory_limit = limit;
    }
524
525    /// Get the background color specified in the image file if the image is extended and animated webp.
526    pub fn background_color_hint(&self) -> Option<[u8; 4]> {
527        if let ImageKind::Extended(info) = &self.kind {
528            Some(info.background_color_hint)
529        } else {
530            None
531        }
532    }
533
534    /// Sets the background color if the image is an extended and animated webp.
535    pub fn set_background_color(&mut self, color: [u8; 4]) -> Result<(), DecodingError> {
536        if let ImageKind::Extended(info) = &mut self.kind {
537            info.background_color = Some(color);
538            Ok(())
539        } else {
540            Err(DecodingError::InvalidParameter(
541                "Background color can only be set on animated webp".into(),
542            ))
543        }
544    }
545
    /// Returns the (width, height) of the image in pixels.
    ///
    /// For extended (VP8X) files this is the canvas size from the header.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }
550
    /// Returns whether the image has an alpha channel. If so, the pixel format is Rgba8 and
    /// otherwise Rgb8.
    ///
    /// Determined while parsing: the VP8L header alpha bit, or the VP8X
    /// extended header's alpha flag.
    pub fn has_alpha(&self) -> bool {
        self.has_alpha
    }
556
557    /// Returns true if the image is animated.
558    pub fn is_animated(&self) -> bool {
559        match &self.kind {
560            ImageKind::Lossy | ImageKind::Lossless => false,
561            ImageKind::Extended(extended) => extended.animation,
562        }
563    }
564
    /// Returns whether the image is lossy. For animated images, this is true if any frame is lossy.
    ///
    /// Set during header parsing: bare VP8 files, or extended files whose
    /// frames contain VP8 or ALPH chunks.
    pub fn is_lossy(&self) -> bool {
        self.is_lossy
    }
569
    /// Returns the number of frames of a single loop of the animation, or zero if the image is not
    /// animated.
    ///
    /// Counted from the ANMF chunks encountered while parsing the container.
    pub fn num_frames(&self) -> u32 {
        self.num_frames
    }
575
    /// Returns the number of times the animation should loop.
    ///
    /// Parsed from the ANIM chunk; still images report `Times(1)`.
    pub fn loop_count(&self) -> LoopCount {
        self.loop_count
    }
580
    /// Returns the total duration of one loop through the animation in milliseconds, or zero if the
    /// image is not animated.
    ///
    /// This is the sum of the durations of all individual frames of the image.
    /// (Accumulated with wrapping addition while parsing the ANMF chunks.)
    pub fn loop_duration(&self) -> u64 {
        self.loop_duration
    }
588
589    fn read_chunk(
590        &mut self,
591        chunk: WebPRiffChunk,
592        max_size: usize,
593    ) -> Result<Option<Vec<u8>>, DecodingError> {
594        match self.chunks.get(&chunk) {
595            Some(range) => {
596                if range.end - range.start > max_size as u64 {
597                    return Err(DecodingError::MemoryLimitExceeded);
598                }
599
600                self.r.seek_from_start(range.start)?;
601                let mut data = vec![0; (range.end - range.start) as usize];
602                self.r.read_exact(&mut data)?;
603                Ok(Some(data))
604            }
605            None => Ok(None),
606        }
607    }
608
    /// Returns the raw bytes of the ICC profile, or None if there is no ICC profile.
    ///
    /// The chunk is copied into memory; fails with `MemoryLimitExceeded` if it
    /// is larger than the limit set via `set_memory_limit`.
    pub fn icc_profile(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
    }
613
    /// Returns the raw bytes of the EXIF metadata, or None if there is no EXIF metadata.
    ///
    /// The chunk is copied into memory; fails with `MemoryLimitExceeded` if it
    /// is larger than the limit set via `set_memory_limit`.
    pub fn exif_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
    }
618
    /// Returns the raw bytes of the XMP metadata, or None if there is no XMP metadata.
    ///
    /// The chunk is copied into memory; fails with `MemoryLimitExceeded` if it
    /// is larger than the limit set via `set_memory_limit`.
    pub fn xmp_metadata(&mut self) -> Result<Option<Vec<u8>>, DecodingError> {
        self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
    }
623
624    /// Returns the number of bytes required to store the image or a single frame, or None if that
625    /// would take more than `usize::MAX` bytes.
626    pub fn output_buffer_size(&self) -> Option<usize> {
627        let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
628        (self.width as usize)
629            .checked_mul(self.height as usize)?
630            .checked_mul(bytes_per_pixel)
631    }
632
    /// Returns the raw bytes of the image. For animated images, this is the first frame.
    ///
    /// Fails with `ImageTooLarge` if `buf` has length different than `output_buffer_size()`
    pub fn read_image(&mut self, buf: &mut [u8]) -> Result<(), DecodingError> {
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(DecodingError::ImageTooLarge);
        }

        if self.is_animated() {
            // Temporarily rewind the animation to the first ANMF chunk so that
            // `read_frame` decodes frame 0, then restore the caller's position.
            let saved = core::mem::take(&mut self.animation);
            self.animation.next_frame_start =
                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
            let result = self.read_frame(buf);
            self.animation = saved;
            result?;
        } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
            // Lossless still image.
            let data_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
            let mut decoder = LosslessDecoder::new(data_slice);

            if self.has_alpha {
                decoder.decode_frame(self.width, self.height, false, buf)?;
            } else {
                // The lossless decoder writes 4 bytes per pixel here, so decode
                // into a scratch RGBA buffer and strip the alpha byte while
                // copying into the caller's 3-byte-per-pixel buffer.
                let mut data = vec![0; self.width as usize * self.height as usize * 4];
                decoder.decode_frame(self.width, self.height, false, &mut data)?;
                for (rgba_val, chunk) in data.chunks_exact(4).zip(buf.chunks_exact_mut(3)) {
                    chunk.copy_from_slice(&rgba_val[..3]);
                }
            }
        } else {
            // Lossy still image: decode the VP8 chunk...
            let range = self
                .chunks
                .get(&WebPRiffChunk::VP8)
                .ok_or(DecodingError::ChunkMissing)?;
            let data_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
            let frame = Vp8Decoder::decode_frame(data_slice)?;
            if u32::from(frame.width) != self.width || u32::from(frame.height) != self.height {
                return Err(DecodingError::InconsistentImageSizes);
            }

            if self.has_alpha() {
                frame.fill_rgba(buf, self.webp_decode_options.lossy_upsampling);

                // ...then decode the ALPH chunk and reconstruct the alpha plane
                // by undoing its per-pixel prediction filter in place.
                let range = self
                    .chunks
                    .get(&WebPRiffChunk::ALPH)
                    .ok_or(DecodingError::ChunkMissing)?
                    .clone();
                let alpha_slice = &self.r.get_ref()[range.start as usize..range.end as usize];
                let alpha_chunk =
                    read_alpha_chunk(alpha_slice, self.width as u16, self.height as u16)?;

                for y in 0..frame.height {
                    for x in 0..frame.width {
                        let predictor: u8 = get_alpha_predictor(
                            x.into(),
                            y.into(),
                            frame.width.into(),
                            alpha_chunk.filtering_method,
                            buf,
                        );

                        let alpha_index =
                            usize::from(y) * usize::from(frame.width) + usize::from(x);
                        // Alpha lives in byte 3 of each 4-byte RGBA pixel.
                        let buffer_index = alpha_index * 4 + 3;

                        buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }
            } else {
                frame.fill_rgb(buf, self.webp_decode_options.lossy_upsampling);
            }
        }

        Ok(())
    }
708
709    /// Reads the next frame of the animation.
710    ///
711    /// The frame contents are written into `buf` and the method returns the duration of the frame
712    /// in milliseconds. If there are no more frames, the method returns
713    /// `DecodingError::NoMoreFrames` and `buf` is left unchanged.
714    ///
715    /// # Panics
716    ///
717    /// Panics if the image is not animated.
718    pub fn read_frame(&mut self, buf: &mut [u8]) -> Result<u32, DecodingError> {
719        assert!(self.is_animated());
720        assert_eq!(Some(buf.len()), self.output_buffer_size());
721
722        if self.animation.next_frame == self.num_frames {
723            return Err(DecodingError::NoMoreFrames);
724        }
725
726        let ImageKind::Extended(info) = &self.kind else {
727            unreachable!()
728        };
729
730        self.r.seek_from_start(self.animation.next_frame_start)?;
731
732        let anmf_size = match read_chunk_header(&mut self.r)? {
733            (WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
734            _ => return Err(DecodingError::ChunkHeaderInvalid(*b"ANMF")),
735        };
736
737        // Read ANMF chunk
738        let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
739        let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
740        let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
741        let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
742        if frame_width > 16384 || frame_height > 16384 {
743            return Err(DecodingError::ImageTooLarge);
744        }
745        if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
746            return Err(DecodingError::FrameOutsideImage);
747        }
748        let duration = extended::read_3_bytes(&mut self.r)?;
749        let frame_info = self.r.read_u8()?;
750        let use_alpha_blending = frame_info & 0b00000010 == 0;
751        let dispose = frame_info & 0b00000001 != 0;
752
753        let clear_color = if self.animation.dispose_next_frame {
754            info.background_color
755        } else {
756            None
757        };
758
759        // Read normal bitstream now
760        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
761        if chunk_size_rounded + 24 > anmf_size {
762            return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
763        }
764
765        let (frame, frame_has_alpha): (Vec<u8>, bool) = match chunk {
766            WebPRiffChunk::VP8 => {
767                let data_slice = self.r.take_slice(chunk_size as usize)?;
768                let raw_frame = Vp8Decoder::decode_frame(data_slice)?;
769                if u32::from(raw_frame.width) != frame_width
770                    || u32::from(raw_frame.height) != frame_height
771                {
772                    return Err(DecodingError::InconsistentImageSizes);
773                }
774                let mut rgb_frame = vec![0; frame_width as usize * frame_height as usize * 3];
775                raw_frame.fill_rgb(&mut rgb_frame, self.webp_decode_options.lossy_upsampling);
776                (rgb_frame, false)
777            }
778            WebPRiffChunk::VP8L => {
779                let data_slice = self.r.take_slice(chunk_size as usize)?;
780                let mut lossless_decoder = LosslessDecoder::new(data_slice);
781                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
782                lossless_decoder.decode_frame(frame_width, frame_height, false, &mut rgba_frame)?;
783                (rgba_frame, true)
784            }
785            WebPRiffChunk::ALPH => {
786                if chunk_size_rounded + 32 > anmf_size {
787                    return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc()));
788                }
789
790                // read alpha
791                let alpha_slice = self.r.take_slice(chunk_size as usize)?;
792                // Skip padding if chunk_size is odd
793                if chunk_size_rounded > chunk_size {
794                    self.r
795                        .seek_relative((chunk_size_rounded - chunk_size) as i64)?;
796                }
797                let alpha_chunk =
798                    read_alpha_chunk(alpha_slice, frame_width as u16, frame_height as u16)?;
799
800                // read opaque
801                let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
802                if chunk_size + next_chunk_size + 32 > anmf_size {
803                    return Err(DecodingError::ChunkHeaderInvalid(next_chunk.to_fourcc()));
804                }
805
806                let vp8_slice = self.r.take_slice(next_chunk_size as usize)?;
807                let frame = Vp8Decoder::decode_frame(vp8_slice)?;
808
809                let mut rgba_frame = vec![0; frame_width as usize * frame_height as usize * 4];
810                frame.fill_rgba(&mut rgba_frame, self.webp_decode_options.lossy_upsampling);
811
812                for y in 0..frame.height {
813                    for x in 0..frame.width {
814                        let predictor: u8 = get_alpha_predictor(
815                            x.into(),
816                            y.into(),
817                            frame.width.into(),
818                            alpha_chunk.filtering_method,
819                            &rgba_frame,
820                        );
821
822                        let alpha_index =
823                            usize::from(y) * usize::from(frame.width) + usize::from(x);
824                        let buffer_index = alpha_index * 4 + 3;
825
826                        rgba_frame[buffer_index] =
827                            predictor.wrapping_add(alpha_chunk.data[alpha_index]);
828                    }
829                }
830
831                (rgba_frame, true)
832            }
833            _ => return Err(DecodingError::ChunkHeaderInvalid(chunk.to_fourcc())),
834        };
835
836        // fill starting canvas with clear color
837        if self.animation.canvas.is_none() {
838            self.animation.canvas = {
839                let mut canvas = vec![0; (self.width * self.height * 4) as usize];
840                if let Some(color) = info.background_color.as_ref() {
841                    canvas
842                        .chunks_exact_mut(4)
843                        .for_each(|c| c.copy_from_slice(color))
844                }
845                Some(canvas)
846            }
847        }
848        extended::composite_frame(
849            self.animation.canvas.as_mut().unwrap(),
850            self.width,
851            self.height,
852            clear_color,
853            &frame,
854            frame_x,
855            frame_y,
856            frame_width,
857            frame_height,
858            frame_has_alpha,
859            use_alpha_blending,
860            self.animation.previous_frame_width,
861            self.animation.previous_frame_height,
862            self.animation.previous_frame_x_offset,
863            self.animation.previous_frame_y_offset,
864        );
865
866        self.animation.previous_frame_width = frame_width;
867        self.animation.previous_frame_height = frame_height;
868        self.animation.previous_frame_x_offset = frame_x;
869        self.animation.previous_frame_y_offset = frame_y;
870
871        self.animation.dispose_next_frame = dispose;
872        self.animation.next_frame_start += anmf_size + 8;
873        self.animation.next_frame += 1;
874
875        if self.has_alpha() {
876            buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
877        } else {
878            for (b, c) in buf
879                .chunks_exact_mut(3)
880                .zip(self.animation.canvas.as_ref().unwrap().chunks_exact(4))
881            {
882                b.copy_from_slice(&c[..3]);
883            }
884        }
885
886        Ok(duration)
887    }
888
889    /// Resets the animation to the first frame.
890    ///
891    /// # Panics
892    ///
893    /// Panics if the image is not animated.
894    pub fn reset_animation(&mut self) {
895        assert!(self.is_animated());
896
897        self.animation.next_frame = 0;
898        self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
899        self.animation.dispose_next_frame = true;
900    }
901
902    /// Sets the upsampling method that is used in lossy decoding
903    pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) {
904        self.webp_decode_options.lossy_upsampling = upsampling_method;
905    }
906}
907
908pub(crate) fn read_fourcc(r: &mut SliceReader) -> Result<WebPRiffChunk, DecodingError> {
909    let mut chunk_fourcc = [0; 4];
910    r.read_exact(&mut chunk_fourcc)?;
911    Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
912}
913
914pub(crate) fn read_chunk_header(
915    r: &mut SliceReader,
916) -> Result<(WebPRiffChunk, u64, u64), DecodingError> {
917    let chunk = read_fourcc(r)?;
918    let chunk_size = r.read_u32_le()?;
919    let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1);
920    Ok((chunk, chunk_size.into(), chunk_size_rounded.into()))
921}
922
923// ============================================================================
924// Convenience decode functions (webpx-compatible API)
925// ============================================================================
926
927/// Decode WebP data to RGBA pixels.
928///
929/// Returns the decoded pixels and dimensions.
930///
931/// # Example
932///
933/// ```rust,no_run
934/// let webp_data: &[u8] = &[]; // your WebP data
935/// let (pixels, width, height) = zenwebp::decode_rgba(webp_data)?;
936/// # Ok::<(), zenwebp::DecodingError>(())
937/// ```
938pub fn decode_rgba(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodingError> {
939    let mut decoder = WebPDecoder::new(data)?;
940    let (width, height) = decoder.dimensions();
941    let output_size = decoder
942        .output_buffer_size()
943        .ok_or(DecodingError::ImageTooLarge)?;
944
945    // Get output in native format (RGB or RGBA)
946    let mut output = vec![0u8; output_size];
947    decoder.read_image(&mut output)?;
948
949    // If the decoder outputs RGB, convert to RGBA
950    if !decoder.has_alpha() {
951        let mut rgba = Vec::with_capacity((width * height * 4) as usize);
952        for chunk in output.chunks_exact(3) {
953            rgba.extend_from_slice(chunk);
954            rgba.push(255);
955        }
956        return Ok((rgba, width, height));
957    }
958
959    Ok((output, width, height))
960}
961
962/// Decode WebP data to RGB pixels (no alpha).
963///
964/// Returns the decoded pixels and dimensions.
965///
966/// # Example
967///
968/// ```rust,no_run
969/// let webp_data: &[u8] = &[]; // your WebP data
970/// let (pixels, width, height) = zenwebp::decode_rgb(webp_data)?;
971/// # Ok::<(), zenwebp::DecodingError>(())
972/// ```
973pub fn decode_rgb(data: &[u8]) -> Result<(Vec<u8>, u32, u32), DecodingError> {
974    let mut decoder = WebPDecoder::new(data)?;
975    let (width, height) = decoder.dimensions();
976    let output_size = decoder
977        .output_buffer_size()
978        .ok_or(DecodingError::ImageTooLarge)?;
979
980    let mut output = vec![0u8; output_size];
981    decoder.read_image(&mut output)?;
982
983    // If the decoder outputs RGBA, convert to RGB
984    if decoder.has_alpha() {
985        let mut rgb = Vec::with_capacity((width * height * 3) as usize);
986        for chunk in output.chunks_exact(4) {
987            rgb.extend_from_slice(&chunk[..3]);
988        }
989        return Ok((rgb, width, height));
990    }
991
992    Ok((output, width, height))
993}
994
995/// Decode WebP data directly into a pre-allocated RGBA buffer.
996///
997/// # Arguments
998/// * `data` - WebP encoded data
999/// * `output` - Pre-allocated output buffer (must be at least stride * height bytes)
1000/// * `stride_bytes` - Row stride in bytes (must be >= width * 4)
1001///
1002/// # Returns
1003/// Width and height of the decoded image.
1004pub fn decode_rgba_into(
1005    data: &[u8],
1006    output: &mut [u8],
1007    stride_bytes: u32,
1008) -> Result<(u32, u32), DecodingError> {
1009    let mut decoder = WebPDecoder::new(data)?;
1010    let (width, height) = decoder.dimensions();
1011
1012    // Validate buffer
1013    let min_stride = (width as usize) * 4;
1014    if (stride_bytes as usize) < min_stride {
1015        return Err(DecodingError::InvalidParameter(format!(
1016            "stride too small: got {}, minimum {}",
1017            stride_bytes, min_stride
1018        )));
1019    }
1020
1021    let required = (stride_bytes as usize) * (height as usize);
1022    if output.len() < required {
1023        return Err(DecodingError::InvalidParameter(format!(
1024            "output buffer too small: got {}, need {}",
1025            output.len(),
1026            required
1027        )));
1028    }
1029
1030    // Decode into temporary buffer
1031    let output_size = decoder
1032        .output_buffer_size()
1033        .ok_or(DecodingError::ImageTooLarge)?;
1034    let mut temp = vec![0u8; output_size];
1035    decoder.read_image(&mut temp)?;
1036
1037    // Copy to output with stride
1038    let has_alpha = decoder.has_alpha();
1039    let src_bpp = if has_alpha { 4 } else { 3 };
1040
1041    for y in 0..(height as usize) {
1042        let dst_row = &mut output[y * (stride_bytes as usize)..][..width as usize * 4];
1043        let src_row = &temp[y * (width as usize) * src_bpp..][..width as usize * src_bpp];
1044
1045        if has_alpha {
1046            dst_row.copy_from_slice(src_row);
1047        } else {
1048            for (dst, src) in dst_row.chunks_exact_mut(4).zip(src_row.chunks_exact(3)) {
1049                dst[..3].copy_from_slice(src);
1050                dst[3] = 255;
1051            }
1052        }
1053    }
1054
1055    Ok((width, height))
1056}
1057
1058/// Decode WebP data directly into a pre-allocated RGB buffer.
1059///
1060/// # Arguments
1061/// * `data` - WebP encoded data
1062/// * `output` - Pre-allocated output buffer (must be at least stride * height bytes)
1063/// * `stride_bytes` - Row stride in bytes (must be >= width * 3)
1064///
1065/// # Returns
1066/// Width and height of the decoded image.
1067pub fn decode_rgb_into(
1068    data: &[u8],
1069    output: &mut [u8],
1070    stride_bytes: u32,
1071) -> Result<(u32, u32), DecodingError> {
1072    let mut decoder = WebPDecoder::new(data)?;
1073    let (width, height) = decoder.dimensions();
1074
1075    // Validate buffer
1076    let min_stride = (width as usize) * 3;
1077    if (stride_bytes as usize) < min_stride {
1078        return Err(DecodingError::InvalidParameter(format!(
1079            "stride too small: got {}, minimum {}",
1080            stride_bytes, min_stride
1081        )));
1082    }
1083
1084    let required = (stride_bytes as usize) * (height as usize);
1085    if output.len() < required {
1086        return Err(DecodingError::InvalidParameter(format!(
1087            "output buffer too small: got {}, need {}",
1088            output.len(),
1089            required
1090        )));
1091    }
1092
1093    // Decode into temporary buffer
1094    let output_size = decoder
1095        .output_buffer_size()
1096        .ok_or(DecodingError::ImageTooLarge)?;
1097    let mut temp = vec![0u8; output_size];
1098    decoder.read_image(&mut temp)?;
1099
1100    // Copy to output with stride
1101    let has_alpha = decoder.has_alpha();
1102    let src_bpp = if has_alpha { 4 } else { 3 };
1103
1104    for y in 0..(height as usize) {
1105        let dst_row = &mut output[y * (stride_bytes as usize)..][..width as usize * 3];
1106        let src_row = &temp[y * (width as usize) * src_bpp..][..width as usize * src_bpp];
1107
1108        if has_alpha {
1109            for (dst, src) in dst_row.chunks_exact_mut(3).zip(src_row.chunks_exact(4)) {
1110                dst.copy_from_slice(&src[..3]);
1111            }
1112        } else {
1113            dst_row.copy_from_slice(src_row);
1114        }
1115    }
1116
1117    Ok((width, height))
1118}
1119
/// Image information obtained from WebP data header.
///
/// Obtain via [`ImageInfo::from_webp`].
#[derive(Debug, Clone)]
pub struct ImageInfo {
    /// Image width in pixels.
    pub width: u32,
    /// Image height in pixels.
    pub height: u32,
    /// Whether the image has an alpha channel.
    pub has_alpha: bool,
    /// Whether the image uses lossy compression.
    pub is_lossy: bool,
}
1132
1133impl ImageInfo {
1134    /// Parse image information from WebP data.
1135    pub fn from_webp(data: &[u8]) -> Result<Self, DecodingError> {
1136        let decoder = WebPDecoder::new(data)?;
1137        let (width, height) = decoder.dimensions();
1138        Ok(Self {
1139            width,
1140            height,
1141            has_alpha: decoder.has_alpha(),
1142            is_lossy: decoder.is_lossy(),
1143        })
1144    }
1145}
1146
#[cfg(test)]
mod tests {
    use super::*;
    const RGB_BPP: usize = 3;

    /// Asserts that every RGB pixel in `data` equals the first pixel.
    fn assert_uniform_color(data: &[u8]) {
        let first_pixel = &data[..RGB_BPP];
        assert!(data.chunks_exact(RGB_BPP).all(|px| px == first_pixel));
    }

    #[test]
    fn add_with_overflow_size() {
        // Fuzz-style input whose declared sizes are near u32::MAX; header
        // parsing should return without panicking (any Ok/Err is fine).
        let bytes = vec![
            0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64,
            0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49,
            0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46,
        ];

        let _ = WebPDecoder::new(&bytes);
    }

    #[test]
    fn decode_2x2_single_color_image() {
        // Image data created from imagemagick and output of xxd:
        // $ convert -size 2x2 xc:#f00 red.webp
        // $ xxd -g 1 red.webp | head

        const NUM_PIXELS: usize = 2 * 2 * RGB_BPP;
        // 2x2 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00,
            0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        assert_uniform_color(&data);
    }

    #[test]
    fn decode_3x3_single_color_image() {
        // Test that any odd pixel "tail" is decoded properly

        const NUM_PIXELS: usize = 3 * 3 * RGB_BPP;
        // 3x3 red pixel image
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00,
            0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        // All pixels are the same value
        assert_uniform_color(&data);
    }
}