1use alloc::string::String;
2use thiserror::Error;
3use whereat::at;
4
/// Errors produced while decoding a WebP image.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DecodeError {
    /// The underlying reader failed (only available with the `std` feature).
    #[cfg(feature = "std")]
    #[error("IO Error: {0}")]
    IoError(#[from] std::io::Error),

    /// The file did not start with the `RIFF` FourCC; carries the bytes found.
    #[error("Invalid RIFF signature: {0:x?}")]
    RiffSignatureInvalid([u8; 4]),

    /// The RIFF form type was not `WEBP`; carries the bytes found.
    #[error("Invalid WebP signature: {0:x?}")]
    WebpSignatureInvalid([u8; 4]),

    /// A chunk required by the header flags or layout was absent.
    #[error("An expected chunk was missing")]
    ChunkMissing,

    /// A chunk header could not be parsed; carries the offending FourCC.
    #[error("Invalid Chunk header: {0:x?}")]
    ChunkHeaderInvalid([u8; 4]),

    /// The ALPH chunk declared an unknown preprocessing mode.
    #[error("Alpha chunk preprocessing flag invalid")]
    InvalidAlphaPreprocessing,

    /// The ALPH chunk declared an unknown compression method.
    #[error("Invalid compression method")]
    InvalidCompressionMethod,

    /// The ALPH chunk's payload did not match the expected plane size.
    #[error("Alpha chunk size mismatch")]
    AlphaChunkSizeMismatch,

    /// Dimensions (or a buffer derived from them) exceeded supported bounds.
    #[error("Image too large")]
    ImageTooLarge,

    /// An animation frame rectangle extended past the canvas.
    #[error("Frame outside image")]
    FrameOutsideImage,

    /// The VP8L one-byte signature was not the expected value; carries the byte found.
    #[error("Invalid lossless signature: {0:x?}")]
    LosslessSignatureInvalid(u8),

    /// The VP8L header declared a version other than 0.
    #[error("Invalid lossless version number: {0}")]
    VersionNumberInvalid(u8),

    /// The lossless stream declared an out-of-range color-cache size.
    #[error("Invalid color cache bits: {0}")]
    InvalidColorCacheBits(u8),

    /// A Huffman table in the stream was malformed.
    #[error("Invalid Huffman code")]
    HuffmanError,

    /// The compressed bitstream was corrupt or truncated.
    #[error("Corrupt bitstream")]
    BitStreamError,

    /// A lossless transform description was malformed.
    #[error("Invalid transform")]
    TransformError,

    /// The VP8 keyframe start code was wrong; carries the bytes found.
    #[error("Invalid VP8 magic: {0:x?}")]
    Vp8MagicInvalid([u8; 3]),

    /// The input ended before the fixed-size headers could be read.
    #[error("Not enough VP8 init data")]
    NotEnoughInitData,

    /// The VP8 frame header declared an unknown color space.
    #[error("Invalid VP8 color space: {0}")]
    ColorSpaceInvalid(u8),

    /// An out-of-range luma prediction mode was encountered.
    #[error("Invalid VP8 luma prediction mode: {0}")]
    LumaPredictionModeInvalid(i8),

    /// An out-of-range intra prediction mode was encountered.
    #[error("Invalid VP8 intra prediction mode: {0}")]
    IntraPredictionModeInvalid(i8),

    /// An out-of-range chroma prediction mode was encountered.
    #[error("Invalid VP8 chroma prediction mode: {0}")]
    ChromaPredictionModeInvalid(i8),

    /// Sizes declared in different headers disagreed with each other.
    #[error("Inconsistent image sizes")]
    InconsistentImageSizes,

    /// The file uses a capability this decoder does not implement.
    #[error("Unsupported feature: {0}")]
    UnsupportedFeature(String),

    /// A caller-supplied argument was invalid; carries a description.
    #[error("Invalid parameter: {0}")]
    InvalidParameter(String),

    /// An allocation would exceed the configured memory limit.
    #[error("Memory limit exceeded")]
    MemoryLimitExceeded,

    /// A chunk's declared size was inconsistent with the file.
    #[error("Invalid chunk size")]
    InvalidChunkSize,

    /// All animation frames have already been read.
    #[error("No more frames")]
    NoMoreFrames,

    /// Decoding was cancelled through the cooperative stop handle.
    #[error("Decoding cancelled: {0}")]
    Cancelled(enough::StopReason),

    /// Forwarded from the `zencodec` integration (with that feature enabled).
    #[cfg(feature = "zencodec")]
    #[error(transparent)]
    UnsupportedOperation(#[from] zencodec::UnsupportedOperation),
}
131
/// Result alias used throughout the decoder; errors carry a source location
/// via [`whereat::At`].
pub type DecodeResult<T> = core::result::Result<T, whereat::At<DecodeError>>;
137
138impl From<enough::StopReason> for DecodeError {
139 fn from(reason: enough::StopReason) -> Self {
140 Self::Cancelled(reason)
141 }
142}
143
144impl From<whereat::At<DecodeError>> for DecodeError {
145 fn from(at: whereat::At<DecodeError>) -> Self {
146 at.decompose().0
147 }
148}
149
150use alloc::format;
152use alloc::vec;
153use alloc::vec::Vec;
154use core::num::NonZeroU16;
155use core::ops::Range;
156
157use hashbrown::HashMap;
158
159use super::extended::{self, WebPExtendedInfo, get_alpha_predictor, read_alpha_chunk};
160use super::lossless::LosslessDecoder;
161use super::vp8v2::DecoderContext;
162use crate::slice_reader::SliceReader;
163
/// FourCC identifiers for the RIFF chunks that may appear in a WebP file.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    RIFF,
    WEBP,
    VP8,
    VP8L,
    VP8X,
    ANIM,
    ANMF,
    ALPH,
    ICCP,
    EXIF,
    XMP,
    /// A chunk whose FourCC is not recognized; the raw bytes are preserved.
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    /// Maps a raw FourCC to its chunk variant, falling back to
    /// [`Self::Unknown`] for codes this decoder does not recognize.
    pub(crate) const fn from_fourcc(fourcc: [u8; 4]) -> Self {
        match &fourcc {
            b"RIFF" => Self::RIFF,
            b"WEBP" => Self::WEBP,
            b"VP8 " => Self::VP8,
            b"VP8L" => Self::VP8L,
            b"VP8X" => Self::VP8X,
            b"ANIM" => Self::ANIM,
            b"ANMF" => Self::ANMF,
            b"ALPH" => Self::ALPH,
            b"ICCP" => Self::ICCP,
            b"EXIF" => Self::EXIF,
            b"XMP " => Self::XMP,
            _ => Self::Unknown(fourcc),
        }
    }

    /// The inverse of [`Self::from_fourcc`]: yields the chunk's FourCC bytes.
    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            Self::RIFF => *b"RIFF",
            Self::WEBP => *b"WEBP",
            Self::VP8 => *b"VP8 ",
            Self::VP8L => *b"VP8L",
            Self::VP8X => *b"VP8X",
            Self::ANIM => *b"ANIM",
            Self::ANMF => *b"ANMF",
            Self::ALPH => *b"ALPH",
            Self::ICCP => *b"ICCP",
            Self::EXIF => *b"EXIF",
            Self::XMP => *b"XMP ",
            Self::Unknown(raw) => raw,
        }
    }

    /// Whether this is an unrecognized chunk.
    pub(crate) const fn is_unknown(self) -> bool {
        match self {
            Self::Unknown(_) => true,
            _ => false,
        }
    }
}
221
/// Which flavor of WebP bitstream the file contains.
enum ImageKind {
    /// A bare VP8 (lossy) bitstream.
    Lossy,
    /// A bare VP8L (lossless) bitstream.
    Lossless,
    /// An extended (VP8X) file; carries the parsed extended header.
    Extended(WebPExtendedInfo),
}
233
/// Mutable state tracked while stepping through an animation's frames.
struct AnimationState {
    // Index of the next frame to decode (0-based).
    next_frame: u32,
    // Byte offset in the input where the next ANMF chunk header starts.
    next_frame_start: u64,
    // Whether the previous frame requested disposal (clearing its rectangle)
    // before the next frame is composited.
    dispose_next_frame: bool,
    // Geometry of the previously composited frame, used for disposal.
    previous_frame_width: u32,
    previous_frame_height: u32,
    previous_frame_x_offset: u32,
    previous_frame_y_offset: u32,
    // Full-canvas RGBA buffer frames are composited onto; allocated lazily
    // on the first `read_frame` call.
    canvas: Option<Vec<u8>>,
    // Reusable per-frame decode buffer.
    frame_scratch: Vec<u8>,
    // Lossy (VP8) decoder context reused across frames.
    ctx: DecoderContext,
}
251impl Default for AnimationState {
252 fn default() -> Self {
253 Self {
254 next_frame: 0,
255 next_frame_start: 0,
256 dispose_next_frame: true,
257 previous_frame_width: 0,
258 previous_frame_height: 0,
259 previous_frame_x_offset: 0,
260 previous_frame_y_offset: 0,
261 canvas: None,
262 frame_scratch: Vec::new(),
263 ctx: DecoderContext::new(),
264 }
265 }
266}
267
/// Number of times an animation repeats.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// Repeat without end.
    Forever,
    /// Repeat exactly the given (non-zero) number of times.
    Times(NonZeroU16),
}

impl core::fmt::Display for LoopCount {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            Self::Forever => f.write_str("infinite"),
            Self::Times(count) => {
                let suffix = if count.get() == 1 { "" } else { "s" };
                write!(f, "{} time{}", count, suffix)
            }
        }
    }
}

impl From<u16> for LoopCount {
    /// A raw loop count of zero means "loop forever" in the WebP ANIM chunk.
    fn from(raw: u16) -> Self {
        NonZeroU16::new(raw).map_or(LoopCount::Forever, LoopCount::Times)
    }
}
294
/// Configuration for decoding a WebP image.
#[derive(Clone, Debug, PartialEq)]
pub struct DecodeConfig {
    /// Chroma upsampling method used for lossy images.
    pub upsampling: UpsamplingMethod,

    /// Resource limits enforced during decoding.
    pub limits: super::limits::Limits,

    /// Dithering strength for lossy decoding; 0 disables dithering.
    pub dithering_strength: u8,
}
309
310impl Default for DecodeConfig {
311 fn default() -> Self {
312 Self {
313 upsampling: UpsamplingMethod::Bilinear,
314 limits: super::limits::Limits::default(),
315 dithering_strength: 0,
316 }
317 }
318}
319
320impl DecodeConfig {
321 #[must_use]
323 pub fn upsampling(mut self, method: UpsamplingMethod) -> Self {
324 self.upsampling = method;
325 self
326 }
327
328 #[must_use]
330 pub fn limits(mut self, limits: super::limits::Limits) -> Self {
331 self.limits = limits;
332 self
333 }
334
335 #[must_use]
337 pub fn max_dimensions(mut self, width: u32, height: u32) -> Self {
338 self.limits = self.limits.max_dimensions(width, height);
339 self
340 }
341
342 #[must_use]
344 pub fn max_memory(mut self, bytes: u64) -> Self {
345 self.limits = self.limits.max_memory(bytes);
346 self
347 }
348
349 #[must_use]
351 pub fn no_fancy_upsampling(mut self) -> Self {
352 self.upsampling = UpsamplingMethod::Simple;
353 self
354 }
355
356 #[must_use]
361 pub fn with_dithering_strength(mut self, strength: u8) -> Self {
362 self.dithering_strength = strength;
363 self
364 }
365
366 pub(crate) fn to_options(&self) -> WebPDecodeOptions {
367 WebPDecodeOptions {
368 lossy_upsampling: self.upsampling,
369 dithering_strength: self.dithering_strength,
370 }
371 }
372}
373
/// A single decode operation: configuration plus the input bytes, with
/// optional cancellation and output-stride settings.
pub struct DecodeRequest<'a> {
    // Decode configuration (upsampling, limits, dithering).
    config: &'a DecodeConfig,
    // The complete WebP file contents.
    data: &'a [u8],
    // Optional cooperative-cancellation handle.
    stop: Option<&'a dyn enough::Stop>,
    // Optional output row stride (in pixels) for the `*_into` methods.
    stride_pixels: Option<u32>,
}
392
impl<'a> DecodeRequest<'a> {
    /// Creates a decode request for `data` using `config`.
    #[must_use]
    pub fn new(config: &'a DecodeConfig, data: &'a [u8]) -> Self {
        Self {
            config,
            data,
            stop: None,
            stride_pixels: None,
        }
    }

    /// Attaches a cooperative-cancellation handle polled during decoding.
    #[must_use]
    pub fn stop(mut self, stop: &'a dyn enough::Stop) -> Self {
        self.stop = Some(stop);
        self
    }

    /// Sets the output row stride, in pixels, used by the `*_into` methods.
    #[must_use]
    pub fn stride(mut self, stride_pixels: u32) -> Self {
        self.stride_pixels = Some(stride_pixels);
        self
    }

    /// Decodes to the image's native layout (RGBA when the image has alpha,
    /// RGB otherwise), returning the pixels, dimensions, and layout used.
    pub fn decode(self) -> DecodeResult<(Vec<u8>, u32, u32, crate::PixelLayout)> {
        let (pixels, w, h, has_alpha) = decode_native_internal(
            self.data,
            &self.config.to_options(),
            &self.config.limits,
            self.stop,
        )?;
        let layout = if has_alpha {
            crate::PixelLayout::Rgba8
        } else {
            crate::PixelLayout::Rgb8
        };
        Ok((pixels, w, h, layout))
    }

    /// Decodes to RGBA (4 bytes per pixel) regardless of source alpha.
    pub fn decode_rgba(self) -> DecodeResult<(Vec<u8>, u32, u32)> {
        let (rgba, w, h) = decode_to_rgba_internal(
            self.data,
            &self.config.to_options(),
            &self.config.limits,
            self.stop,
        )?;
        Ok((rgba, w, h))
    }

    /// Decodes to RGB (3 bytes per pixel), stripping alpha when present.
    pub fn decode_rgb(self) -> DecodeResult<(Vec<u8>, u32, u32)> {
        let (native, w, h, has_alpha) = decode_native_internal(
            self.data,
            &self.config.to_options(),
            &self.config.limits,
            self.stop,
        )?;
        if !has_alpha {
            // Native output is already RGB; no conversion needed.
            Ok((native, w, h))
        } else {
            let pixel_count = (w as usize) * (h as usize);
            let mut rgb = alloc::vec![0u8; pixel_count * 3];
            garb::bytes::rgba_to_rgb(&native, &mut rgb).map_err(|e| at!(garb_err(e)))?;
            Ok((rgb, w, h))
        }
    }

    /// Decodes RGBA into a caller-provided buffer, honoring the configured
    /// stride; returns the image dimensions.
    pub fn decode_rgba_into(self, output: &mut [u8]) -> DecodeResult<(u32, u32)> {
        let (rgba, w, h) = decode_to_rgba_internal(
            self.data,
            &self.config.to_options(),
            &self.config.limits,
            self.stop,
        )?;
        convert_to_output(
            &rgba,
            output,
            w,
            h,
            4,
            self.stride_pixels,
            // Row-by-row copy; `ss`/`ds` are the source/destination strides
            // supplied by `convert_to_output`.
            |src, dst, w, h, ss, ds| {
                for y in 0..h {
                    dst[y * ds..][..w * 4].copy_from_slice(&src[y * ss..][..w * 4]);
                }
                Ok(())
            },
        )?;
        Ok((w, h))
    }

    /// Decodes RGB into a caller-provided buffer, honoring the configured
    /// stride; returns the image dimensions.
    pub fn decode_rgb_into(self, output: &mut [u8]) -> DecodeResult<(u32, u32)> {
        let (rgba, w, h) = decode_to_rgba_internal(
            self.data,
            &self.config.to_options(),
            &self.config.limits,
            self.stop,
        )?;
        convert_to_output(
            &rgba,
            output,
            w,
            h,
            3,
            self.stride_pixels,
            |src, dst, w, h, ss, ds| {
                garb::bytes::rgba_to_rgb_strided(src, dst, w, h, ss, ds).map_err(garb_err)
            },
        )?;
        Ok((w, h))
    }

    /// Parses only the headers and returns image metadata without decoding pixels.
    pub fn info(self) -> DecodeResult<ImageInfo> {
        ImageInfo::from_webp(self.data)
    }

    /// Decodes a lossy image directly to YUV 4:2:0 planes.
    pub fn decode_yuv420(self) -> DecodeResult<YuvPlanes> {
        decode_yuv420(self.data)
    }

    /// Lossy-only RGB decode path (internal; see `decode_lossy_internal`).
    #[allow(dead_code)]
    pub(crate) fn decode_rgb_lossy(self) -> DecodeResult<(Vec<u8>, u16, u16)> {
        self.decode_lossy_internal(3)
    }

    /// Lossy-only RGBA decode path (internal; see `decode_lossy_internal`).
    #[allow(dead_code)]
    pub(crate) fn decode_rgba_lossy(self) -> DecodeResult<(Vec<u8>, u16, u16)> {
        self.decode_lossy_internal(4)
    }

    /// Decodes a lossy (bare VP8 or lossy VP8X) file to `bpp` bytes per pixel
    /// (3 = RGB, 4 = RGBA). Rejects lossless bitstreams and animations.
    fn decode_lossy_internal(self, bpp: usize) -> DecodeResult<(Vec<u8>, u16, u16)> {
        let data = self.data;
        let dither_strength = self.config.dithering_strength;
        // Smallest usable container: 12-byte RIFF header + 8-byte chunk header.
        if data.len() < 20 {
            return Err(whereat::at!(DecodeError::NotEnoughInitData));
        }

        // Validate the RIFF/WEBP container signatures.
        if &data[..4] != b"RIFF" {
            let mut sig = [0u8; 4];
            sig.copy_from_slice(&data[..4]);
            return Err(whereat::at!(DecodeError::RiffSignatureInvalid(sig)));
        }
        if &data[8..12] != b"WEBP" {
            let mut sig = [0u8; 4];
            sig.copy_from_slice(&data[8..12]);
            return Err(whereat::at!(DecodeError::WebpSignatureInvalid(sig)));
        }

        let first_chunk = &data[12..16];

        match first_chunk {
            b"VP8 " => {
                // Bare lossy bitstream: the VP8 payload follows the chunk size.
                let chunk_size =
                    u32::from_le_bytes([data[16], data[17], data[18], data[19]]) as usize;
                let vp8_start = 20;
                let vp8_end = (vp8_start + chunk_size).min(data.len());
                let vp8_data = &data[vp8_start..vp8_end];

                let mut ctx = DecoderContext::new().with_dithering_strength(dither_strength);
                let mut output = Vec::new();
                let (w, h) = ctx.decode_to_rgb(vp8_data, &mut output, bpp)?;
                Ok((output, w, h))
            }
            b"VP8X" => {
                use crate::mux::WebPDemuxer;

                // Extended file: locate the single frame via the demuxer.
                let demuxer = WebPDemuxer::new(data).map_err(|e| {
                    whereat::at!(DecodeError::InvalidParameter(alloc::format!(
                        "demux error: {e}"
                    )))
                })?;

                if demuxer.is_animated() {
                    return Err(whereat::at!(DecodeError::UnsupportedFeature(
                        "lossy single-frame decode does not support animation; use AnimationDecoder"
                            .into()
                    )));
                }

                let frame = demuxer
                    .frame(1)
                    .ok_or_else(|| whereat::at!(DecodeError::ChunkMissing))?;

                if !frame.is_lossy {
                    return Err(whereat::at!(DecodeError::UnsupportedFeature(
                        "lossy decoder only supports VP8, got VP8L".into()
                    )));
                }

                let mut ctx = DecoderContext::new().with_dithering_strength(dither_strength);
                let mut output = Vec::new();

                // When an alpha plane exists, decode at 4 bpp so there is a
                // byte per pixel to receive it, even if RGB was requested.
                let decode_bpp = if frame.has_alpha { 4 } else { bpp };
                let (w, h) = ctx.decode_to_rgb(frame.bitstream, &mut output, decode_bpp)?;

                if let Some(alpha_data) = frame.alpha_data {
                    let alpha_chunk = read_alpha_chunk(alpha_data, w, h)?;

                    // Reverse the alpha predictor filter and write the plane
                    // into every pixel's 4th byte.
                    let fw = usize::from(w);
                    let fh = usize::from(h);
                    for y in 0..fh {
                        for x in 0..fw {
                            let predictor: u8 = get_alpha_predictor(
                                x,
                                y,
                                fw,
                                alpha_chunk.filtering_method,
                                &output,
                            );

                            let alpha_index = y * fw + x;
                            let buffer_index = alpha_index * 4 + 3;

                            output[buffer_index] =
                                predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                        }
                    }
                }

                // Drop the alpha channel again if the caller asked for RGB.
                if decode_bpp == 4 && bpp == 3 {
                    let pixel_count = usize::from(w) * usize::from(h);
                    let mut rgb = alloc::vec![0u8; pixel_count * 3];
                    garb::bytes::rgba_to_rgb(&output, &mut rgb)
                        .map_err(|e| whereat::at!(garb_err(e)))?;
                    Ok((rgb, w, h))
                } else {
                    Ok((output, w, h))
                }
            }
            _ => Err(whereat::at!(DecodeError::UnsupportedFeature(
                alloc::format!("lossy decoder only supports VP8, got {:?}", first_chunk)
            ))),
        }
    }
}
656
/// Internal decode options shared by the lossy and lossless paths.
///
/// The manual `Default` impl was replaced by `#[derive(Default)]`:
/// `UpsamplingMethod` already marks `Bilinear` with `#[default]`, and a
/// `u8` defaults to 0, so the derive produces exactly the same values.
#[derive(Clone, Default)]
#[non_exhaustive]
pub(crate) struct WebPDecodeOptions {
    /// Chroma upsampling method used when decoding lossy (VP8) bitstreams.
    pub lossy_upsampling: UpsamplingMethod,
    /// Dithering strength applied to lossy output; 0 disables dithering.
    pub dithering_strength: u8,
}

/// Chroma upsampling method for lossy (VP8) decoding.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum UpsamplingMethod {
    /// Bilinear ("fancy") upsampling — the default.
    #[default]
    Bilinear,
    /// Simple upsampling.
    Simple,
}
694
/// Streaming WebP decoder over a borrowed byte slice.
pub struct WebPDecoder<'a> {
    // Cursor over the input bytes.
    r: SliceReader<'a>,
    // Maximum bytes a single metadata chunk read may allocate.
    memory_limit: usize,
    // Dimension / memory / frame-count limits enforced while decoding.
    limits: super::limits::Limits,

    // Canvas dimensions parsed from the bitstream headers.
    width: u32,
    height: u32,

    // Which bitstream flavor the file contains.
    kind: ImageKind,
    // State for stepping through animation frames.
    animation: AnimationState,

    // Whether any lossy (VP8) data was found in the file.
    is_lossy: bool,
    // Whether the decoded output carries an alpha channel.
    has_alpha: bool,
    // Number of ANMF frames found (0 for still images).
    num_frames: u32,
    // Loop count from the ANIM chunk (defaults to "once" for stills).
    loop_count: LoopCount,
    // Sum of all frame durations (wrapping add over ANMF duration fields).
    loop_duration: u64,

    // Byte range of each recognized chunk within the input.
    chunks: HashMap<WebPRiffChunk, Range<u64>>,

    // Internal decode options forwarded to the lossy decoder.
    webp_decode_options: WebPDecodeOptions,

    // Optional cooperative-cancellation handle.
    stop: Option<&'a dyn enough::Stop>,
}
719
720impl<'a> WebPDecoder<'a> {
721 pub fn build(data: &'a [u8]) -> Result<Self, DecodeError> {
745 Ok(Self::new(data)?)
746 }
747
748 pub fn new(data: &'a [u8]) -> DecodeResult<Self> {
750 Self::new_with_options(data, WebPDecodeOptions::default())
751 }
752
753 pub fn info(&self) -> ImageInfo {
770 let icc_profile = self
771 .read_chunk_direct(WebPRiffChunk::ICCP, self.memory_limit)
772 .unwrap_or(None);
773 let exif = self
774 .read_chunk_direct(WebPRiffChunk::EXIF, self.memory_limit)
775 .unwrap_or(None);
776 let xmp = self
777 .read_chunk_direct(WebPRiffChunk::XMP, self.memory_limit)
778 .unwrap_or(None);
779 let orientation = exif
780 .as_deref()
781 .and_then(crate::exif_orientation::parse_orientation)
782 .and_then(zenpixels::Orientation::from_exif);
783 ImageInfo {
784 width: self.width,
785 height: self.height,
786 has_alpha: self.has_alpha,
787 is_lossy: self.is_lossy,
788 has_animation: self.is_animated(),
789 frame_count: self.num_frames,
790 format: if self.is_lossy {
791 BitstreamFormat::Lossy
792 } else {
793 BitstreamFormat::Lossless
794 },
795 orientation,
796 icc_profile,
797 exif,
798 xmp,
799 }
800 }
801
802 pub(crate) fn new_with_options(
804 data: &'a [u8],
805 webp_decode_options: WebPDecodeOptions,
806 ) -> DecodeResult<Self> {
807 let mut decoder = Self {
808 r: SliceReader::new(data),
809 width: 0,
810 height: 0,
811 num_frames: 0,
812 kind: ImageKind::Lossy,
813 chunks: HashMap::new(),
814 animation: Default::default(),
815 memory_limit: usize::MAX,
816 limits: super::limits::Limits::default(),
817 is_lossy: false,
818 has_alpha: false,
819 loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
820 loop_duration: 0,
821 webp_decode_options,
822 stop: None,
823 };
824 decoder.read_data()?;
825 Ok(decoder)
826 }
827
    /// Parses the RIFF container and indexes every chunk.
    ///
    /// Fills in `width`/`height`, `kind`, the alpha/lossy flags, the chunk
    /// table, and — for extended (VP8X) files — animation metadata. Called
    /// once from the constructor.
    fn read_data(&mut self) -> DecodeResult<()> {
        let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
            return Err(at!(DecodeError::ChunkHeaderInvalid(*b"RIFF")));
        };

        match &read_fourcc(&mut self.r)? {
            WebPRiffChunk::WEBP => {}
            fourcc => return Err(at!(DecodeError::WebpSignatureInvalid(fourcc.to_fourcc()))),
        }

        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        let start = self.r.stream_position();

        match chunk {
            WebPRiffChunk::VP8 => {
                // Bare lossy file: parse the VP8 frame header for dimensions.
                let tag = self.r.read_u24_le()?;

                // Bit 0 of the frame tag: 0 = keyframe, 1 = interframe.
                let keyframe = tag & 1 == 0;
                if !keyframe {
                    return Err(at!(DecodeError::UnsupportedFeature(
                        "Non-keyframe frames".into(),
                    )));
                }

                // Keyframes carry a fixed 3-byte start code.
                let mut tag = [0u8; 3];
                self.r.read_exact(&mut tag)?;
                if tag != [0x9d, 0x01, 0x2a] {
                    return Err(at!(DecodeError::Vp8MagicInvalid(tag)));
                }

                let w = self.r.read_u16_le()?;
                let h = self.r.read_u16_le()?;

                // The low 14 bits hold the dimension.
                self.width = u32::from(w & 0x3FFF);
                self.height = u32::from(h & 0x3FFF);
                if self.width == 0 || self.height == 0 {
                    return Err(at!(DecodeError::InconsistentImageSizes));
                }

                self.limits.check_dimensions(self.width, self.height)?;

                self.chunks
                    .insert(WebPRiffChunk::VP8, start..start + chunk_size);
                self.kind = ImageKind::Lossy;
                self.is_lossy = true;
            }
            WebPRiffChunk::VP8L => {
                // Bare lossless file: validate the one-byte signature…
                let signature = self.r.read_u8()?;
                if signature != 0x2f {
                    return Err(at!(DecodeError::LosslessSignatureInvalid(signature)));
                }

                // …then unpack the 32-bit header: 14-bit (width-1),
                // 14-bit (height-1), 1 alpha bit, 3-bit version (must be 0).
                let header = self.r.read_u32_le()?;
                let version = header >> 29;
                if version != 0 {
                    return Err(at!(DecodeError::VersionNumberInvalid(version as u8)));
                }

                self.width = (1 + header) & 0x3FFF;
                self.height = (1 + (header >> 14)) & 0x3FFF;
                self.limits.check_dimensions(self.width, self.height)?;
                self.chunks
                    .insert(WebPRiffChunk::VP8L, start..start + chunk_size);
                self.kind = ImageKind::Lossless;
                self.has_alpha = (header >> 28) & 1 != 0;
            }
            WebPRiffChunk::VP8X => {
                // Extended file: read the feature-flag header, then walk all
                // sibling chunks to build the chunk table.
                let mut info = extended::read_extended_header(&mut self.r)?;
                self.width = info.canvas_width;
                self.height = info.canvas_height;
                self.limits.check_dimensions(self.width, self.height)?;

                let mut position = start + chunk_size_rounded;
                let max_position = position + riff_size.saturating_sub(12);
                self.r.seek_from_start(position)?;

                while position < max_position {
                    match read_chunk_header(&mut self.r) {
                        Ok((chunk, chunk_size, chunk_size_rounded)) => {
                            let range = position + 8..position + 8 + chunk_size;
                            position += 8 + chunk_size_rounded;

                            // First occurrence wins; unknown chunks are skipped.
                            if !chunk.is_unknown() {
                                self.chunks.entry(chunk).or_insert(range);
                            }

                            if chunk == WebPRiffChunk::ANMF {
                                self.num_frames += 1;
                                self.limits.check_frame_count(self.num_frames as usize)?;
                                if chunk_size < 24 {
                                    return Err(at!(DecodeError::InvalidChunkSize));
                                }

                                // Skip the 12 bytes of frame geometry, then
                                // read the 24-bit duration field.
                                self.r.seek_relative(12)?;
                                let duration = self.r.read_u32_le()? & 0xffffff;
                                self.loop_duration =
                                    self.loop_duration.wrapping_add(u64::from(duration));

                                // Peek the frame's first sub-chunk to learn
                                // whether any frame in the file is lossy.
                                if !self.is_lossy {
                                    let (subchunk, ..) = read_chunk_header(&mut self.r)?;
                                    if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
                                        self.is_lossy = true;
                                    }
                                    self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
                                } else {
                                    self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
                                }

                                continue;
                            }

                            self.r.seek_relative(chunk_size_rounded as i64)?;
                        }
                        // A truncated trailing chunk ends the scan gracefully.
                        Err(DecodeError::BitStreamError) => {
                            break;
                        }
                        Err(e) => return Err(at!(e)),
                    }
                }
                self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);

                // The VP8X feature flags must agree with the chunks actually
                // present; a still image needs exactly one of VP8 / VP8L.
                if info.animation
                    && (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
                        || !self.chunks.contains_key(&WebPRiffChunk::ANMF))
                    || info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
                    || info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
                    || !info.animation
                        && self.chunks.contains_key(&WebPRiffChunk::VP8)
                            == self.chunks.contains_key(&WebPRiffChunk::VP8L)
                {
                    return Err(at!(DecodeError::ChunkMissing));
                }

                if info.animation {
                    // ANIM payload: 4-byte background color + 2-byte loop count.
                    match self.read_chunk(WebPRiffChunk::ANIM, 6) {
                        Ok(Some(chunk)) => {
                            let mut cursor = SliceReader::new(&chunk);
                            cursor.read_exact(&mut info.background_color_hint)?;
                            self.loop_count = match cursor.read_u16_le()? {
                                0 => LoopCount::Forever,
                                n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
                            };
                            // Position the frame cursor at the first ANMF header.
                            self.animation.next_frame_start =
                                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
                        }
                        Ok(None) => return Err(at!(DecodeError::ChunkMissing)),
                        Err(ref e) if matches!(e.error(), DecodeError::MemoryLimitExceeded) => {
                            return Err(at!(DecodeError::InvalidChunkSize));
                        }
                        Err(e) => return Err(e),
                    }
                }

                if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
                    // Also index the first frame's own sub-chunks (e.g. ALPH
                    // and VP8) so still-image accessors can locate them.
                    let mut position = range.start + 16;
                    self.r.seek_from_start(position)?;
                    for _ in 0..2 {
                        let (subchunk, subchunk_size, subchunk_size_rounded) =
                            read_chunk_header(&mut self.r)?;
                        let subrange = position + 8..position + 8 + subchunk_size;
                        self.chunks.entry(subchunk).or_insert(subrange.clone());

                        position += 8 + subchunk_size_rounded;
                        if position + 8 > range.end {
                            break;
                        }
                    }
                }

                self.has_alpha = info.alpha;
                self.kind = ImageKind::Extended(info);
            }
            _ => return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()))),
        };

        Ok(())
    }
1016
    /// Sets (or clears) the cooperative-cancellation handle consulted while decoding.
    pub fn set_stop(&mut self, stop: Option<&'a dyn enough::Stop>) {
        self.stop = stop;
    }
1024
    /// Caps how many bytes a single metadata chunk read may allocate.
    pub fn set_memory_limit(&mut self, limit: usize) {
        self.memory_limit = limit;
    }
1029
    /// Replaces the dimension/memory/frame-count limits used while decoding.
    pub fn set_limits(&mut self, limits: super::limits::Limits) {
        self.limits = limits;
    }
1034
1035 pub fn background_color_hint(&self) -> Option<[u8; 4]> {
1037 if let ImageKind::Extended(info) = &self.kind {
1038 Some(info.background_color_hint)
1039 } else {
1040 None
1041 }
1042 }
1043
1044 pub fn set_background_color(&mut self, color: [u8; 4]) -> DecodeResult<()> {
1046 if let ImageKind::Extended(info) = &mut self.kind {
1047 info.background_color = Some(color);
1048 Ok(())
1049 } else {
1050 Err(at!(DecodeError::InvalidParameter(
1051 "Background color can only be set on animated webp".into(),
1052 )))
1053 }
1054 }
1055
    /// Canvas `(width, height)` in pixels.
    pub fn dimensions(&self) -> (u32, u32) {
        (self.width, self.height)
    }
1060
    /// Whether the decoded output carries an alpha channel (RGBA vs RGB).
    pub fn has_alpha(&self) -> bool {
        self.has_alpha
    }
1066
1067 pub fn is_animated(&self) -> bool {
1069 match &self.kind {
1070 ImageKind::Lossy | ImageKind::Lossless => false,
1071 ImageKind::Extended(extended) => extended.animation,
1072 }
1073 }
1074
    /// Whether any lossy (VP8) bitstream data was found in the file.
    pub fn is_lossy(&self) -> bool {
        self.is_lossy
    }
1079
    /// Number of ANMF frames found; 0 for still images.
    pub fn num_frames(&self) -> u32 {
        self.num_frames
    }
1085
    /// How many times the animation repeats (from the ANIM chunk).
    pub fn loop_count(&self) -> LoopCount {
        self.loop_count
    }
1090
    /// Sum of all frame durations (wrapping add over the ANMF duration fields).
    pub fn loop_duration(&self) -> u64 {
        self.loop_duration
    }
1098
    /// Reads a chunk's bytes, capped at `max_size`.
    ///
    /// `&mut self` mirrors the rest of the streaming API; the actual work is
    /// done by the read-only [`Self::read_chunk_direct`].
    fn read_chunk(
        &mut self,
        chunk: WebPRiffChunk,
        max_size: usize,
    ) -> DecodeResult<Option<Vec<u8>>> {
        self.read_chunk_direct(chunk, max_size)
    }
1106
1107 fn read_chunk_direct(
1108 &self,
1109 chunk: WebPRiffChunk,
1110 max_size: usize,
1111 ) -> DecodeResult<Option<Vec<u8>>> {
1112 match self.chunks.get(&chunk) {
1113 Some(range) => {
1114 let len = (range.end - range.start) as usize;
1115 if len > max_size {
1116 return Err(at!(DecodeError::MemoryLimitExceeded));
1117 }
1118 let slice = self.chunk_slice(range)?;
1119 Ok(Some(slice.to_vec()))
1120 }
1121 None => Ok(None),
1122 }
1123 }
1124
1125 fn chunk_slice(&self, range: &core::ops::Range<u64>) -> DecodeResult<&[u8]> {
1127 let buf = self.r.get_ref();
1128 let start = range.start as usize;
1129 let end = range.end as usize;
1130 if end > buf.len() || start > end {
1131 return Err(at!(DecodeError::InvalidChunkSize));
1132 }
1133 Ok(&buf[start..end])
1134 }
1135
    /// Returns the raw ICC color-profile chunk (ICCP), if present.
    pub fn icc_profile(&mut self) -> DecodeResult<Option<Vec<u8>>> {
        self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
    }
1140
    /// Returns the raw EXIF metadata chunk, if present.
    pub fn exif_metadata(&mut self) -> DecodeResult<Option<Vec<u8>>> {
        self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
    }
1145
    /// Returns the raw XMP metadata chunk, if present.
    pub fn xmp_metadata(&mut self) -> DecodeResult<Option<Vec<u8>>> {
        self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
    }
1150
1151 pub fn output_buffer_size(&self) -> Option<usize> {
1154 let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
1155 (self.width as usize)
1156 .checked_mul(self.height as usize)?
1157 .checked_mul(bytes_per_pixel)
1158 }
1159
    /// Decodes the still image (or, for animations, the first frame) into
    /// `buf`, which must be exactly [`Self::output_buffer_size`] bytes.
    /// Output is RGBA when `has_alpha()`, RGB otherwise.
    pub fn read_image(&mut self, buf: &mut [u8]) -> DecodeResult<()> {
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(at!(DecodeError::ImageTooLarge));
        }

        if self.is_animated() {
            // Decode the first frame without disturbing in-progress animation
            // state: swap in fresh state, decode, then restore.
            let saved = core::mem::take(&mut self.animation);
            self.animation.next_frame_start =
                self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
            let result = self.read_frame(buf);
            self.animation = saved;
            result?;
        } else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
            // Lossless still image.
            let data_slice = self.chunk_slice(range)?;
            let mut decoder = LosslessDecoder::new(data_slice);
            decoder.set_stop(self.stop);

            if self.has_alpha {
                decoder.decode_frame(self.width, self.height, false, buf)?;
            } else {
                // The lossless decoder writes 4 bytes per pixel; decode into
                // a temporary RGBA buffer and strip alpha afterwards.
                let alloc_size = self.width as usize * self.height as usize * 4;
                self.limits.check_memory(alloc_size)?;
                let mut data = vec![0; alloc_size];
                decoder.decode_frame(self.width, self.height, false, &mut data)?;
                garb::bytes::rgba_to_rgb(&data, buf).map_err(garb_err)?;
            }
        } else {
            // Lossy still image (VP8, possibly with a separate ALPH chunk).
            let range = self
                .chunks
                .get(&WebPRiffChunk::VP8)
                .ok_or(DecodeError::ChunkMissing)?
                .clone();
            let data_buf = self.r.get_ref();
            let vp8_data = data_buf
                .get(range.start as usize..range.end as usize)
                .ok_or(at!(DecodeError::InvalidChunkSize))?;

            let bpp = if self.has_alpha() { 4 } else { 3 };
            self.animation
                .ctx
                .set_dithering_strength(self.webp_decode_options.dithering_strength);
            let mut output = Vec::new();
            let (w, h) = self
                .animation
                .ctx
                .decode_to_rgb(vp8_data, &mut output, bpp)?;
            // The bitstream's own dimensions must match the header's.
            if u32::from(w) != self.width || u32::from(h) != self.height {
                return Err(at!(DecodeError::InconsistentImageSizes));
            }

            if self.has_alpha() {
                buf.copy_from_slice(&output);

                // Decode the ALPH chunk and reverse its predictor filter,
                // writing the alpha plane into every pixel's 4th byte.
                let alpha_range = self
                    .chunks
                    .get(&WebPRiffChunk::ALPH)
                    .ok_or_else(|| at!(DecodeError::ChunkMissing))?
                    .clone();
                let alpha_slice = &data_buf[alpha_range.start as usize..alpha_range.end as usize];
                let alpha_chunk =
                    read_alpha_chunk(alpha_slice, self.width as u16, self.height as u16)?;

                let fw = usize::from(w);
                let fh = usize::from(h);
                for y in 0..fh {
                    for x in 0..fw {
                        let predictor: u8 =
                            get_alpha_predictor(x, y, fw, alpha_chunk.filtering_method, buf);

                        let alpha_index = y * fw + x;
                        let buffer_index = alpha_index * 4 + 3;

                        buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }
            } else {
                buf.copy_from_slice(&output);
            }
        }

        Ok(())
    }
1246
    /// Decodes the next animation frame, composites it onto the canvas, and
    /// writes the full canvas into `buf` (RGBA when `has_alpha()`, RGB
    /// otherwise). Returns the frame duration from the ANMF header.
    ///
    /// # Errors
    /// [`DecodeError::InvalidParameter`] if the image is not animated,
    /// [`DecodeError::ImageTooLarge`] if `buf` has the wrong length, and
    /// [`DecodeError::NoMoreFrames`] once all frames have been read.
    pub fn read_frame(&mut self, buf: &mut [u8]) -> DecodeResult<u32> {
        if !self.is_animated() {
            return Err(at!(DecodeError::InvalidParameter(String::from(
                "not an animated WebP",
            ))));
        }
        if Some(buf.len()) != self.output_buffer_size() {
            return Err(at!(DecodeError::ImageTooLarge));
        }

        if self.animation.next_frame == self.num_frames {
            return Err(at!(DecodeError::NoMoreFrames));
        }

        // Only extended files can be animated (checked above).
        let ImageKind::Extended(info) = &self.kind else {
            unreachable!()
        };

        self.r.seek_from_start(self.animation.next_frame_start)?;

        let anmf_size = match read_chunk_header(&mut self.r)? {
            (WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
            _ => return Err(at!(DecodeError::ChunkHeaderInvalid(*b"ANMF"))),
        };

        // ANMF header: offsets are stored as value/2; sizes as value-1.
        let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
        let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
        let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
        if frame_width > 16384 || frame_height > 16384 {
            return Err(at!(DecodeError::ImageTooLarge));
        }
        if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
            return Err(at!(DecodeError::FrameOutsideImage));
        }
        let duration = extended::read_3_bytes(&mut self.r)?;
        let frame_info = self.r.read_u8()?;
        // Flag bits: bit 1 = "do not blend"; bit 0 = "dispose to background".
        let use_alpha_blending = frame_info & 0b00000010 == 0;
        let dispose = frame_info & 0b00000001 != 0;

        self.animation
            .ctx
            .set_dithering_strength(self.webp_decode_options.dithering_strength);

        let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
        // The sub-chunk must fit inside the ANMF payload (24-byte overhead).
        if chunk_size_rounded + 24 > anmf_size {
            return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())));
        }

        // Decode the frame's pixels into `frame_scratch`.
        let frame_has_alpha: bool = match chunk {
            WebPRiffChunk::VP8 => {
                // Lossy frame without alpha: decode straight to RGB.
                let data_slice = self.r.take_slice(chunk_size as usize)?;
                let (w, h) = self.animation.ctx.decode_to_rgb(
                    data_slice,
                    &mut self.animation.frame_scratch,
                    3,
                )?;
                if u32::from(w) != frame_width || u32::from(h) != frame_height {
                    return Err(at!(DecodeError::InconsistentImageSizes));
                }
                false
            }
            WebPRiffChunk::VP8L => {
                // Lossless frame: decoded at 4 bytes per pixel (RGBA).
                let data_slice = self.r.take_slice(chunk_size as usize)?;
                let mut lossless_decoder = LosslessDecoder::new(data_slice);
                lossless_decoder.set_stop(self.stop);
                let frame_alloc = frame_width as usize * frame_height as usize * 4;
                self.limits.check_memory(frame_alloc)?;
                self.animation.frame_scratch.resize(frame_alloc, 0);
                lossless_decoder.decode_frame(
                    frame_width,
                    frame_height,
                    false,
                    &mut self.animation.frame_scratch,
                )?;
                true
            }
            WebPRiffChunk::ALPH => {
                // Lossy frame with alpha: ALPH chunk first, then VP8.
                if chunk_size_rounded + 32 > anmf_size {
                    return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())));
                }

                let alpha_slice = self.r.take_slice(chunk_size as usize)?;
                // Chunk payloads are padded to even length; skip the pad byte.
                if chunk_size_rounded > chunk_size {
                    self.r
                        .seek_relative((chunk_size_rounded - chunk_size) as i64)?;
                }
                let alpha_chunk =
                    read_alpha_chunk(alpha_slice, frame_width as u16, frame_height as u16)?;

                let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
                if chunk_size + next_chunk_size + 32 > anmf_size {
                    return Err(at!(DecodeError::ChunkHeaderInvalid(next_chunk.to_fourcc())));
                }

                let vp8_slice = self.r.take_slice(next_chunk_size as usize)?;
                let (w, h) = self.animation.ctx.decode_to_rgb(
                    vp8_slice,
                    &mut self.animation.frame_scratch,
                    4,
                )?;

                // Reverse the alpha predictor filter into the 4th byte of
                // every pixel.
                let fw = usize::from(w);
                let fh = usize::from(h);

                for y in 0..fh {
                    for x in 0..fw {
                        let predictor: u8 = get_alpha_predictor(
                            x,
                            y,
                            fw,
                            alpha_chunk.filtering_method,
                            &self.animation.frame_scratch,
                        );

                        let alpha_index = y * fw + x;
                        let buffer_index = alpha_index * 4 + 3;

                        self.animation.frame_scratch[buffer_index] =
                            predictor.wrapping_add(alpha_chunk.data[alpha_index]);
                    }
                }

                true
            }
            _ => return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()))),
        };

        // Color used to clear the previous frame's rectangle when that frame
        // requested disposal.
        let clear_color = if self.animation.dispose_next_frame {
            match (info.background_color, frame_has_alpha) {
                (color @ Some(_), _) => color,
                (_, true) => Some([0, 0, 0, 0]),
                _ => None,
            }
        } else {
            None
        };

        // Lazily allocate the RGBA canvas, pre-filled with the background
        // color when one is configured.
        if self.animation.canvas.is_none() {
            self.animation.canvas = {
                let canvas_alloc = self.width as usize * self.height as usize * 4;
                self.limits.check_memory(canvas_alloc)?;
                let mut canvas = vec![0; canvas_alloc];
                if let Some(color) = info.background_color.as_ref() {
                    canvas
                        .chunks_exact_mut(4)
                        .for_each(|c| c.copy_from_slice(color))
                }
                Some(canvas)
            }
        }
        extended::composite_frame(
            self.animation.canvas.as_mut().unwrap(),
            self.width,
            self.height,
            clear_color,
            &self.animation.frame_scratch,
            frame_x,
            frame_y,
            frame_width,
            frame_height,
            frame_has_alpha,
            use_alpha_blending,
            self.animation.previous_frame_width,
            self.animation.previous_frame_height,
            self.animation.previous_frame_x_offset,
            self.animation.previous_frame_y_offset,
        )?;

        // Remember this frame's rectangle for the next disposal step.
        self.animation.previous_frame_width = frame_width;
        self.animation.previous_frame_height = frame_height;
        self.animation.previous_frame_x_offset = frame_x;
        self.animation.previous_frame_y_offset = frame_y;

        self.animation.dispose_next_frame = dispose;
        self.animation.next_frame_start += anmf_size + 8;
        self.animation.next_frame += 1;

        // Emit the composited canvas in the caller's expected layout.
        if self.has_alpha() {
            buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
        } else {
            garb::bytes::rgba_to_rgb(self.animation.canvas.as_ref().unwrap(), buf)
                .map_err(garb_err)?;
        }

        Ok(duration)
    }
1450
1451 pub fn reset_animation(&mut self) -> DecodeResult<()> {
1454 if !self.is_animated() {
1455 return Err(at!(DecodeError::InvalidParameter(String::from(
1456 "not an animated WebP",
1457 ))));
1458 }
1459 self.animation.next_frame = 0;
1460 self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
1461 self.animation.dispose_next_frame = true;
1462 Ok(())
1463 }
1464
    /// Selects the chroma upsampling method used when decoding lossy (VP8)
    /// bitstreams. Stored in the decode options; presumably ignored for
    /// lossless images — confirm against the VP8L path.
    pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) {
        self.webp_decode_options.lossy_upsampling = upsampling_method;
    }
1469}
1470
/// Maps a size-mismatch error from the `garb` pixel-conversion helpers into a
/// [`DecodeError::InvalidParameter`] carrying the original error message.
fn garb_err(e: garb::SizeError) -> DecodeError {
    DecodeError::InvalidParameter(alloc::format!("pixel conversion: {e}"))
}
1475
1476pub(crate) fn read_fourcc(r: &mut SliceReader) -> Result<WebPRiffChunk, DecodeError> {
1477 let mut chunk_fourcc = [0; 4];
1478 r.read_exact(&mut chunk_fourcc)?;
1479 Ok(WebPRiffChunk::from_fourcc(chunk_fourcc))
1480}
1481
1482pub(crate) fn read_chunk_header(
1483 r: &mut SliceReader,
1484) -> Result<(WebPRiffChunk, u64, u64), DecodeError> {
1485 let chunk = read_fourcc(r)?;
1486 let chunk_size = r.read_u32_le()?;
1487 let chunk_size_rounded = chunk_size.saturating_add(chunk_size & 1);
1488 Ok((chunk, chunk_size.into(), chunk_size_rounded.into()))
1489}
1490
1491fn decode_native_internal(
1498 data: &[u8],
1499 options: &WebPDecodeOptions,
1500 limits: &super::limits::Limits,
1501 stop: Option<&dyn enough::Stop>,
1502) -> DecodeResult<(Vec<u8>, u32, u32, bool)> {
1503 let mut decoder = WebPDecoder::new_with_options(data, options.clone())?;
1504 decoder.set_limits(limits.clone());
1505 decoder.set_stop(stop);
1506 let (w, h) = decoder.dimensions();
1507 let output_size = decoder
1508 .output_buffer_size()
1509 .ok_or_else(|| at!(DecodeError::ImageTooLarge))?;
1510 let mut pixels = alloc::vec![0u8; output_size];
1511 decoder.read_image(&mut pixels)?;
1512 Ok((pixels, w, h, decoder.has_alpha()))
1513}
1514
1515fn decode_to_rgba_internal(
1519 data: &[u8],
1520 options: &WebPDecodeOptions,
1521 limits: &super::limits::Limits,
1522 stop: Option<&dyn enough::Stop>,
1523) -> DecodeResult<(Vec<u8>, u32, u32)> {
1524 let (native, w, h, has_alpha) = decode_native_internal(data, options, limits, stop)?;
1525 if has_alpha {
1526 Ok((native, w, h))
1527 } else {
1528 let pixel_count = (w as usize) * (h as usize);
1529 let mut rgba = alloc::vec![0u8; pixel_count * 4];
1530 garb::bytes::rgb_to_rgba(&native, &mut rgba).map_err(|e| at!(garb_err(e)))?;
1531 Ok((rgba, w, h))
1532 }
1533}
1534
1535fn convert_to_output(
1541 rgba: &[u8],
1542 output: &mut [u8],
1543 w: u32,
1544 h: u32,
1545 bpp: usize,
1546 stride_pixels: Option<u32>,
1547 convert_fn: impl FnOnce(&[u8], &mut [u8], usize, usize, usize, usize) -> Result<(), DecodeError>,
1548) -> DecodeResult<()> {
1549 let wu = w as usize;
1550 let hu = h as usize;
1551 let stride_px = stride_pixels.unwrap_or(w) as usize;
1552 if stride_px < wu {
1553 return Err(at!(DecodeError::InvalidParameter(format!(
1554 "stride_pixels {} < width {}",
1555 stride_px, w
1556 ))));
1557 }
1558 let dst_stride = stride_px * bpp;
1559 let required = dst_stride * hu;
1560 if output.len() < required {
1561 return Err(at!(DecodeError::InvalidParameter(format!(
1562 "output buffer too small: got {}, need {}",
1563 output.len(),
1564 required
1565 ))));
1566 }
1567 let src_stride = wu * 4;
1568 convert_fn(rgba, output, wu, hu, src_stride, dst_stride).map_err(|e| at!(e))
1569}
1570
/// Decodes a WebP image into tightly packed RGBA8 pixels using default
/// decode options; returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgba(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    DecodeRequest::new(&DecodeConfig::default(), data).decode_rgba()
}
1590
/// Decodes a WebP image into tightly packed RGB8 (3 bytes per pixel) using
/// default decode options; returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgb(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    DecodeRequest::new(&DecodeConfig::default(), data).decode_rgb()
}
1606
/// Decodes a WebP image directly into a caller-provided RGBA8 buffer whose
/// rows are `stride_pixels` pixels apart; returns `(width, height)`.
#[track_caller]
pub fn decode_rgba_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    DecodeRequest::new(&DecodeConfig::default(), data)
        .stride(stride_pixels)
        .decode_rgba_into(output)
}
1626
/// Decodes a WebP image directly into a caller-provided RGB8 buffer whose
/// rows are `stride_pixels` pixels apart; returns `(width, height)`.
#[track_caller]
pub fn decode_rgb_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    DecodeRequest::new(&DecodeConfig::default(), data)
        .stride(stride_pixels)
        .decode_rgb_into(output)
}
1646
/// Metadata extracted from a WebP file without decoding pixel data.
#[derive(Debug, Clone)]
pub struct ImageInfo {
    // Canvas width in pixels.
    pub width: u32,
    // Canvas height in pixels.
    pub height: u32,
    // Whether the image carries an alpha channel.
    pub has_alpha: bool,
    // Whether the primary bitstream is lossy (VP8) rather than lossless (VP8L).
    pub is_lossy: bool,
    // Whether the file is animated.
    pub has_animation: bool,
    // Number of frames; 1 for still images.
    pub frame_count: u32,
    // Bitstream flavor of the primary image data.
    pub format: BitstreamFormat,
    // Orientation parsed from the EXIF chunk, if present and recognized.
    pub orientation: Option<zenpixels::Orientation>,
    // Raw ICC color-profile chunk payload, if present.
    pub icc_profile: Option<Vec<u8>>,
    // Raw EXIF metadata chunk payload, if present.
    pub exif: Option<Vec<u8>>,
    // Raw XMP metadata chunk payload, if present.
    pub xmp: Option<Vec<u8>>,
}
1677
1678impl ImageInfo {
1679 pub const PROBE_BYTES: usize = 64;
1685
1686 #[track_caller]
1702 pub fn from_bytes(data: &[u8]) -> DecodeResult<Self> {
1703 Self::from_webp(data)
1704 }
1705
1706 #[track_caller]
1712 pub fn from_webp(data: &[u8]) -> DecodeResult<Self> {
1713 let mut decoder = WebPDecoder::new(data)?;
1714 let (width, height) = decoder.dimensions();
1715 let is_lossy = decoder.is_lossy();
1716 let is_animated = decoder.is_animated();
1717 let frame_count = if is_animated { decoder.num_frames() } else { 1 };
1718 let format = if is_lossy {
1719 BitstreamFormat::Lossy
1720 } else {
1721 BitstreamFormat::Lossless
1722 };
1723 let icc_profile = decoder.icc_profile().unwrap_or(None);
1724 let exif = decoder.exif_metadata().unwrap_or(None);
1725 let xmp = decoder.xmp_metadata().unwrap_or(None);
1726 let orientation = exif
1727 .as_deref()
1728 .and_then(crate::exif_orientation::parse_orientation)
1729 .and_then(zenpixels::Orientation::from_exif);
1730 Ok(Self {
1731 width,
1732 height,
1733 has_alpha: decoder.has_alpha(),
1734 is_lossy,
1735 has_animation: is_animated,
1736 frame_count,
1737 format,
1738 orientation,
1739 icc_profile,
1740 exif,
1741 xmp,
1742 })
1743 }
1744
1745 #[must_use]
1750 pub fn estimate_decode(&self, output_bpp: u8) -> crate::heuristics::DecodeEstimate {
1751 if self.has_animation {
1752 crate::heuristics::estimate_animation_decode(self.width, self.height, self.frame_count)
1753 } else {
1754 crate::heuristics::estimate_decode(self.width, self.height, output_bpp)
1755 }
1756 }
1757}
1758
/// The compression flavor of a WebP bitstream.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
#[non_exhaustive]
pub enum BitstreamFormat {
    /// Lossy VP8 bitstream (the default).
    #[default]
    Lossy,
    /// Lossless VP8L bitstream.
    Lossless,
}
1769
1770impl core::fmt::Display for BitstreamFormat {
1771 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1772 match self {
1773 BitstreamFormat::Lossy => f.write_str("lossy"),
1774 BitstreamFormat::Lossless => f.write_str("lossless"),
1775 }
1776 }
1777}
1778
1779#[derive(Debug, Clone)]
1784pub struct YuvPlanes {
1785 pub y: Vec<u8>,
1787 pub u: Vec<u8>,
1789 pub v: Vec<u8>,
1791 pub y_width: u32,
1793 pub y_height: u32,
1795 pub uv_width: u32,
1797 pub uv_height: u32,
1799}
1800
1801#[track_caller]
1805pub fn decode_bgra(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
1806 let (mut rgba, w, h) = decode_rgba(data)?;
1807 garb::bytes::rgba_to_bgra_inplace(&mut rgba).map_err(|e| at!(garb_err(e)))?;
1808 Ok((rgba, w, h))
1809}
1810
1811#[track_caller]
1815pub fn decode_bgr(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
1816 let (rgba, w, h) = decode_rgba(data)?;
1817 let mut bgr = vec![0u8; (w * h * 3) as usize];
1818 garb::bytes::rgba_to_bgr(&rgba, &mut bgr).map_err(|e| at!(garb_err(e)))?;
1819 Ok((bgr, w, h))
1820}
1821
1822#[track_caller]
1834pub fn decode_bgra_into(
1835 data: &[u8],
1836 output: &mut [u8],
1837 stride_pixels: u32,
1838) -> DecodeResult<(u32, u32)> {
1839 let (rgba, w, h) = decode_rgba(data)?;
1840 convert_to_output(
1841 &rgba,
1842 output,
1843 w,
1844 h,
1845 4,
1846 Some(stride_pixels),
1847 |src, dst, w, h, ss, ds| {
1848 garb::bytes::rgba_to_bgra_strided(src, dst, w, h, ss, ds).map_err(garb_err)
1849 },
1850 )?;
1851 Ok((w, h))
1852}
1853
/// Decodes a WebP image into tightly packed ARGB8 pixels; returns
/// `(pixels, width, height)`.
#[track_caller]
pub fn decode_argb(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    // Decode as RGBA, then rotate the channel order in place.
    let (mut rgba, w, h) = decode_rgba(data)?;
    garb::bytes::rgba_to_argb_inplace(&mut rgba).map_err(|e| at!(garb_err(e)))?;
    Ok((rgba, w, h))
}
1863
1864#[track_caller]
1876pub fn decode_argb_into(
1877 data: &[u8],
1878 output: &mut [u8],
1879 stride_pixels: u32,
1880) -> DecodeResult<(u32, u32)> {
1881 let (rgba, w, h) = decode_rgba(data)?;
1882 convert_to_output(
1883 &rgba,
1884 output,
1885 w,
1886 h,
1887 4,
1888 Some(stride_pixels),
1889 |src, dst, w, h, ss, ds| {
1890 garb::bytes::rgba_to_argb_strided(src, dst, w, h, ss, ds).map_err(garb_err)
1891 },
1892 )?;
1893 Ok((w, h))
1894}
1895
1896#[track_caller]
1906pub fn decode_bgr_into(
1907 data: &[u8],
1908 output: &mut [u8],
1909 stride_pixels: u32,
1910) -> DecodeResult<(u32, u32)> {
1911 let (rgba, w, h) = decode_rgba(data)?;
1912 convert_to_output(
1913 &rgba,
1914 output,
1915 w,
1916 h,
1917 3,
1918 Some(stride_pixels),
1919 |src, dst, w, h, ss, ds| {
1920 garb::bytes::rgba_to_bgr_strided(src, dst, w, h, ss, ds).map_err(garb_err)
1921 },
1922 )?;
1923 Ok((w, h))
1924}
1925
/// Decodes a WebP image into planar YUV 4:2:0 data.
///
/// For a still lossy (VP8) image the decoder's internal YUV frame is copied
/// out directly, avoiding a YUV -> RGB -> YUV round trip; every other input
/// (lossless, animated) is decoded to RGBA first and then converted to YUV.
#[track_caller]
pub fn decode_yuv420(data: &[u8]) -> DecodeResult<YuvPlanes> {
    let decoder = WebPDecoder::new(data)?;

    // Fast path: a still VP8 image already holds native YUV planes.
    if decoder.is_lossy() && !decoder.is_animated() {
        if let Some(range) = decoder.chunks.get(&WebPRiffChunk::VP8) {
            let data_slice = decoder.chunk_slice(range)?;
            let mut ctx = super::vp8v2::DecoderContext::new();
            let frame = ctx.decode_to_frame(data_slice)?;

            let w = u32::from(frame.width);
            let h = u32::from(frame.height);
            // Chroma planes are half-resolution, rounded up for odd sizes.
            let uv_w = w.div_ceil(2);
            let uv_h = h.div_ceil(2);

            // The frame buffers use a row pitch rounded up to a multiple of
            // 16 pixels (whole macroblocks); compute that padded pitch.
            let buffer_width = {
                let diff = w % 16;
                if diff > 0 {
                    (w + 16 - diff) as usize
                } else {
                    w as usize
                }
            };
            let chroma_bw = buffer_width / 2;

            // Copy only the visible pixels of each row, dropping the padding.
            let mut y = Vec::with_capacity((w * h) as usize);
            for row in 0..h as usize {
                y.extend_from_slice(
                    &frame.ybuf[row * buffer_width..row * buffer_width + w as usize],
                );
            }

            let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
            let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
            for row in 0..uv_h as usize {
                u.extend_from_slice(&frame.ubuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
                v.extend_from_slice(&frame.vbuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
            }

            return Ok(YuvPlanes {
                y,
                u,
                v,
                y_width: w,
                y_height: h,
                uv_width: uv_w,
                uv_height: uv_h,
            });
        }
    }

    // Slow path: decode to RGBA, then convert to YUV 4:2:0.
    let (rgba, w, h) = decode_rgba(data)?;
    let (y_bytes, u_bytes, v_bytes) =
        super::yuv::convert_image_yuv::<4>(&rgba, w as u16, h as u16, w as usize);

    let uv_w = w.div_ceil(2);
    let uv_h = h.div_ceil(2);
    let mb_width = (w as usize).div_ceil(16);

    // The converter emits rows padded to whole macroblocks: 16 luma and
    // 8 chroma pixels per macroblock.
    let luma_width = 16 * mb_width;
    let chroma_width = 8 * mb_width;

    // Repack the padded rows into tight planes.
    let mut y = Vec::with_capacity((w * h) as usize);
    for row in 0..h as usize {
        y.extend_from_slice(&y_bytes[row * luma_width..row * luma_width + w as usize]);
    }

    let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
    let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
    for row in 0..uv_h as usize {
        u.extend_from_slice(&u_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
        v.extend_from_slice(&v_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
    }

    Ok(YuvPlanes {
        y,
        u,
        v,
        y_width: w,
        y_height: h,
        uv_width: uv_w,
        uv_height: uv_h,
    })
}
2019
/// Decodes a WebP image into RGBA8 pixels whose color channels are
/// premultiplied by alpha; returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgba_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, w, h) = decode_rgba(data)?;
    garb::bytes::premultiply_alpha_rgba_u8(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, w, h))
}
2031
/// Decodes a WebP image into BGRA8 pixels whose color channels are
/// premultiplied by alpha; returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_bgra_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, w, h) = decode_bgra(data)?;
    garb::bytes::premultiply_alpha_bgra_u8(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, w, h))
}
2041
/// Decodes a WebP image into ARGB8 pixels whose color channels are
/// premultiplied by alpha; returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_argb_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    // Premultiply while still RGBA, then rotate channels to ARGB in place.
    let (mut pixels, w, h) = decode_rgba_premultiplied(data)?;
    garb::bytes::rgba_to_argb_inplace(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, w, h))
}
2051
2052#[track_caller]
2056pub fn decode_rgb565(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
2057 let (rgba, w, h) = decode_rgba(data)?;
2058 let mut out = vec![0u8; (w * h * 2) as usize];
2059 garb::bytes::rgba_to_rgb565(&rgba, &mut out).map_err(|e| at!(garb_err(e)))?;
2060 Ok((out, w, h))
2061}
2062
2063#[track_caller]
2067pub fn decode_rgba4444(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
2068 let (rgba, w, h) = decode_rgba(data)?;
2069 let mut out = vec![0u8; (w * h * 2) as usize];
2070 garb::bytes::rgba_to_rgba4444(&rgba, &mut out).map_err(|e| at!(garb_err(e)))?;
2071 Ok((out, w, h))
2072}
2073
#[cfg(test)]
mod tests {
    use super::*;
    const RGB_BPP: usize = 3;

    /// Asserts every RGB pixel in `data` matches the first pixel to within
    /// one intensity step per channel (tolerates decoder rounding).
    fn assert_single_color(data: &[u8]) {
        let first_pixel = &data[..RGB_BPP];
        for (i, ch) in data.chunks_exact(RGB_BPP).enumerate() {
            for c in 0..RGB_BPP {
                let diff = (ch[c] as i16 - first_pixel[c] as i16).unsigned_abs();
                assert!(
                    diff <= 1,
                    "pixel {i} channel {c}: got {} expected {} (diff {diff})",
                    ch[c],
                    first_pixel[c]
                );
            }
        }
    }

    // Regression input: a crafted header whose chunk size arithmetic would
    // overflow. Constructing the decoder must not panic; the Result is
    // intentionally ignored.
    #[test]
    fn add_with_overflow_size() {
        let bytes = vec![
            0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64,
            0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49,
            0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46,
        ];

        let _ = WebPDecoder::new(&bytes);
    }

    #[test]
    fn decode_2x2_single_color_image() {
        const NUM_PIXELS: usize = 2 * 2 * RGB_BPP;
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00,
            0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        assert_single_color(&data);
    }

    #[test]
    fn decode_3x3_single_color_image() {
        const NUM_PIXELS: usize = 3 * 3 * RGB_BPP;
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00,
            0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];

        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();

        assert_single_color(&data);
    }
}