use alloc::string::String;
use thiserror::Error;
use whereat::at;
/// Errors that can occur while parsing or decoding a WebP image.
///
/// Marked `#[non_exhaustive]`: new variants may be added without a breaking
/// release, so downstream matches need a wildcard arm.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DecodeError {
/// Underlying reader failure; only available with the `std` feature.
#[cfg(feature = "std")]
#[error("IO Error: {0}")]
IoError(#[from] std::io::Error),
#[error("Invalid RIFF signature: {0:x?}")]
RiffSignatureInvalid([u8; 4]),
#[error("Invalid WebP signature: {0:x?}")]
WebpSignatureInvalid([u8; 4]),
#[error("An expected chunk was missing")]
ChunkMissing,
#[error("Invalid Chunk header: {0:x?}")]
ChunkHeaderInvalid([u8; 4]),
#[error("Alpha chunk preprocessing flag invalid")]
InvalidAlphaPreprocessing,
#[error("Invalid compression method")]
InvalidCompressionMethod,
#[error("Alpha chunk size mismatch")]
AlphaChunkSizeMismatch,
#[error("Image too large")]
ImageTooLarge,
#[error("Frame outside image")]
FrameOutsideImage,
#[error("Invalid lossless signature: {0:x?}")]
LosslessSignatureInvalid(u8),
#[error("Invalid lossless version number: {0}")]
VersionNumberInvalid(u8),
#[error("Invalid color cache bits: {0}")]
InvalidColorCacheBits(u8),
#[error("Invalid Huffman code")]
HuffmanError,
#[error("Corrupt bitstream")]
BitStreamError,
#[error("Invalid transform")]
TransformError,
#[error("Invalid VP8 magic: {0:x?}")]
Vp8MagicInvalid([u8; 3]),
#[error("Not enough VP8 init data")]
NotEnoughInitData,
#[error("Invalid VP8 color space: {0}")]
ColorSpaceInvalid(u8),
#[error("Invalid VP8 luma prediction mode: {0}")]
LumaPredictionModeInvalid(i8),
#[error("Invalid VP8 intra prediction mode: {0}")]
IntraPredictionModeInvalid(i8),
#[error("Invalid VP8 chroma prediction mode: {0}")]
ChromaPredictionModeInvalid(i8),
#[error("Inconsistent image sizes")]
InconsistentImageSizes,
#[error("Unsupported feature: {0}")]
UnsupportedFeature(String),
#[error("Invalid parameter: {0}")]
InvalidParameter(String),
#[error("Memory limit exceeded")]
MemoryLimitExceeded,
#[error("Invalid chunk size")]
InvalidChunkSize,
#[error("No more frames")]
NoMoreFrames,
/// Decoding was cancelled through an `enough::Stop` handle.
#[error("Decoding cancelled: {0}")]
Cancelled(enough::StopReason),
/// Pass-through of the codec-abstraction error; `zencodec` feature only.
#[cfg(feature = "zencodec")]
#[error(transparent)]
UnsupportedOperation(#[from] zencodec::UnsupportedOperation),
}
/// Result alias used throughout this module; errors carry a source location
/// via the `whereat::At` wrapper.
pub type DecodeResult<T> = core::result::Result<T, whereat::At<DecodeError>>;
impl From<enough::StopReason> for DecodeError {
fn from(reason: enough::StopReason) -> Self {
Self::Cancelled(reason)
}
}
impl From<whereat::At<DecodeError>> for DecodeError {
    /// Strip the location wrapper, keeping only the inner error value.
    fn from(located: whereat::At<DecodeError>) -> Self {
        located.decompose().0
    }
}
use alloc::format;
use alloc::vec;
use alloc::vec::Vec;
use core::num::NonZeroU16;
use core::ops::Range;
use hashbrown::HashMap;
use super::extended::{self, WebPExtendedInfo, get_alpha_predictor, read_alpha_chunk};
use super::lossless::LosslessDecoder;
use super::vp8v2::DecoderContext;
use crate::slice_reader::SliceReader;
/// FourCC identifiers of the RIFF chunks that can appear in a WebP file.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub(crate) enum WebPRiffChunk {
    RIFF,
    WEBP,
    VP8,
    VP8L,
    VP8X,
    ANIM,
    ANMF,
    ALPH,
    ICCP,
    EXIF,
    XMP,
    /// Any chunk whose tag is not one of the recognized FourCCs above;
    /// the raw bytes are preserved verbatim.
    Unknown([u8; 4]),
}

impl WebPRiffChunk {
    /// Map a raw four-byte chunk tag onto the matching variant.
    pub(crate) const fn from_fourcc(tag: [u8; 4]) -> Self {
        match &tag {
            b"ALPH" => Self::ALPH,
            b"ANIM" => Self::ANIM,
            b"ANMF" => Self::ANMF,
            b"EXIF" => Self::EXIF,
            b"ICCP" => Self::ICCP,
            b"RIFF" => Self::RIFF,
            b"VP8 " => Self::VP8,
            b"VP8L" => Self::VP8L,
            b"VP8X" => Self::VP8X,
            b"WEBP" => Self::WEBP,
            b"XMP " => Self::XMP,
            other => Self::Unknown(*other),
        }
    }
    /// Inverse of [`Self::from_fourcc`]: recover the four-byte tag.
    pub(crate) const fn to_fourcc(self) -> [u8; 4] {
        match self {
            Self::ALPH => *b"ALPH",
            Self::ANIM => *b"ANIM",
            Self::ANMF => *b"ANMF",
            Self::EXIF => *b"EXIF",
            Self::ICCP => *b"ICCP",
            Self::RIFF => *b"RIFF",
            Self::VP8 => *b"VP8 ",
            Self::VP8L => *b"VP8L",
            Self::VP8X => *b"VP8X",
            Self::WEBP => *b"WEBP",
            Self::XMP => *b"XMP ",
            Self::Unknown(fourcc) => fourcc,
        }
    }
    /// True only for the catch-all [`Self::Unknown`] variant.
    pub(crate) const fn is_unknown(self) -> bool {
        match self {
            Self::Unknown(_) => true,
            _ => false,
        }
    }
}
/// Which of the three top-level WebP container layouts the file uses.
enum ImageKind {
/// Plain `VP8 ` (lossy) bitstream.
Lossy,
/// Plain `VP8L` (lossless) bitstream.
Lossless,
/// `VP8X` extended container (metadata, alpha and/or animation).
Extended(WebPExtendedInfo),
}
/// Mutable bookkeeping for stepping through an animated WebP.
struct AnimationState {
// Index of the frame the next `read_frame` call will decode.
next_frame: u32,
// Byte offset (from file start) of the next frame's ANMF chunk header.
next_frame_start: u64,
// Whether the previous frame requested the canvas region be cleared.
dispose_next_frame: bool,
// Geometry of the most recently composited frame (used for disposal).
previous_frame_width: u32,
previous_frame_height: u32,
previous_frame_x_offset: u32,
previous_frame_y_offset: u32,
// Lazily allocated RGBA canvas (width * height * 4 bytes).
canvas: Option<Vec<u8>>,
// Reusable decode buffer for a single frame's pixels.
frame_scratch: Vec<u8>,
// Lossy (VP8) decoder state, reused across frames.
ctx: DecoderContext,
}
impl Default for AnimationState {
    /// Initial state: positioned before the first frame, canvas not yet
    /// allocated.
    fn default() -> Self {
        Self {
            ctx: DecoderContext::new(),
            canvas: None,
            frame_scratch: Vec::new(),
            next_frame: 0,
            next_frame_start: 0,
            // The very first frame always starts from a cleared canvas.
            dispose_next_frame: true,
            previous_frame_x_offset: 0,
            previous_frame_y_offset: 0,
            previous_frame_width: 0,
            previous_frame_height: 0,
        }
    }
}
/// How many times an animation should repeat.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum LoopCount {
    /// Repeat endlessly (encoded as 0 in the file).
    Forever,
    /// Repeat a fixed, non-zero number of times.
    Times(NonZeroU16),
}

impl core::fmt::Display for LoopCount {
    /// Human-readable form: `"infinite"`, `"1 time"`, `"N times"`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match *self {
            LoopCount::Forever => f.write_str("infinite"),
            LoopCount::Times(count) => {
                let suffix = if count.get() == 1 { "" } else { "s" };
                write!(f, "{} time{}", count, suffix)
            }
        }
    }
}

impl From<u16> for LoopCount {
    /// Interpret the on-disk loop counter: zero means loop forever.
    fn from(raw: u16) -> Self {
        NonZeroU16::new(raw).map_or(LoopCount::Forever, LoopCount::Times)
    }
}
/// User-facing decode configuration consumed by [`DecodeRequest`].
#[derive(Clone, Debug, PartialEq)]
pub struct DecodeConfig {
// Chroma upsampling method for lossy images.
pub upsampling: UpsamplingMethod,
// Resource limits (dimensions, memory, frame count).
pub limits: super::limits::Limits,
// 0 disables dithering; passed through to the VP8 decoder.
pub dithering_strength: u8,
}
impl Default for DecodeConfig {
    /// Bilinear upsampling, default limits, dithering disabled.
    fn default() -> Self {
        Self {
            dithering_strength: 0,
            limits: super::limits::Limits::default(),
            upsampling: UpsamplingMethod::Bilinear,
        }
    }
}
impl DecodeConfig {
    /// Select the chroma upsampling method for lossy images.
    #[must_use]
    pub fn upsampling(self, method: UpsamplingMethod) -> Self {
        Self {
            upsampling: method,
            ..self
        }
    }
    /// Replace the resource limits wholesale.
    #[must_use]
    pub fn limits(self, limits: super::limits::Limits) -> Self {
        Self { limits, ..self }
    }
    /// Tighten the dimension limit on the current limits value.
    #[must_use]
    pub fn max_dimensions(mut self, width: u32, height: u32) -> Self {
        self.limits = self.limits.max_dimensions(width, height);
        self
    }
    /// Tighten the memory budget on the current limits value.
    #[must_use]
    pub fn max_memory(mut self, bytes: u64) -> Self {
        self.limits = self.limits.max_memory(bytes);
        self
    }
    /// Shorthand for selecting [`UpsamplingMethod::Simple`].
    #[must_use]
    pub fn no_fancy_upsampling(self) -> Self {
        self.upsampling(UpsamplingMethod::Simple)
    }
    /// Set the dithering strength (0 disables dithering).
    #[must_use]
    pub fn with_dithering_strength(self, strength: u8) -> Self {
        Self {
            dithering_strength: strength,
            ..self
        }
    }
    /// Project the public config onto the internal option struct.
    pub(crate) fn to_options(&self) -> WebPDecodeOptions {
        WebPDecodeOptions {
            dithering_strength: self.dithering_strength,
            lossy_upsampling: self.upsampling,
        }
    }
}
/// Builder-style handle tying a [`DecodeConfig`] to one input buffer.
pub struct DecodeRequest<'a> {
// Configuration shared by all decode entry points below.
config: &'a DecodeConfig,
// The complete WebP file contents.
data: &'a [u8],
// Optional cooperative-cancellation handle.
stop: Option<&'a dyn enough::Stop>,
// Optional destination row stride (pixels) for the `*_into` methods.
stride_pixels: Option<u32>,
}
impl<'a> DecodeRequest<'a> {
/// Build a request over `data` using the supplied configuration.
#[must_use]
pub fn new(config: &'a DecodeConfig, data: &'a [u8]) -> Self {
Self {
config,
data,
stop: None,
stride_pixels: None,
}
}
/// Attach a cancellation handle; decoding fails with
/// `DecodeError::Cancelled` when it signals.
#[must_use]
pub fn stop(mut self, stop: &'a dyn enough::Stop) -> Self {
self.stop = Some(stop);
self
}
/// Set the output row stride, in pixels, used by the `*_into` methods.
/// Defaults to the image width when unset.
#[must_use]
pub fn stride(mut self, stride_pixels: u32) -> Self {
self.stride_pixels = Some(stride_pixels);
self
}
/// Decode to the image's native layout: RGBA when the bitstream carries
/// alpha, RGB otherwise. Returns `(pixels, width, height, layout)`.
pub fn decode(self) -> DecodeResult<(Vec<u8>, u32, u32, crate::PixelLayout)> {
let (pixels, w, h, has_alpha) = decode_native_internal(
self.data,
&self.config.to_options(),
&self.config.limits,
self.stop,
)?;
let layout = if has_alpha {
crate::PixelLayout::Rgba8
} else {
crate::PixelLayout::Rgb8
};
Ok((pixels, w, h, layout))
}
/// Decode to RGBA; opaque sources are expanded via
/// `garb::bytes::rgb_to_rgba` in `decode_to_rgba_internal`.
pub fn decode_rgba(self) -> DecodeResult<(Vec<u8>, u32, u32)> {
let (rgba, w, h) = decode_to_rgba_internal(
self.data,
&self.config.to_options(),
&self.config.limits,
self.stop,
)?;
Ok((rgba, w, h))
}
/// Decode to RGB, dropping the alpha channel if the source has one.
pub fn decode_rgb(self) -> DecodeResult<(Vec<u8>, u32, u32)> {
let (native, w, h, has_alpha) = decode_native_internal(
self.data,
&self.config.to_options(),
&self.config.limits,
self.stop,
)?;
if !has_alpha {
Ok((native, w, h))
} else {
let pixel_count = (w as usize) * (h as usize);
let mut rgb = alloc::vec![0u8; pixel_count * 3];
garb::bytes::rgba_to_rgb(&native, &mut rgb).map_err(|e| at!(garb_err(e)))?;
Ok((rgb, w, h))
}
}
/// Decode RGBA into a caller-provided buffer, honoring the configured
/// stride. Returns the image dimensions.
pub fn decode_rgba_into(self, output: &mut [u8]) -> DecodeResult<(u32, u32)> {
let (rgba, w, h) = decode_to_rgba_internal(
self.data,
&self.config.to_options(),
&self.config.limits,
self.stop,
)?;
convert_to_output(
&rgba,
output,
w,
h,
4,
self.stride_pixels,
|src, dst, w, h, ss, ds| {
// Row-by-row copy; source rows are tightly packed, destination
// rows may be padded out to the caller's stride.
for y in 0..h {
dst[y * ds..][..w * 4].copy_from_slice(&src[y * ss..][..w * 4]);
}
Ok(())
},
)?;
Ok((w, h))
}
/// Decode RGB into a caller-provided strided buffer.
pub fn decode_rgb_into(self, output: &mut [u8]) -> DecodeResult<(u32, u32)> {
let (rgba, w, h) = decode_to_rgba_internal(
self.data,
&self.config.to_options(),
&self.config.limits,
self.stop,
)?;
convert_to_output(
&rgba,
output,
w,
h,
3,
self.stride_pixels,
|src, dst, w, h, ss, ds| {
garb::bytes::rgba_to_rgb_strided(src, dst, w, h, ss, ds).map_err(garb_err)
},
)?;
Ok((w, h))
}
/// Parse header and metadata without decoding pixel data.
pub fn info(self) -> DecodeResult<ImageInfo> {
ImageInfo::from_webp(self.data)
}
/// Decode to planar YUV 4:2:0 (delegates to the module-level helper).
pub fn decode_yuv420(self) -> DecodeResult<YuvPlanes> {
decode_yuv420(self.data)
}
#[allow(dead_code)]
pub(crate) fn decode_rgb_lossy(self) -> DecodeResult<(Vec<u8>, u16, u16)> {
self.decode_lossy_internal(3)
}
#[allow(dead_code)]
pub(crate) fn decode_rgba_lossy(self) -> DecodeResult<(Vec<u8>, u16, u16)> {
self.decode_lossy_internal(4)
}
/// Lossy-only (VP8) decode path used by the `*_lossy` helpers.
///
/// `bpp` selects 3 (RGB) or 4 (RGBA) output bytes per pixel. Supports
/// plain `VP8 ` files and single-frame lossy `VP8X` files (including an
/// optional ALPH chunk); anything else is rejected.
fn decode_lossy_internal(self, bpp: usize) -> DecodeResult<(Vec<u8>, u16, u16)> {
let data = self.data;
let dither_strength = self.config.dithering_strength;
// Minimum header: RIFF sig(4) + size(4) + WEBP sig(4) + fourcc(4) + size(4).
if data.len() < 20 {
return Err(whereat::at!(DecodeError::NotEnoughInitData));
}
if &data[..4] != b"RIFF" {
let mut sig = [0u8; 4];
sig.copy_from_slice(&data[..4]);
return Err(whereat::at!(DecodeError::RiffSignatureInvalid(sig)));
}
if &data[8..12] != b"WEBP" {
let mut sig = [0u8; 4];
sig.copy_from_slice(&data[8..12]);
return Err(whereat::at!(DecodeError::WebpSignatureInvalid(sig)));
}
let first_chunk = &data[12..16];
match first_chunk {
b"VP8 " => {
let chunk_size =
u32::from_le_bytes([data[16], data[17], data[18], data[19]]) as usize;
let vp8_start = 20;
// Clamp to the buffer so a lying chunk size cannot overrun.
let vp8_end = (vp8_start + chunk_size).min(data.len());
let vp8_data = &data[vp8_start..vp8_end];
let mut ctx = DecoderContext::new().with_dithering_strength(dither_strength);
let mut output = Vec::new();
let (w, h) = ctx.decode_to_rgb(vp8_data, &mut output, bpp)?;
Ok((output, w, h))
}
b"VP8X" => {
use crate::mux::WebPDemuxer;
let demuxer = WebPDemuxer::new(data).map_err(|e| {
whereat::at!(DecodeError::InvalidParameter(alloc::format!(
"demux error: {e}"
)))
})?;
if demuxer.is_animated() {
return Err(whereat::at!(DecodeError::UnsupportedFeature(
"lossy single-frame decode does not support animation; use AnimationDecoder"
.into()
)));
}
let frame = demuxer
.frame(1)
.ok_or_else(|| whereat::at!(DecodeError::ChunkMissing))?;
if !frame.is_lossy {
return Err(whereat::at!(DecodeError::UnsupportedFeature(
"lossy decoder only supports VP8, got VP8L".into()
)));
}
let mut ctx = DecoderContext::new().with_dithering_strength(dither_strength);
let mut output = Vec::new();
// Alpha requires a 4-byte pixel layout even if the caller asked
// for RGB; the extra channel is stripped again below.
let decode_bpp = if frame.has_alpha { 4 } else { bpp };
let (w, h) = ctx.decode_to_rgb(frame.bitstream, &mut output, decode_bpp)?;
if let Some(alpha_data) = frame.alpha_data {
let alpha_chunk = read_alpha_chunk(alpha_data, w, h)?;
let fw = usize::from(w);
let fh = usize::from(h);
// Reconstruct alpha: stored bytes are deltas against a
// per-pixel predictor chosen by the ALPH filtering method.
for y in 0..fh {
for x in 0..fw {
let predictor: u8 = get_alpha_predictor(
x,
y,
fw,
alpha_chunk.filtering_method,
&output,
);
let alpha_index = y * fw + x;
let buffer_index = alpha_index * 4 + 3;
output[buffer_index] =
predictor.wrapping_add(alpha_chunk.data[alpha_index]);
}
}
}
if decode_bpp == 4 && bpp == 3 {
// Caller wanted RGB but alpha forced RGBA decoding: strip it.
let pixel_count = usize::from(w) * usize::from(h);
let mut rgb = alloc::vec![0u8; pixel_count * 3];
garb::bytes::rgba_to_rgb(&output, &mut rgb)
.map_err(|e| whereat::at!(garb_err(e)))?;
Ok((rgb, w, h))
} else {
Ok((output, w, h))
}
}
_ => Err(whereat::at!(DecodeError::UnsupportedFeature(
alloc::format!("lossy decoder only supports VP8, got {:?}", first_chunk)
))),
}
}
}
/// Internal decode options derived from [`DecodeConfig::to_options`].
#[derive(Clone)]
#[non_exhaustive]
pub(crate) struct WebPDecodeOptions {
// Chroma upsampling method for the lossy (VP8) path.
pub lossy_upsampling: UpsamplingMethod,
// 0 disables dithering.
pub dithering_strength: u8,
}
impl Default for WebPDecodeOptions {
    /// Mirrors `DecodeConfig::default()`: bilinear upsampling, no dithering.
    fn default() -> Self {
        Self {
            dithering_strength: 0,
            lossy_upsampling: UpsamplingMethod::Bilinear,
        }
    }
}
/// Chroma upsampling strategy for lossy (VP8) decoding.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum UpsamplingMethod {
/// Bilinear ("fancy") upsampling; the default.
#[default]
Bilinear,
/// Simple nearest-style upsampling (see `DecodeConfig::no_fancy_upsampling`).
Simple,
}
/// Stateful WebP decoder over a borrowed byte slice.
pub struct WebPDecoder<'a> {
// Cursor over the complete file contents.
r: SliceReader<'a>,
// Cap on bytes copied out by the chunk accessors (ICC/EXIF/XMP).
memory_limit: usize,
// Dimension / memory / frame-count limits enforced while decoding.
limits: super::limits::Limits,
// Canvas dimensions in pixels, parsed from the header.
width: u32,
height: u32,
// Container layout (VP8 / VP8L / VP8X) discovered by `read_data`.
kind: ImageKind,
// Animation cursor and scratch buffers.
animation: AnimationState,
// True when any bitstream in the file is lossy (VP8).
is_lossy: bool,
has_alpha: bool,
// Number of ANMF frames found (0 for still images).
num_frames: u32,
loop_count: LoopCount,
// Sum of all frame durations.
loop_duration: u64,
// Byte range of each known chunk, keyed by FourCC.
chunks: HashMap<WebPRiffChunk, Range<u64>>,
webp_decode_options: WebPDecodeOptions,
// Optional cooperative-cancellation handle.
stop: Option<&'a dyn enough::Stop>,
}
impl<'a> WebPDecoder<'a> {
/// Construct a decoder, flattening the `whereat` location wrapper into a
/// bare `DecodeError` (via the `From<At<DecodeError>>` impl above).
pub fn build(data: &'a [u8]) -> Result<Self, DecodeError> {
Ok(Self::new(data)?)
}
/// Construct a decoder with default options; the container header is
/// parsed eagerly (see `read_data`).
pub fn new(data: &'a [u8]) -> DecodeResult<Self> {
Self::new_with_options(data, WebPDecodeOptions::default())
}
/// Summarize the already-parsed header plus any metadata chunks.
/// Metadata reads are best-effort: failures degrade to `None`.
pub fn info(&self) -> ImageInfo {
let icc_profile = self
.read_chunk_direct(WebPRiffChunk::ICCP, self.memory_limit)
.unwrap_or(None);
let exif = self
.read_chunk_direct(WebPRiffChunk::EXIF, self.memory_limit)
.unwrap_or(None);
let xmp = self
.read_chunk_direct(WebPRiffChunk::XMP, self.memory_limit)
.unwrap_or(None);
// Orientation comes from the EXIF payload, when present and parsable.
let orientation = exif
.as_deref()
.and_then(crate::exif_orientation::parse_orientation)
.and_then(zenpixels::Orientation::from_exif);
ImageInfo {
width: self.width,
height: self.height,
has_alpha: self.has_alpha,
is_lossy: self.is_lossy,
has_animation: self.is_animated(),
frame_count: self.num_frames,
format: if self.is_lossy {
BitstreamFormat::Lossy
} else {
BitstreamFormat::Lossless
},
orientation,
icc_profile,
exif,
xmp,
}
}
/// Construct with explicit decode options and immediately parse the RIFF
/// container so dimensions and chunk offsets are available.
pub(crate) fn new_with_options(
data: &'a [u8],
webp_decode_options: WebPDecodeOptions,
) -> DecodeResult<Self> {
let mut decoder = Self {
r: SliceReader::new(data),
width: 0,
height: 0,
num_frames: 0,
kind: ImageKind::Lossy,
chunks: HashMap::new(),
animation: Default::default(),
memory_limit: usize::MAX,
limits: super::limits::Limits::default(),
is_lossy: false,
has_alpha: false,
// Still images effectively "loop" exactly once.
loop_count: LoopCount::Times(NonZeroU16::new(1).unwrap()),
loop_duration: 0,
webp_decode_options,
stop: None,
};
decoder.read_data()?;
Ok(decoder)
}
/// Parse the RIFF container: validate signatures, determine the image
/// kind (VP8 / VP8L / VP8X), record chunk byte ranges, and set up the
/// animation bookkeeping.
fn read_data(&mut self) -> DecodeResult<()> {
// The file must open with a RIFF header whose form type is WEBP.
let (WebPRiffChunk::RIFF, riff_size, _) = read_chunk_header(&mut self.r)? else {
return Err(at!(DecodeError::ChunkHeaderInvalid(*b"RIFF")));
};
match &read_fourcc(&mut self.r)? {
WebPRiffChunk::WEBP => {}
fourcc => return Err(at!(DecodeError::WebpSignatureInvalid(fourcc.to_fourcc()))),
}
let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
let start = self.r.stream_position();
match chunk {
WebPRiffChunk::VP8 => {
// VP8 frame tag: 3 bytes, bit 0 clear == keyframe.
let tag = self.r.read_u24_le()?;
let keyframe = tag & 1 == 0;
if !keyframe {
return Err(at!(DecodeError::UnsupportedFeature(
"Non-keyframe frames".into(),
)));
}
let mut tag = [0u8; 3];
self.r.read_exact(&mut tag)?;
if tag != [0x9d, 0x01, 0x2a] {
return Err(at!(DecodeError::Vp8MagicInvalid(tag)));
}
// Dimensions occupy the low 14 bits of each 16-bit field.
let w = self.r.read_u16_le()?;
let h = self.r.read_u16_le()?;
self.width = u32::from(w & 0x3FFF);
self.height = u32::from(h & 0x3FFF);
if self.width == 0 || self.height == 0 {
return Err(at!(DecodeError::InconsistentImageSizes));
}
self.limits.check_dimensions(self.width, self.height)?;
self.chunks
.insert(WebPRiffChunk::VP8, start..start + chunk_size);
self.kind = ImageKind::Lossy;
self.is_lossy = true;
}
WebPRiffChunk::VP8L => {
// Lossless stream starts with the one-byte signature 0x2f.
let signature = self.r.read_u8()?;
if signature != 0x2f {
return Err(at!(DecodeError::LosslessSignatureInvalid(signature)));
}
// 32-bit header: bits 0..14 = width-1, 14..28 = height-1,
// bit 28 = alpha hint, top 3 bits = version (must be 0).
let header = self.r.read_u32_le()?;
let version = header >> 29;
if version != 0 {
return Err(at!(DecodeError::VersionNumberInvalid(version as u8)));
}
self.width = (1 + header) & 0x3FFF;
self.height = (1 + (header >> 14)) & 0x3FFF;
self.limits.check_dimensions(self.width, self.height)?;
self.chunks
.insert(WebPRiffChunk::VP8L, start..start + chunk_size);
self.kind = ImageKind::Lossless;
self.has_alpha = (header >> 28) & 1 != 0;
}
WebPRiffChunk::VP8X => {
let mut info = extended::read_extended_header(&mut self.r)?;
self.width = info.canvas_width;
self.height = info.canvas_height;
self.limits.check_dimensions(self.width, self.height)?;
// Walk the remaining sibling chunks, recording the first byte
// range seen for each known fourcc.
let mut position = start + chunk_size_rounded;
let max_position = position + riff_size.saturating_sub(12);
self.r.seek_from_start(position)?;
while position < max_position {
match read_chunk_header(&mut self.r) {
Ok((chunk, chunk_size, chunk_size_rounded)) => {
let range = position + 8..position + 8 + chunk_size;
position += 8 + chunk_size_rounded;
if !chunk.is_unknown() {
self.chunks.entry(chunk).or_insert(range);
}
if chunk == WebPRiffChunk::ANMF {
self.num_frames += 1;
self.limits.check_frame_count(self.num_frames as usize)?;
// An ANMF payload holds a 16-byte frame header plus
// at least one 8-byte subchunk header.
if chunk_size < 24 {
return Err(at!(DecodeError::InvalidChunkSize));
}
// Skip x/y/w/h (4 x 3 bytes) to reach the duration,
// stored in the low 24 bits of the next u32.
self.r.seek_relative(12)?;
let duration = self.r.read_u32_le()? & 0xffffff;
self.loop_duration =
self.loop_duration.wrapping_add(u64::from(duration));
if !self.is_lossy {
// Peek the frame's first subchunk fourcc: VP8 (or
// ALPH preceding a VP8) marks the file as lossy.
let (subchunk, ..) = read_chunk_header(&mut self.r)?;
if let WebPRiffChunk::VP8 | WebPRiffChunk::ALPH = subchunk {
self.is_lossy = true;
}
// 24 bytes of this chunk were consumed above.
self.r.seek_relative(chunk_size_rounded as i64 - 24)?;
} else {
// Only 16 bytes consumed on this path.
self.r.seek_relative(chunk_size_rounded as i64 - 16)?;
}
continue;
}
self.r.seek_relative(chunk_size_rounded as i64)?;
}
Err(DecodeError::BitStreamError) => {
// Truncated trailing chunk: stop scanning, keep what we have.
break;
}
Err(e) => return Err(at!(e)),
}
}
self.is_lossy = self.is_lossy || self.chunks.contains_key(&WebPRiffChunk::VP8);
// Cross-check the VP8X feature flags against the chunks actually
// present. The final equality test enforces that a still image
// contains exactly one of VP8 / VP8L (both or neither is invalid).
if info.animation
&& (!self.chunks.contains_key(&WebPRiffChunk::ANIM)
|| !self.chunks.contains_key(&WebPRiffChunk::ANMF))
|| info.exif_metadata && !self.chunks.contains_key(&WebPRiffChunk::EXIF)
|| info.xmp_metadata && !self.chunks.contains_key(&WebPRiffChunk::XMP)
|| !info.animation
&& self.chunks.contains_key(&WebPRiffChunk::VP8)
== self.chunks.contains_key(&WebPRiffChunk::VP8L)
{
return Err(at!(DecodeError::ChunkMissing));
}
if info.animation {
// ANIM payload: 4-byte background color + 2-byte loop count.
match self.read_chunk(WebPRiffChunk::ANIM, 6) {
Ok(Some(chunk)) => {
let mut cursor = SliceReader::new(&chunk);
cursor.read_exact(&mut info.background_color_hint)?;
self.loop_count = match cursor.read_u16_le()? {
0 => LoopCount::Forever,
n => LoopCount::Times(NonZeroU16::new(n).unwrap()),
};
// Position the animation cursor at the first ANMF header;
// the stored range points at the payload, so back up the
// 8-byte chunk header.
self.animation.next_frame_start =
self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
}
Ok(None) => return Err(at!(DecodeError::ChunkMissing)),
Err(ref e) if matches!(e.error(), DecodeError::MemoryLimitExceeded) => {
return Err(at!(DecodeError::InvalidChunkSize));
}
Err(e) => return Err(e),
}
}
// Register up to two subchunks of the first ANMF frame (e.g. ALPH +
// VP8) so the single-image accessors can locate them.
if let Some(range) = self.chunks.get(&WebPRiffChunk::ANMF).cloned() {
let mut position = range.start + 16;
self.r.seek_from_start(position)?;
for _ in 0..2 {
let (subchunk, subchunk_size, subchunk_size_rounded) =
read_chunk_header(&mut self.r)?;
let subrange = position + 8..position + 8 + subchunk_size;
self.chunks.entry(subchunk).or_insert(subrange.clone());
position += 8 + subchunk_size_rounded;
if position + 8 > range.end {
break;
}
}
}
self.has_alpha = info.alpha;
self.kind = ImageKind::Extended(info);
}
_ => return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()))),
};
Ok(())
}
/// Install (or clear) a cooperative cancellation handle.
pub fn set_stop(&mut self, stop: Option<&'a dyn enough::Stop>) {
self.stop = stop;
}
/// Cap the number of bytes the chunk accessors will copy out.
pub fn set_memory_limit(&mut self, limit: usize) {
self.memory_limit = limit;
}
/// Replace the decode limits (dimensions, memory, frame count).
pub fn set_limits(&mut self, limits: super::limits::Limits) {
self.limits = limits;
}
/// Background color recorded in the ANIM chunk; `None` for non-extended
/// files.
pub fn background_color_hint(&self) -> Option<[u8; 4]> {
if let ImageKind::Extended(info) = &self.kind {
Some(info.background_color_hint)
} else {
None
}
}
/// Override the canvas background color; only valid for extended files.
pub fn set_background_color(&mut self, color: [u8; 4]) -> DecodeResult<()> {
if let ImageKind::Extended(info) = &mut self.kind {
info.background_color = Some(color);
Ok(())
} else {
Err(at!(DecodeError::InvalidParameter(
"Background color can only be set on animated webp".into(),
)))
}
}
/// Canvas size in pixels, as `(width, height)`.
pub fn dimensions(&self) -> (u32, u32) {
(self.width, self.height)
}
/// Whether the decoded output carries an alpha channel.
pub fn has_alpha(&self) -> bool {
self.has_alpha
}
/// True only for VP8X files whose animation flag is set.
pub fn is_animated(&self) -> bool {
match &self.kind {
ImageKind::Lossy | ImageKind::Lossless => false,
ImageKind::Extended(extended) => extended.animation,
}
}
/// Whether any bitstream in the file is lossy (VP8).
pub fn is_lossy(&self) -> bool {
self.is_lossy
}
/// Number of ANMF frames found (0 for still images).
pub fn num_frames(&self) -> u32 {
self.num_frames
}
/// Animation loop count; defaults to once for still images.
pub fn loop_count(&self) -> LoopCount {
self.loop_count
}
/// Sum of all frame durations, in the file's duration units.
pub fn loop_duration(&self) -> u64 {
self.loop_duration
}
/// Copy a chunk's bytes, refusing chunks larger than `max_size`.
fn read_chunk(
&mut self,
chunk: WebPRiffChunk,
max_size: usize,
) -> DecodeResult<Option<Vec<u8>>> {
self.read_chunk_direct(chunk, max_size)
}
/// Like `read_chunk` but callable from `&self`; returns `Ok(None)` when
/// the chunk is absent.
fn read_chunk_direct(
&self,
chunk: WebPRiffChunk,
max_size: usize,
) -> DecodeResult<Option<Vec<u8>>> {
match self.chunks.get(&chunk) {
Some(range) => {
let len = (range.end - range.start) as usize;
if len > max_size {
return Err(at!(DecodeError::MemoryLimitExceeded));
}
let slice = self.chunk_slice(range)?;
Ok(Some(slice.to_vec()))
}
None => Ok(None),
}
}
/// Borrow a chunk's bytes after bounds-checking the recorded range
/// against the underlying buffer.
fn chunk_slice(&self, range: &core::ops::Range<u64>) -> DecodeResult<&[u8]> {
let buf = self.r.get_ref();
let start = range.start as usize;
let end = range.end as usize;
if end > buf.len() || start > end {
return Err(at!(DecodeError::InvalidChunkSize));
}
Ok(&buf[start..end])
}
/// ICC color profile chunk bytes, if present.
pub fn icc_profile(&mut self) -> DecodeResult<Option<Vec<u8>>> {
self.read_chunk(WebPRiffChunk::ICCP, self.memory_limit)
}
/// Raw EXIF chunk bytes, if present.
pub fn exif_metadata(&mut self) -> DecodeResult<Option<Vec<u8>>> {
self.read_chunk(WebPRiffChunk::EXIF, self.memory_limit)
}
/// Raw XMP chunk bytes, if present.
pub fn xmp_metadata(&mut self) -> DecodeResult<Option<Vec<u8>>> {
self.read_chunk(WebPRiffChunk::XMP, self.memory_limit)
}
/// Buffer size required by `read_image` / `read_frame`:
/// width * height * (4 with alpha, else 3). `None` on overflow.
pub fn output_buffer_size(&self) -> Option<usize> {
let bytes_per_pixel = if self.has_alpha() { 4 } else { 3 };
(self.width as usize)
.checked_mul(self.height as usize)?
.checked_mul(bytes_per_pixel)
}
/// Decode the (first) image into `buf`, which must be exactly
/// `output_buffer_size()` bytes long.
pub fn read_image(&mut self, buf: &mut [u8]) -> DecodeResult<()> {
if Some(buf.len()) != self.output_buffer_size() {
return Err(at!(DecodeError::ImageTooLarge));
}
if self.is_animated() {
// Composite just the first frame, then restore the caller's
// animation cursor so frame iteration is unaffected.
let saved = core::mem::take(&mut self.animation);
self.animation.next_frame_start =
self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
let result = self.read_frame(buf);
self.animation = saved;
result?;
} else if let Some(range) = self.chunks.get(&WebPRiffChunk::VP8L) {
// Lossless path. The lossless decoder fills a 4-byte-per-pixel
// buffer; without alpha, decode to scratch and strip the fourth
// channel afterwards.
let data_slice = self.chunk_slice(range)?;
let mut decoder = LosslessDecoder::new(data_slice);
decoder.set_stop(self.stop);
if self.has_alpha {
decoder.decode_frame(self.width, self.height, false, buf)?;
} else {
let alloc_size = self.width as usize * self.height as usize * 4;
self.limits.check_memory(alloc_size)?;
let mut data = vec![0; alloc_size];
decoder.decode_frame(self.width, self.height, false, &mut data)?;
garb::bytes::rgba_to_rgb(&data, buf).map_err(garb_err)?;
}
} else {
// Lossy path: decode the VP8 chunk, then (when alpha is flagged)
// merge in the separately stored ALPH plane.
let range = self
.chunks
.get(&WebPRiffChunk::VP8)
.ok_or(DecodeError::ChunkMissing)?
.clone();
let data_buf = self.r.get_ref();
let vp8_data = data_buf
.get(range.start as usize..range.end as usize)
.ok_or(at!(DecodeError::InvalidChunkSize))?;
let bpp = if self.has_alpha() { 4 } else { 3 };
self.animation
.ctx
.set_dithering_strength(self.webp_decode_options.dithering_strength);
let mut output = Vec::new();
let (w, h) = self
.animation
.ctx
.decode_to_rgb(vp8_data, &mut output, bpp)?;
if u32::from(w) != self.width || u32::from(h) != self.height {
return Err(at!(DecodeError::InconsistentImageSizes));
}
if self.has_alpha() {
buf.copy_from_slice(&output);
let alpha_range = self
.chunks
.get(&WebPRiffChunk::ALPH)
.ok_or_else(|| at!(DecodeError::ChunkMissing))?
.clone();
let alpha_slice = &data_buf[alpha_range.start as usize..alpha_range.end as usize];
let alpha_chunk =
read_alpha_chunk(alpha_slice, self.width as u16, self.height as u16)?;
// Reconstruct alpha in place: stored bytes are deltas against
// a per-pixel predictor chosen by the ALPH filtering method.
let fw = usize::from(w);
let fh = usize::from(h);
for y in 0..fh {
for x in 0..fw {
let predictor: u8 =
get_alpha_predictor(x, y, fw, alpha_chunk.filtering_method, buf);
let alpha_index = y * fw + x;
let buffer_index = alpha_index * 4 + 3;
buf[buffer_index] = predictor.wrapping_add(alpha_chunk.data[alpha_index]);
}
}
} else {
buf.copy_from_slice(&output);
}
}
Ok(())
}
/// Decode the next animation frame into `buf` (sized per
/// `output_buffer_size()`), compositing it onto the persistent canvas.
/// Returns the frame duration read from the ANMF header.
pub fn read_frame(&mut self, buf: &mut [u8]) -> DecodeResult<u32> {
if !self.is_animated() {
return Err(at!(DecodeError::InvalidParameter(String::from(
"not an animated WebP",
))));
}
if Some(buf.len()) != self.output_buffer_size() {
return Err(at!(DecodeError::ImageTooLarge));
}
if self.animation.next_frame == self.num_frames {
return Err(at!(DecodeError::NoMoreFrames));
}
// is_animated() returned true, so the kind must be Extended.
let ImageKind::Extended(info) = &self.kind else {
unreachable!()
};
self.r.seek_from_start(self.animation.next_frame_start)?;
let anmf_size = match read_chunk_header(&mut self.r)? {
(WebPRiffChunk::ANMF, size, _) if size >= 32 => size,
_ => return Err(at!(DecodeError::ChunkHeaderInvalid(*b"ANMF"))),
};
// ANMF frame header: offsets are stored halved, dimensions minus one.
let frame_x = extended::read_3_bytes(&mut self.r)? * 2;
let frame_y = extended::read_3_bytes(&mut self.r)? * 2;
let frame_width = extended::read_3_bytes(&mut self.r)? + 1;
let frame_height = extended::read_3_bytes(&mut self.r)? + 1;
if frame_width > 16384 || frame_height > 16384 {
return Err(at!(DecodeError::ImageTooLarge));
}
if frame_x + frame_width > self.width || frame_y + frame_height > self.height {
return Err(at!(DecodeError::FrameOutsideImage));
}
let duration = extended::read_3_bytes(&mut self.r)?;
// Frame flags: bit 1 set = no alpha blending, bit 0 = dispose after.
let frame_info = self.r.read_u8()?;
let use_alpha_blending = frame_info & 0b00000010 == 0;
let dispose = frame_info & 0b00000001 != 0;
self.animation
.ctx
.set_dithering_strength(self.webp_decode_options.dithering_strength);
let (chunk, chunk_size, chunk_size_rounded) = read_chunk_header(&mut self.r)?;
// The subchunk must fit inside the ANMF payload (16-byte frame header
// plus this 8-byte subchunk header already consumed).
if chunk_size_rounded + 24 > anmf_size {
return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())));
}
let frame_has_alpha: bool = match chunk {
WebPRiffChunk::VP8 => {
// Lossy frame without alpha: decode straight to 3-byte pixels.
let data_slice = self.r.take_slice(chunk_size as usize)?;
let (w, h) = self.animation.ctx.decode_to_rgb(
data_slice,
&mut self.animation.frame_scratch,
3,
)?;
if u32::from(w) != frame_width || u32::from(h) != frame_height {
return Err(at!(DecodeError::InconsistentImageSizes));
}
false
}
WebPRiffChunk::VP8L => {
// Lossless frame: always decoded into a 4-byte-per-pixel buffer.
let data_slice = self.r.take_slice(chunk_size as usize)?;
let mut lossless_decoder = LosslessDecoder::new(data_slice);
lossless_decoder.set_stop(self.stop);
let frame_alloc = frame_width as usize * frame_height as usize * 4;
self.limits.check_memory(frame_alloc)?;
self.animation.frame_scratch.resize(frame_alloc, 0);
lossless_decoder.decode_frame(
frame_width,
frame_height,
false,
&mut self.animation.frame_scratch,
)?;
true
}
WebPRiffChunk::ALPH => {
// Lossy frame with alpha: ALPH chunk first, then a VP8 chunk.
if chunk_size_rounded + 32 > anmf_size {
return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc())));
}
let alpha_slice = self.r.take_slice(chunk_size as usize)?;
if chunk_size_rounded > chunk_size {
// Skip the RIFF padding byte of an odd-sized ALPH chunk.
self.r
.seek_relative((chunk_size_rounded - chunk_size) as i64)?;
}
let alpha_chunk =
read_alpha_chunk(alpha_slice, frame_width as u16, frame_height as u16)?;
let (next_chunk, next_chunk_size, _) = read_chunk_header(&mut self.r)?;
if chunk_size + next_chunk_size + 32 > anmf_size {
return Err(at!(DecodeError::ChunkHeaderInvalid(next_chunk.to_fourcc())));
}
let vp8_slice = self.r.take_slice(next_chunk_size as usize)?;
let (w, h) = self.animation.ctx.decode_to_rgb(
vp8_slice,
&mut self.animation.frame_scratch,
4,
)?;
// Reconstruct alpha in place from predictor + stored delta.
let fw = usize::from(w);
let fh = usize::from(h);
for y in 0..fh {
for x in 0..fw {
let predictor: u8 = get_alpha_predictor(
x,
y,
fw,
alpha_chunk.filtering_method,
&self.animation.frame_scratch,
);
let alpha_index = y * fw + x;
let buffer_index = alpha_index * 4 + 3;
self.animation.frame_scratch[buffer_index] =
predictor.wrapping_add(alpha_chunk.data[alpha_index]);
}
}
true
}
_ => return Err(at!(DecodeError::ChunkHeaderInvalid(chunk.to_fourcc()))),
};
// When the previous frame requested disposal, the cleared region uses
// the explicit background color when set, otherwise transparent black
// (only meaningful when the incoming frame carries alpha).
let clear_color = if self.animation.dispose_next_frame {
match (info.background_color, frame_has_alpha) {
(color @ Some(_), _) => color,
(_, true) => Some([0, 0, 0, 0]),
_ => None,
}
} else {
None
};
// Allocate the RGBA canvas lazily, pre-filled with the background.
if self.animation.canvas.is_none() {
self.animation.canvas = {
let canvas_alloc = self.width as usize * self.height as usize * 4;
self.limits.check_memory(canvas_alloc)?;
let mut canvas = vec![0; canvas_alloc];
if let Some(color) = info.background_color.as_ref() {
canvas
.chunks_exact_mut(4)
.for_each(|c| c.copy_from_slice(color))
}
Some(canvas)
}
}
extended::composite_frame(
self.animation.canvas.as_mut().unwrap(),
self.width,
self.height,
clear_color,
&self.animation.frame_scratch,
frame_x,
frame_y,
frame_width,
frame_height,
frame_has_alpha,
use_alpha_blending,
self.animation.previous_frame_width,
self.animation.previous_frame_height,
self.animation.previous_frame_x_offset,
self.animation.previous_frame_y_offset,
)?;
// Remember this frame's geometry and disposal for the next composite.
self.animation.previous_frame_width = frame_width;
self.animation.previous_frame_height = frame_height;
self.animation.previous_frame_x_offset = frame_x;
self.animation.previous_frame_y_offset = frame_y;
self.animation.dispose_next_frame = dispose;
// NOTE(review): advances by the unpadded chunk size; RIFF pads chunks
// to even length (see read_chunk_header), so an odd-sized ANMF would
// leave the cursor misaligned for the next frame — confirm.
self.animation.next_frame_start += anmf_size + 8;
self.animation.next_frame += 1;
if self.has_alpha() {
buf.copy_from_slice(self.animation.canvas.as_ref().unwrap());
} else {
garb::bytes::rgba_to_rgb(self.animation.canvas.as_ref().unwrap(), buf)
.map_err(garb_err)?;
}
Ok(duration)
}
/// Rewind the animation cursor to the first frame.
pub fn reset_animation(&mut self) -> DecodeResult<()> {
if !self.is_animated() {
return Err(at!(DecodeError::InvalidParameter(String::from(
"not an animated WebP",
))));
}
self.animation.next_frame = 0;
self.animation.next_frame_start = self.chunks.get(&WebPRiffChunk::ANMF).unwrap().start - 8;
self.animation.dispose_next_frame = true;
Ok(())
}
/// Choose the chroma upsampling method used by the lossy path.
pub fn set_lossy_upsampling(&mut self, upsampling_method: UpsamplingMethod) {
self.webp_decode_options.lossy_upsampling = upsampling_method;
}
}
/// Translate a pixel-conversion size error into a decoder error.
fn garb_err(e: garb::SizeError) -> DecodeError {
    let message = format!("pixel conversion: {e}");
    DecodeError::InvalidParameter(message)
}
/// Read the next four bytes and classify them as a RIFF chunk tag.
pub(crate) fn read_fourcc(r: &mut SliceReader) -> Result<WebPRiffChunk, DecodeError> {
    let mut fourcc = [0u8; 4];
    r.read_exact(&mut fourcc)?;
    Ok(WebPRiffChunk::from_fourcc(fourcc))
}
/// Read an 8-byte RIFF chunk header.
///
/// Returns `(tag, payload_size, padded_size)`; RIFF pads odd-sized chunk
/// payloads to even length, which `padded_size` accounts for.
pub(crate) fn read_chunk_header(
    r: &mut SliceReader,
) -> Result<(WebPRiffChunk, u64, u64), DecodeError> {
    let chunk = read_fourcc(r)?;
    let size = r.read_u32_le()?;
    let padded = size.saturating_add(size & 1);
    Ok((chunk, u64::from(size), u64::from(padded)))
}
/// Shared decode path: returns pixels in the image's native layout
/// (RGBA when alpha is present, RGB otherwise), the dimensions, and the
/// alpha flag.
fn decode_native_internal(
data: &[u8],
options: &WebPDecodeOptions,
limits: &super::limits::Limits,
stop: Option<&dyn enough::Stop>,
) -> DecodeResult<(Vec<u8>, u32, u32, bool)> {
let mut decoder = WebPDecoder::new_with_options(data, options.clone())?;
decoder.set_limits(limits.clone());
decoder.set_stop(stop);
let (w, h) = decoder.dimensions();
// `None` from output_buffer_size signals an arithmetic overflow.
let output_size = decoder
.output_buffer_size()
.ok_or_else(|| at!(DecodeError::ImageTooLarge))?;
let mut pixels = alloc::vec![0u8; output_size];
decoder.read_image(&mut pixels)?;
Ok((pixels, w, h, decoder.has_alpha()))
}
/// Decode and normalize the result to RGBA: alpha images pass through,
/// opaque images are expanded via `garb::bytes::rgb_to_rgba`.
fn decode_to_rgba_internal(
data: &[u8],
options: &WebPDecodeOptions,
limits: &super::limits::Limits,
stop: Option<&dyn enough::Stop>,
) -> DecodeResult<(Vec<u8>, u32, u32)> {
let (native, w, h, has_alpha) = decode_native_internal(data, options, limits, stop)?;
if has_alpha {
Ok((native, w, h))
} else {
let pixel_count = (w as usize) * (h as usize);
let mut rgba = alloc::vec![0u8; pixel_count * 4];
garb::bytes::rgb_to_rgba(&native, &mut rgba).map_err(|e| at!(garb_err(e)))?;
Ok((rgba, w, h))
}
}
/// Validate stride and size invariants, then hand the packed RGBA source
/// and the caller's output buffer to `convert_fn`.
///
/// `bpp` is the destination bytes per pixel (3 or 4); the source is always
/// 4 bytes per pixel. `stride_pixels` defaults to the image width.
///
/// # Errors
/// - `InvalidParameter` when the stride is narrower than the image or the
///   output buffer is too small.
/// - `ImageTooLarge` when the required byte count overflows `usize`.
fn convert_to_output(
    rgba: &[u8],
    output: &mut [u8],
    w: u32,
    h: u32,
    bpp: usize,
    stride_pixels: Option<u32>,
    convert_fn: impl FnOnce(&[u8], &mut [u8], usize, usize, usize, usize) -> Result<(), DecodeError>,
) -> DecodeResult<()> {
    let wu = w as usize;
    let hu = h as usize;
    let stride_px = stride_pixels.unwrap_or(w) as usize;
    if stride_px < wu {
        return Err(at!(DecodeError::InvalidParameter(format!(
            "stride_pixels {} < width {}",
            stride_px, w
        ))));
    }
    // Use checked arithmetic: on 32-bit targets `stride * bpp * height` can
    // overflow `usize` for hostile dimensions, which would let the size
    // check below pass with an undersized buffer.
    let dst_stride = stride_px
        .checked_mul(bpp)
        .ok_or_else(|| at!(DecodeError::ImageTooLarge))?;
    let required = dst_stride
        .checked_mul(hu)
        .ok_or_else(|| at!(DecodeError::ImageTooLarge))?;
    if output.len() < required {
        return Err(at!(DecodeError::InvalidParameter(format!(
            "output buffer too small: got {}, need {}",
            output.len(),
            required
        ))));
    }
    // Source rows are tightly packed RGBA.
    let src_stride = wu * 4;
    convert_fn(rgba, output, wu, hu, src_stride, dst_stride).map_err(|e| at!(e))
}
/// Decodes a WebP image to tightly packed RGBA8 using default settings.
///
/// Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgba(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let config = DecodeConfig::default();
    DecodeRequest::new(&config, data).decode_rgba()
}
/// Decodes a WebP image to tightly packed RGB8 using default settings.
///
/// Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgb(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let config = DecodeConfig::default();
    DecodeRequest::new(&config, data).decode_rgb()
}
/// Decodes a WebP image as RGBA8 directly into `output`, writing rows at
/// a stride of `stride_pixels` pixels. Returns `(width, height)`.
#[track_caller]
pub fn decode_rgba_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    let config = DecodeConfig::default();
    let request = DecodeRequest::new(&config, data).stride(stride_pixels);
    request.decode_rgba_into(output)
}
/// Decodes a WebP image as RGB8 directly into `output`, writing rows at
/// a stride of `stride_pixels` pixels. Returns `(width, height)`.
#[track_caller]
pub fn decode_rgb_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    let config = DecodeConfig::default();
    let request = DecodeRequest::new(&config, data).stride(stride_pixels);
    request.decode_rgb_into(output)
}
/// Container-level information about a WebP stream, obtained without
/// decoding any pixel data. See [`ImageInfo::from_webp`].
#[derive(Debug, Clone)]
pub struct ImageInfo {
    /// Canvas width in pixels.
    pub width: u32,
    /// Canvas height in pixels.
    pub height: u32,
    /// Whether the image carries an alpha channel.
    pub has_alpha: bool,
    /// Whether the primary bitstream is lossy (VP8) rather than lossless.
    pub is_lossy: bool,
    /// Whether the file contains an animation.
    pub has_animation: bool,
    /// Number of frames; 1 for still images.
    pub frame_count: u32,
    /// Bitstream encoding of the image data.
    pub format: BitstreamFormat,
    /// Display orientation derived from EXIF metadata, if present and parseable.
    pub orientation: Option<zenpixels::Orientation>,
    /// Raw ICC color-profile chunk, if present.
    pub icc_profile: Option<Vec<u8>>,
    /// Raw EXIF metadata chunk, if present.
    pub exif: Option<Vec<u8>>,
    /// Raw XMP metadata chunk, if present.
    pub xmp: Option<Vec<u8>>,
}
impl ImageInfo {
    /// Number of leading bytes recommended when probing a stream for
    /// header information.
    pub const PROBE_BYTES: usize = 64;

    /// Convenience alias for [`Self::from_webp`].
    #[track_caller]
    pub fn from_bytes(data: &[u8]) -> DecodeResult<Self> {
        Self::from_webp(data)
    }

    /// Reads dimensions, format flags, and metadata chunks from a WebP
    /// byte stream without decoding pixel data.
    #[track_caller]
    pub fn from_webp(data: &[u8]) -> DecodeResult<Self> {
        let mut decoder = WebPDecoder::new(data)?;
        let (width, height) = decoder.dimensions();
        let is_lossy = decoder.is_lossy();
        let has_animation = decoder.is_animated();
        let frame_count = match has_animation {
            true => decoder.num_frames(),
            false => 1,
        };
        let format = match is_lossy {
            true => BitstreamFormat::Lossy,
            false => BitstreamFormat::Lossless,
        };
        // Metadata lookups are best-effort: a chunk-read failure is
        // treated the same as an absent chunk.
        let icc_profile = decoder.icc_profile().unwrap_or(None);
        let exif = decoder.exif_metadata().unwrap_or(None);
        let xmp = decoder.xmp_metadata().unwrap_or(None);
        // Derive display orientation from the EXIF payload, if any.
        let orientation = exif
            .as_deref()
            .and_then(crate::exif_orientation::parse_orientation)
            .and_then(zenpixels::Orientation::from_exif);
        let has_alpha = decoder.has_alpha();
        Ok(Self {
            width,
            height,
            has_alpha,
            is_lossy,
            has_animation,
            frame_count,
            format,
            orientation,
            icc_profile,
            exif,
            xmp,
        })
    }

    /// Estimates the cost of decoding this image at `output_bpp` bytes
    /// per pixel; animations are estimated across all frames.
    #[must_use]
    pub fn estimate_decode(&self, output_bpp: u8) -> crate::heuristics::DecodeEstimate {
        if !self.has_animation {
            return crate::heuristics::estimate_decode(self.width, self.height, output_bpp);
        }
        crate::heuristics::estimate_animation_decode(self.width, self.height, self.frame_count)
    }
}
/// Encoding of the primary WebP image bitstream.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
#[non_exhaustive]
pub enum BitstreamFormat {
    /// Lossy VP8 bitstream.
    #[default]
    Lossy,
    /// Lossless VP8L bitstream.
    Lossless,
}
impl core::fmt::Display for BitstreamFormat {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
BitstreamFormat::Lossy => f.write_str("lossy"),
BitstreamFormat::Lossless => f.write_str("lossless"),
}
}
}
/// Planar YUV 4:2:0 pixel data produced by [`decode_yuv420`].
///
/// Each plane is tightly packed (row stride equals plane width).
#[derive(Debug, Clone)]
pub struct YuvPlanes {
    /// Luma plane, `y_width * y_height` bytes.
    pub y: Vec<u8>,
    /// Chroma U plane, `uv_width * uv_height` bytes.
    pub u: Vec<u8>,
    /// Chroma V plane, `uv_width * uv_height` bytes.
    pub v: Vec<u8>,
    /// Luma plane width in pixels (image width).
    pub y_width: u32,
    /// Luma plane height in pixels (image height).
    pub y_height: u32,
    /// Chroma plane width, half the luma width rounded up.
    pub uv_width: u32,
    /// Chroma plane height, half the luma height rounded up.
    pub uv_height: u32,
}
/// Decodes a WebP image to BGRA8 by swapping the R/B channels of the
/// RGBA decode in place. Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_bgra(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, width, height) = decode_rgba(data)?;
    garb::bytes::rgba_to_bgra_inplace(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, width, height))
}
/// Decodes a WebP image to tightly packed BGR8.
///
/// Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_bgr(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (rgba, w, h) = decode_rgba(data)?;
    // Size math in `usize`: `w * h * 3` in u32 can overflow for large
    // canvases before the cast, producing an undersized buffer.
    let mut bgr = vec![0u8; (w as usize) * (h as usize) * 3];
    garb::bytes::rgba_to_bgr(&rgba, &mut bgr).map_err(|e| at!(garb_err(e)))?;
    Ok((bgr, w, h))
}
/// Decodes a WebP image as BGRA8 into `output` with a row stride of
/// `stride_pixels` pixels. Returns `(width, height)`.
#[track_caller]
pub fn decode_bgra_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    let (rgba, width, height) = decode_rgba(data)?;
    // Strided RGBA -> BGRA conversion, validated by convert_to_output.
    let convert = |src: &[u8], dst: &mut [u8], w: usize, h: usize, ss: usize, ds: usize| {
        garb::bytes::rgba_to_bgra_strided(src, dst, w, h, ss, ds).map_err(garb_err)
    };
    convert_to_output(&rgba, output, width, height, 4, Some(stride_pixels), convert)?;
    Ok((width, height))
}
/// Decodes a WebP image to ARGB8 by rotating the channels of the RGBA
/// decode in place. Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_argb(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, width, height) = decode_rgba(data)?;
    garb::bytes::rgba_to_argb_inplace(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, width, height))
}
/// Decodes a WebP image as ARGB8 into `output` with a row stride of
/// `stride_pixels` pixels. Returns `(width, height)`.
#[track_caller]
pub fn decode_argb_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    let (rgba, width, height) = decode_rgba(data)?;
    // Strided RGBA -> ARGB conversion, validated by convert_to_output.
    let convert = |src: &[u8], dst: &mut [u8], w: usize, h: usize, ss: usize, ds: usize| {
        garb::bytes::rgba_to_argb_strided(src, dst, w, h, ss, ds).map_err(garb_err)
    };
    convert_to_output(&rgba, output, width, height, 4, Some(stride_pixels), convert)?;
    Ok((width, height))
}
/// Decodes a WebP image as BGR8 (3 bytes per pixel) into `output` with a
/// row stride of `stride_pixels` pixels. Returns `(width, height)`.
#[track_caller]
pub fn decode_bgr_into(
    data: &[u8],
    output: &mut [u8],
    stride_pixels: u32,
) -> DecodeResult<(u32, u32)> {
    let (rgba, width, height) = decode_rgba(data)?;
    // Strided RGBA -> BGR conversion, validated by convert_to_output.
    let convert = |src: &[u8], dst: &mut [u8], w: usize, h: usize, ss: usize, ds: usize| {
        garb::bytes::rgba_to_bgr_strided(src, dst, w, h, ss, ds).map_err(garb_err)
    };
    convert_to_output(&rgba, output, width, height, 3, Some(stride_pixels), convert)?;
    Ok((width, height))
}
/// Decodes a WebP image into planar YUV 4:2:0 buffers.
///
/// For a still lossy (VP8) image the decoder's native YUV frame is copied
/// out directly, avoiding a YUV -> RGB -> YUV round trip. All other
/// inputs (lossless, animated) are decoded to RGBA first and converted
/// back to YUV.
#[track_caller]
pub fn decode_yuv420(data: &[u8]) -> DecodeResult<YuvPlanes> {
    let decoder = WebPDecoder::new(data)?;
    // Fast path: still lossy image with a VP8 chunk present.
    if decoder.is_lossy() && !decoder.is_animated() {
        if let Some(range) = decoder.chunks.get(&WebPRiffChunk::VP8) {
            let data_slice = decoder.chunk_slice(range)?;
            let mut ctx = super::vp8v2::DecoderContext::new();
            let frame = ctx.decode_to_frame(data_slice)?;
            let w = u32::from(frame.width);
            let h = u32::from(frame.height);
            // 4:2:0 chroma planes are half resolution in both dimensions.
            let uv_w = w.div_ceil(2);
            let uv_h = h.div_ceil(2);
            // The frame's luma buffer rows are padded up to a multiple of
            // 16 (macroblock width); copy out only `w` pixels per row.
            let buffer_width = {
                let diff = w % 16;
                if diff > 0 {
                    (w + 16 - diff) as usize
                } else {
                    w as usize
                }
            };
            // Chroma rows use half the padded luma stride.
            let chroma_bw = buffer_width / 2;
            // Repack luma into a tight `w`-stride plane, row by row.
            let mut y = Vec::with_capacity((w * h) as usize);
            for row in 0..h as usize {
                y.extend_from_slice(
                    &frame.ybuf[row * buffer_width..row * buffer_width + w as usize],
                );
            }
            // Same row-wise repacking for the two chroma planes.
            let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
            let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
            for row in 0..uv_h as usize {
                u.extend_from_slice(&frame.ubuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
                v.extend_from_slice(&frame.vbuf[row * chroma_bw..row * chroma_bw + uv_w as usize]);
            }
            return Ok(YuvPlanes {
                y,
                u,
                v,
                y_width: w,
                y_height: h,
                uv_width: uv_w,
                uv_height: uv_h,
            });
        }
    }
    // Fallback: decode to RGBA and convert back to YUV 4:2:0.
    let (rgba, w, h) = decode_rgba(data)?;
    let (y_bytes, u_bytes, v_bytes) =
        super::yuv::convert_image_yuv::<4>(&rgba, w as u16, h as u16, w as usize);
    let uv_w = w.div_ceil(2);
    let uv_h = h.div_ceil(2);
    // convert_image_yuv emits macroblock-aligned rows (16 px luma, 8 px
    // chroma per macroblock); strip the right-edge padding while copying.
    let mb_width = (w as usize).div_ceil(16);
    let luma_width = 16 * mb_width;
    let chroma_width = 8 * mb_width;
    let mut y = Vec::with_capacity((w * h) as usize);
    for row in 0..h as usize {
        y.extend_from_slice(&y_bytes[row * luma_width..row * luma_width + w as usize]);
    }
    let mut u = Vec::with_capacity((uv_w * uv_h) as usize);
    let mut v = Vec::with_capacity((uv_w * uv_h) as usize);
    for row in 0..uv_h as usize {
        u.extend_from_slice(&u_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
        v.extend_from_slice(&v_bytes[row * chroma_width..row * chroma_width + uv_w as usize]);
    }
    Ok(YuvPlanes {
        y,
        u,
        v,
        y_width: w,
        y_height: h,
        uv_width: uv_w,
        uv_height: uv_h,
    })
}
/// Decodes a WebP image to RGBA8 with color channels premultiplied by
/// alpha, in place. Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgba_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, width, height) = decode_rgba(data)?;
    garb::bytes::premultiply_alpha_rgba_u8(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, width, height))
}
/// Decodes a WebP image to BGRA8 with color channels premultiplied by
/// alpha, in place. Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_bgra_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, width, height) = decode_bgra(data)?;
    garb::bytes::premultiply_alpha_bgra_u8(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, width, height))
}
/// Decodes a WebP image to premultiplied ARGB8: premultiply in RGBA
/// order first, then rotate channels to ARGB. Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_argb_premultiplied(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (mut pixels, width, height) = decode_rgba_premultiplied(data)?;
    garb::bytes::rgba_to_argb_inplace(&mut pixels).map_err(|e| at!(garb_err(e)))?;
    Ok((pixels, width, height))
}
/// Decodes a WebP image to packed RGB565 (2 bytes per pixel).
///
/// Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgb565(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (rgba, w, h) = decode_rgba(data)?;
    // Size math in `usize`: `w * h * 2` in u32 can overflow for large
    // canvases before the cast, producing an undersized buffer.
    let mut out = vec![0u8; (w as usize) * (h as usize) * 2];
    garb::bytes::rgba_to_rgb565(&rgba, &mut out).map_err(|e| at!(garb_err(e)))?;
    Ok((out, w, h))
}
/// Decodes a WebP image to packed RGBA4444 (2 bytes per pixel).
///
/// Returns `(pixels, width, height)`.
#[track_caller]
pub fn decode_rgba4444(data: &[u8]) -> DecodeResult<(Vec<u8>, u32, u32)> {
    let (rgba, w, h) = decode_rgba(data)?;
    // Size math in `usize`: `w * h * 2` in u32 can overflow for large
    // canvases before the cast, producing an undersized buffer.
    let mut out = vec![0u8; (w as usize) * (h as usize) * 2];
    garb::bytes::rgba_to_rgba4444(&rgba, &mut out).map_err(|e| at!(garb_err(e)))?;
    Ok((out, w, h))
}
#[cfg(test)]
mod tests {
    use super::*;
    const RGB_BPP: usize = 3;
    /// Asserts every RGB pixel in `data` matches the first pixel within a
    /// per-channel tolerance of 1 (rounding slack from the lossy decode).
    fn assert_single_color(data: &[u8]) {
        let first_pixel = &data[..RGB_BPP];
        for (i, ch) in data.chunks_exact(RGB_BPP).enumerate() {
            for c in 0..RGB_BPP {
                let diff = (ch[c] as i16 - first_pixel[c] as i16).unsigned_abs();
                assert!(
                    diff <= 1,
                    "pixel {i} channel {c}: got {} expected {} (diff {diff})",
                    ch[c],
                    first_pixel[c]
                );
            }
        }
    }
    /// Fuzz-derived input: a chunk size near u32::MAX must not overflow
    /// during header parsing (construction may fail, but must not panic).
    #[test]
    fn add_with_overflow_size() {
        let bytes = vec![
            0x52, 0x49, 0x46, 0x46, 0xaf, 0x37, 0x80, 0x47, 0x57, 0x45, 0x42, 0x50, 0x6c, 0x64,
            0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xfb, 0x7e, 0x73, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x40, 0xfb, 0xff, 0xff, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65,
            0x00, 0x00, 0x00, 0x00, 0x62, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x49,
            0x49, 0x54, 0x55, 0x50, 0x4c, 0x54, 0x59, 0x50, 0x45, 0x33, 0x37, 0x44, 0x4d, 0x46,
        ];
        let _ = WebPDecoder::new(&bytes);
    }
    #[test]
    fn decode_2x2_single_color_image() {
        const NUM_PIXELS: usize = 2 * 2 * RGB_BPP;
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x02, 0x00,
            0x02, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];
        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();
        assert_single_color(&data);
    }
    #[test]
    fn decode_3x3_single_color_image() {
        const NUM_PIXELS: usize = 3 * 3 * RGB_BPP;
        let bytes = [
            0x52, 0x49, 0x46, 0x46, 0x3c, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50, 0x56, 0x50,
            0x38, 0x20, 0x30, 0x00, 0x00, 0x00, 0xd0, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x03, 0x00,
            0x03, 0x00, 0x02, 0x00, 0x34, 0x25, 0xa0, 0x02, 0x74, 0xba, 0x01, 0xf8, 0x00, 0x03,
            0xb0, 0x00, 0xfe, 0xf0, 0xc4, 0x0b, 0xff, 0x20, 0xb9, 0x61, 0x75, 0xc8, 0xd7, 0xff,
            0x20, 0x3f, 0xe4, 0x07, 0xfc, 0x80, 0xff, 0xf8, 0xf2, 0x00, 0x00, 0x00,
        ];
        let mut data = [0; NUM_PIXELS];
        let mut decoder = WebPDecoder::new(&bytes).unwrap();
        decoder.read_image(&mut data).unwrap();
        assert_single_color(&data);
    }
}