// webp_rust/compat.rs

//! Compatibility wrapper for callback-based decode flows used by `wml2`.
//!
//! This module intentionally mirrors the shape of the `wml2` draw-side API so
//! the WebP codec core can stay in `webp-rust` while callers keep a thin
//! adapter around their own image buffer and metadata model.

use crate::decoder::lossless::decode_lossless_webp_to_rgba;
use crate::decoder::lossy::{decode_lossy_vp8_frame_to_rgba, decode_lossy_webp_to_rgba};
use crate::decoder::{
    get_features, parse_animation_webp, DecodedImage, DecoderError, ParsedAnimationFrame,
    WebpFormat,
};
use bin_rs::io::read_u32_le;
use bin_rs::reader::BinaryReader;
use std::collections::HashMap;

/// Boxed error type shared by every fallible entry point in this module.
type Error = Box<dyn std::error::Error>;

/// Metadata map used by the compatibility wrapper.
pub type Metadata = HashMap<String, DataMap>;
21
/// Minimal metadata value type used by the compatibility wrapper.
///
/// NOTE(review): `UIntAllay` presumably mirrors `wml2`'s variant spelling for
/// an unsigned-integer array; keep the spelling for API-shape compatibility —
/// confirm against the `wml2` definition before ever renaming it.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DataMap {
    /// Single unsigned integer (e.g. width, height, loop count).
    UInt(u64),
    /// List of unsigned integers (e.g. per-frame durations).
    UIntAllay(Vec<u64>),
    /// Uninterpreted byte payload (e.g. ICC profile, raw EXIF).
    Raw(Vec<u8>),
    /// Text value (e.g. codec name, XMP document).
    Ascii(String),
    /// Explicit "no value" marker.
    None,
}
31
/// RGBA color value used by animation initialization.
///
/// Filled from the animation background color by `argb_to_rgba`; one byte
/// per channel.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RGBA {
    pub red: u8,
    pub green: u8,
    pub blue: u8,
    pub alpha: u8,
}
40
/// Callback response command.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResponseCommand {
    /// Stop processing further frames.
    Abort,
    /// Keep processing.
    Continue,
}
47
/// Response returned by compatibility callbacks.
///
/// Wraps a single [`ResponseCommand`]; construct via
/// [`CallbackResponse::abort`] or [`CallbackResponse::cont`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct CallbackResponse {
    /// Command telling the decoder how to proceed.
    pub response: ResponseCommand,
}
53
54impl CallbackResponse {
55    pub fn abort() -> Self {
56        Self {
57            response: ResponseCommand::Abort,
58        }
59    }
60
61    pub fn cont() -> Self {
62        Self {
63            response: ResponseCommand::Continue,
64        }
65    }
66}
67
/// Receives decoded image data from compatibility decode entry points.
///
/// `decode` drives an implementor through this sequence: `init` once, one or
/// more `draw`/`next` rounds, `set_metadata` for each metadata entry, and
/// finally `terminate`.
pub trait DrawCallback: Sync + Send {
    /// Called once before any pixels; `width`/`height` give the canvas size,
    /// `option` carries loop count / background for animated streams.
    fn init(
        &mut self,
        width: usize,
        height: usize,
        option: Option<InitOptions>,
    ) -> Result<Option<CallbackResponse>, Error>;
    /// Delivers a rectangle of RGBA pixel `data` at (`start_x`, `start_y`).
    fn draw(
        &mut self,
        start_x: usize,
        start_y: usize,
        width: usize,
        height: usize,
        data: &[u8],
        option: Option<DrawOptions>,
    ) -> Result<Option<CallbackResponse>, Error>;
    /// Called once after all pixel data and metadata have been delivered.
    fn terminate(
        &mut self,
        term: Option<TerminateOptions>,
    ) -> Result<Option<CallbackResponse>, Error>;
    /// Announces a frame transition; the returned `Continue`/`Abort` controls
    /// whether further animation frames are delivered.
    fn next(&mut self, next: Option<NextOptions>) -> Result<Option<CallbackResponse>, Error>;
    /// Receives diagnostic text. Not called by `decode` in this module.
    fn verbose(
        &mut self,
        verbose: &str,
        option: Option<VerboseOptions>,
    ) -> Result<Option<CallbackResponse>, Error>;
    /// Stores one metadata entry (`key` -> `value`).
    fn set_metadata(
        &mut self,
        key: &str,
        value: DataMap,
    ) -> Result<Option<CallbackResponse>, Error>;
}
101
/// Decoder initialization options.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InitOptions {
    /// Animation loop count; `decode` passes 0 for still images.
    pub loop_count: u32,
    /// Canvas background color; only populated for animated streams.
    pub background: Option<RGBA>,
    /// True when the stream is animated.
    pub animation: bool,
}
109
/// Draw options placeholder kept for shape compatibility.
/// Carries no fields; present so `draw` signatures line up with `wml2`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct DrawOptions {}

/// Termination options placeholder kept for shape compatibility.
/// Carries no fields; present so `terminate` signatures line up with `wml2`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct TerminateOptions {}

/// Verbose options placeholder kept for shape compatibility.
/// Carries no fields; present so `verbose` signatures line up with `wml2`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct VerboseOptions {}
121
/// Frame transition commands.
///
/// Only `Continue` is produced by this module (see `next_options`); the other
/// variants exist to mirror the `wml2` API shape.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NextOption {
    Continue,
    Next,
    Dispose,
    ClearAbort,
    Terminate,
}
131
/// Disposal mode for an animation frame.
///
/// Only `None` and `Background` are produced by `next_options` in this
/// module (driven by the frame's `dispose_to_background` flag); `Override`
/// and `Previous` mirror the `wml2` API shape.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NextDispose {
    /// Leave the frame region untouched.
    None,
    Override,
    /// Restore the frame region to the background after display.
    Background,
    Previous,
}
140
/// Blend mode for an animation frame.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NextBlend {
    /// Blend the frame onto the canvas (emitted when `frame.blend` is true).
    Source,
    /// Overwrite the destination rectangle (emitted when `frame.blend` is false).
    Override,
}
147
/// Destination rectangle for a frame.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImageRect {
    /// Left edge on the canvas, in pixels.
    pub start_x: i32,
    /// Top edge on the canvas, in pixels.
    pub start_y: i32,
    pub width: usize,
    pub height: usize,
}
156
/// Per-frame transition options.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct NextOptions {
    /// Transition command (always `Continue` when built by `next_options`).
    pub flag: NextOption,
    /// Frame display duration, copied from the frame's duration field
    /// (WebP ANMF durations are milliseconds — TODO confirm callers agree).
    pub await_time: u64,
    /// Where the upcoming frame lands on the canvas.
    pub image_rect: Option<ImageRect>,
    /// How the frame's region is disposed after display.
    pub dispose_option: Option<NextDispose>,
    /// How the upcoming frame is blended onto the canvas.
    pub blend: Option<NextBlend>,
}
166
/// Decoder call options.
pub struct DecodeOptions<'a> {
    /// Debug verbosity level; not consulted by `decode` in this module.
    pub debug_flag: usize,
    /// Callback that receives pixels, metadata, and lifecycle events.
    pub drawer: &'a mut dyn DrawCallback,
    /// Optional caller-supplied options; not consulted by `decode` in this module.
    pub options: Option<Metadata>,
}
173
174impl<'a> DecodeOptions<'a> {
175    pub fn new(drawer: &'a mut dyn DrawCallback) -> Self {
176        Self {
177            debug_flag: 0,
178            drawer,
179            options: None,
180        }
181    }
182}
183
184fn argb_to_rgba(argb: u32) -> RGBA {
185    RGBA {
186        red: ((argb >> 16) & 0xff) as u8,
187        green: ((argb >> 8) & 0xff) as u8,
188        blue: (argb & 0xff) as u8,
189        alpha: (argb >> 24) as u8,
190    }
191}
192
193fn map_error(error: DecoderError) -> Error {
194    Box::new(error)
195}
196
197fn read_container<B: BinaryReader>(reader: &mut B) -> Result<Vec<u8>, Error> {
198    let header = reader.read_bytes_no_move(12)?;
199    if header.len() < 12 || &header[0..4] != b"RIFF" || &header[8..12] != b"WEBP" {
200        return Err(Box::new(std::io::Error::new(
201            std::io::ErrorKind::InvalidData,
202            "not a WebP RIFF container",
203        )));
204    }
205
206    let riff_size = read_u32_le(&header, 4) as usize;
207    let total_size = riff_size + 8;
208    if total_size < 12 {
209        return Err(Box::new(std::io::Error::new(
210            std::io::ErrorKind::InvalidData,
211            "invalid WebP container length",
212        )));
213    }
214
215    Ok(reader.read_bytes_as_vec(total_size)?)
216}
217
218fn next_options(frame: &ParsedAnimationFrame<'_>) -> NextOptions {
219    NextOptions {
220        flag: NextOption::Continue,
221        await_time: frame.duration as u64,
222        image_rect: Some(ImageRect {
223            start_x: frame.x_offset as i32,
224            start_y: frame.y_offset as i32,
225            width: frame.width,
226            height: frame.height,
227        }),
228        dispose_option: Some(if frame.dispose_to_background {
229            NextDispose::Background
230        } else {
231            NextDispose::None
232        }),
233        blend: Some(if frame.blend {
234            NextBlend::Source
235        } else {
236            NextBlend::Override
237        }),
238    }
239}
240
241fn decode_frame_rgba(frame: &ParsedAnimationFrame<'_>) -> Result<DecodedImage, DecoderError> {
242    let image = match &frame.image_chunk.fourcc {
243        b"VP8L" => {
244            if frame.alpha_chunk.is_some() {
245                return Err(DecoderError::Bitstream(
246                    "VP8L animation frame must not carry ALPH chunk",
247                ));
248            }
249            crate::decoder::decode_lossless_vp8l_to_rgba(frame.image_data)?
250        }
251        b"VP8 " => decode_lossy_vp8_frame_to_rgba(frame.image_data, frame.alpha_data)?,
252        _ => return Err(DecoderError::Bitstream("unsupported animation frame chunk")),
253    };
254
255    if image.width != frame.width || image.height != frame.height {
256        return Err(DecoderError::Bitstream(
257            "animation frame dimensions do not match bitstream",
258        ));
259    }
260    Ok(image)
261}
262
/// Reads a little-endian u32 from the first four bytes of `bytes`.
/// Panics if `bytes` is shorter than four bytes (callers bounds-check first).
fn read_le32(bytes: &[u8]) -> usize {
    let word: [u8; 4] = bytes[..4].try_into().expect("need at least four bytes");
    u32::from_le_bytes(word) as usize
}
266
267fn scan_chunks<'a>(data: &'a [u8]) -> Result<Vec<([u8; 4], &'a [u8])>, DecoderError> {
268    if data.len() < 12 {
269        return Err(DecoderError::NotEnoughData("RIFF header"));
270    }
271    if &data[0..4] != b"RIFF" || &data[8..12] != b"WEBP" {
272        return Err(DecoderError::Bitstream("wrong RIFF WEBP signature"));
273    }
274
275    let riff_size = read_le32(&data[4..8]);
276    let limit = riff_size + 8;
277    if limit > data.len() {
278        return Err(DecoderError::NotEnoughData("truncated RIFF payload"));
279    }
280
281    let mut offset = 12;
282    let mut chunks = Vec::new();
283    while offset + 8 <= limit {
284        let size = read_le32(&data[offset + 4..offset + 8]);
285        let padded_size = size + (size & 1);
286        let chunk_end = offset + 8 + padded_size;
287        if chunk_end > limit {
288            return Err(DecoderError::NotEnoughData("chunk payload"));
289        }
290
291        let fourcc: [u8; 4] = data[offset..offset + 4]
292            .try_into()
293            .expect("valid fourcc slice");
294        let payload = &data[offset + 8..offset + 8 + size];
295        chunks.push((fourcc, payload));
296        offset = chunk_end;
297    }
298
299    Ok(chunks)
300}
301
302fn webp_codec_name(format: WebpFormat, animated: bool) -> &'static str {
303    if animated {
304        "Animated"
305    } else {
306        match format {
307            WebpFormat::Lossy => "Lossy",
308            WebpFormat::Lossless => "Lossless",
309            WebpFormat::Undefined => "Undefined",
310        }
311    }
312}
313
314fn make_metadata(data: &[u8]) -> Result<Metadata, DecoderError> {
315    let features = get_features(data)?;
316    let chunks = scan_chunks(data)?;
317    let mut map = HashMap::new();
318
319    map.insert("Format".to_string(), DataMap::Ascii("WEBP".to_string()));
320    map.insert("width".to_string(), DataMap::UInt(features.width as u64));
321    map.insert("height".to_string(), DataMap::UInt(features.height as u64));
322    map.insert(
323        "WebP codec".to_string(),
324        DataMap::Ascii(webp_codec_name(features.format, features.has_animation).to_string()),
325    );
326    map.insert(
327        "WebP has alpha".to_string(),
328        DataMap::Ascii(features.has_alpha.to_string()),
329    );
330    map.insert(
331        "WebP animated".to_string(),
332        DataMap::Ascii(features.has_animation.to_string()),
333    );
334
335    if let Some(vp8x) = features.vp8x {
336        map.insert(
337            "canvas width".to_string(),
338            DataMap::UInt(vp8x.canvas_width as u64),
339        );
340        map.insert(
341            "canvas height".to_string(),
342            DataMap::UInt(vp8x.canvas_height as u64),
343        );
344    }
345
346    if features.has_animation {
347        let parsed = parse_animation_webp(data)?;
348        map.insert(
349            "Animation frames".to_string(),
350            DataMap::UInt(parsed.frames.len() as u64),
351        );
352        map.insert(
353            "Animation loop count".to_string(),
354            DataMap::UInt(parsed.animation.loop_count as u64),
355        );
356        map.insert(
357            "Animation background color".to_string(),
358            DataMap::UInt(parsed.animation.background_color as u64),
359        );
360        map.insert(
361            "Animation frame durations".to_string(),
362            DataMap::UIntAllay(
363                parsed
364                    .frames
365                    .iter()
366                    .map(|frame| frame.duration as u64)
367                    .collect(),
368            ),
369        );
370    }
371
372    for (fourcc, payload) in chunks {
373        match &fourcc {
374            b"ICCP" => {
375                map.insert("ICC Profile".to_string(), DataMap::Raw(payload.to_vec()));
376            }
377            b"EXIF" => {
378                map.insert("EXIF Raw".to_string(), DataMap::Raw(payload.to_vec()));
379            }
380            b"XMP " => match String::from_utf8(payload.to_vec()) {
381                Ok(xmp) => {
382                    map.insert("XMP".to_string(), DataMap::Ascii(xmp));
383                }
384                Err(_) => {
385                    map.insert("XMP Raw".to_string(), DataMap::Raw(payload.to_vec()));
386                }
387            },
388            _ => {}
389        }
390    }
391
392    Ok(map)
393}
394
395/// Decodes a WebP image using a callback-based interface compatible with
396/// `wml2`'s draw-side flow.
397pub fn decode<B: BinaryReader>(
398    reader: &mut B,
399    option: &mut DecodeOptions<'_>,
400) -> Result<(), Error> {
401    let data = read_container(reader)?;
402    let metadata = make_metadata(&data).map_err(map_error)?;
403    let features = get_features(&data).map_err(map_error)?;
404
405    if features.has_animation {
406        let parsed = parse_animation_webp(&data).map_err(map_error)?;
407        let init = InitOptions {
408            loop_count: parsed.animation.loop_count as u32,
409            background: Some(argb_to_rgba(parsed.animation.background_color)),
410            animation: true,
411        };
412        option
413            .drawer
414            .init(parsed.features.width, parsed.features.height, Some(init))?;
415
416        let mut allow_multi_image = false;
417        for (index, frame) in parsed.frames.iter().enumerate() {
418            let decoded = decode_frame_rgba(frame).map_err(map_error)?;
419            if index == 0 {
420                option.drawer.draw(
421                    frame.x_offset,
422                    frame.y_offset,
423                    frame.width,
424                    frame.height,
425                    &decoded.rgba,
426                    None,
427                )?;
428
429                let result = option.drawer.next(Some(next_options(frame)))?;
430                if let Some(response) = result {
431                    if response.response == ResponseCommand::Continue {
432                        allow_multi_image = true;
433                        option
434                            .drawer
435                            .draw(0, 0, frame.width, frame.height, &decoded.rgba, None)?;
436                    }
437                }
438                continue;
439            }
440
441            if !allow_multi_image {
442                continue;
443            }
444
445            let result = option.drawer.next(Some(next_options(frame)))?;
446            if let Some(response) = result {
447                if response.response == ResponseCommand::Abort {
448                    break;
449                }
450            }
451
452            option
453                .drawer
454                .draw(0, 0, frame.width, frame.height, &decoded.rgba, None)?;
455        }
456    } else {
457        let init = InitOptions {
458            loop_count: 0,
459            background: None,
460            animation: false,
461        };
462        option
463            .drawer
464            .init(features.width, features.height, Some(init))?;
465
466        let decoded = match features.format {
467            WebpFormat::Lossy => decode_lossy_webp_to_rgba(&data).map_err(map_error)?,
468            WebpFormat::Lossless => decode_lossless_webp_to_rgba(&data).map_err(map_error)?,
469            WebpFormat::Undefined => {
470                return Err(Box::new(std::io::Error::new(
471                    std::io::ErrorKind::Unsupported,
472                    "unsupported WebP format",
473                )));
474            }
475        };
476
477        option
478            .drawer
479            .draw(0, 0, decoded.width, decoded.height, &decoded.rgba, None)?;
480    }
481
482    for (key, value) in metadata {
483        option.drawer.set_metadata(&key, value)?;
484    }
485    option.drawer.terminate(None)?;
486
487    Ok(())
488}