jxl_frame/
lib.rs

//! This crate provides types related to JPEG XL frames.
//!
//! A JPEG XL image contains one or more frames. A frame represents a single unit of image that
//! can be displayed or referenced by other frames.
//!
//! A frame consists of a few components:
//! - [Frame header][FrameHeader].
//! - [Table of contents (TOC)][data::Toc].
//! - Actual frame data, in the following order, potentially permuted as specified in the TOC:
//!   - one [`LfGlobal`],
//!   - [`num_lf_groups`] [`LfGroup`]s, in raster order,
//!   - one [`HfGlobal`], potentially empty for Modular frames, and
//!   - [`num_passes`] times [`num_groups`] [pass groups][data::decode_pass_group], in raster
//!     order.
//!
//! [`num_lf_groups`]: FrameHeader::num_lf_groups
//! [`num_groups`]: FrameHeader::num_groups
//! [`num_passes`]: header::Passes::num_passes
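//!
//! A rough sketch of how these pieces are typically driven is shown below. The surrounding setup
//! (obtaining an [`ImageHeader`], a [`Bitstream`] and a [`JxlThreadPool`], and the hypothetical
//! `read_more_bytes` byte source) is assumed and not shown:
//!
//! ```ignore
//! use jxl_oxide_common::Bundle;
//!
//! // Parse the frame header and TOC.
//! let mut frame = Frame::parse(
//!     &mut bitstream,
//!     FrameContext {
//!         image_header: Arc::clone(&image_header),
//!         tracker: None,
//!         pool: pool.clone(),
//!     },
//! )?;
//!
//! // Feed raw bytes until every TOC group is loaded.
//! while !frame.is_loading_done() {
//!     let chunk: &[u8] = read_more_bytes()?; // hypothetical source of frame data
//!     frame.feed_bytes(chunk)?;
//! }
//!
//! // Parse the global sections; pass groups can then be decoded from their bitstreams.
//! let lf_global = frame.try_parse_lf_global::<i32>().unwrap()?;
//! let hf_global = frame.try_parse_hf_global(Some(&lf_global)).transpose()?;
//! ```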
use std::collections::BTreeMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};

use jxl_bitstream::Bitstream;
use jxl_grid::{AllocHandle, AllocTracker};
use jxl_image::ImageHeader;
use jxl_oxide_common::Bundle;

pub mod data;
mod error;
pub mod filter;
pub mod header;

pub use error::{Error, Result};
pub use header::FrameHeader;
use jxl_modular::Sample;
use jxl_modular::{MaConfig, image::TransformedModularSubimage};
use jxl_threadpool::JxlThreadPool;

use crate::data::*;

/// JPEG XL frame.
///
/// A frame represents a single unit of image that can be displayed or referenced by other frames.
#[derive(Debug)]
pub struct Frame {
    pool: JxlThreadPool,
    tracker: Option<AllocTracker>,
    image_header: Arc<ImageHeader>,
    header: FrameHeader,
    toc: Toc,
    data: Vec<GroupData>,
    all_group_offsets: AllGroupOffsets,
    reading_data_index: usize,
    pass_shifts: BTreeMap<u32, (i32, i32)>,
}

#[derive(Debug, Default)]
struct AllGroupOffsets {
    lf_group: AtomicUsize,
    hf_global: AtomicUsize,
    pass_group: AtomicUsize,
    has_error: AtomicUsize,
}

#[derive(Debug)]
struct GroupData {
    toc_group: TocGroup,
    bytes: Vec<u8>,
    handle: Option<AllocHandle>,
}

impl From<TocGroup> for GroupData {
    fn from(value: TocGroup) -> Self {
        Self {
            toc_group: value,
            bytes: Vec::new(),
            handle: None,
        }
    }
}

impl GroupData {
    fn ensure_allocated(&mut self, tracker: Option<&AllocTracker>) -> Result<()> {
        if let Some(tracker) = tracker {
            if self.handle.is_some() {
                return Ok(());
            }

            let size = self.toc_group.size as usize;
            let handle = tracker.alloc::<u8>(size)?;
            self.bytes.try_reserve(size)?;
            self.handle = Some(handle);
        } else {
            let additional = (self.toc_group.size as usize).saturating_sub(self.bytes.capacity());
            self.bytes.try_reserve(additional)?;
        }

        Ok(())
    }
}

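/// Parsing context for a [`Frame`], carrying the image header, an optional allocation tracker,
/// and the thread pool to use.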
#[derive(Debug, Clone)]
pub struct FrameContext<'a> {
    pub image_header: Arc<ImageHeader>,
    pub tracker: Option<&'a AllocTracker>,
    pub pool: JxlThreadPool,
}

impl Bundle<FrameContext<'_>> for Frame {
    type Error = crate::Error;

    fn parse(bitstream: &mut Bitstream, ctx: FrameContext) -> Result<Self> {
        let FrameContext {
            image_header,
            tracker,
            pool,
        } = ctx;
        let tracker = tracker.cloned();

        bitstream.zero_pad_to_byte()?;
        let base_offset = bitstream.num_read_bits() / 8;
        let header = FrameHeader::parse(bitstream, &image_header)?;

        let width = header.width as u64;
        let height = header.height as u64;
        if width > (1 << 30) {
            tracing::error!(width, "Frame width too large; limit is 2^30");
            return Err(jxl_bitstream::Error::ProfileConformance("frame width too large").into());
        }
        if height > (1 << 30) {
            tracing::error!(height, "Frame height too large; limit is 2^30");
            return Err(jxl_bitstream::Error::ProfileConformance("frame height too large").into());
        }
        if (width * height) > (1 << 40) {
            tracing::error!(
                area = width * height,
                "Frame area (width * height) too large; limit is 2^40"
            );
            return Err(jxl_bitstream::Error::ProfileConformance("frame area too large").into());
        }

        let has_extra = !header.ec_blending_info.is_empty();
        for blending_info in std::iter::once(&header.blending_info).chain(&header.ec_blending_info)
        {
            if blending_info.mode.use_alpha() && has_extra {
                let alpha_idx = blending_info.alpha_channel as usize;
                let Some(alpha_ec_info) = image_header.metadata.ec_info.get(alpha_idx) else {
                    tracing::error!(?blending_info, "blending_info.alpha_channel out of range");
                    return Err(jxl_bitstream::Error::ValidationFailed(
                        "blending_info.alpha_channel out of range",
                    )
                    .into());
                };
                if !alpha_ec_info.is_alpha() {
                    tracing::error!(
                        ?blending_info,
                        ?alpha_ec_info,
                        "blending_info.alpha_channel does not refer to an alpha channel",
                    );
                    return Err(jxl_bitstream::Error::ValidationFailed(
                        "blending_info.alpha_channel does not refer to an alpha channel",
                    )
                    .into());
                }
            }
        }

        if header.flags.use_lf_frame() && header.lf_level >= 4 {
            return Err(jxl_bitstream::Error::ValidationFailed("lf_level out of range").into());
        }

        let color_upsampling_shift = header.upsampling.trailing_zeros();
        for (ec_upsampling, ec_info) in header
            .ec_upsampling
            .iter()
            .zip(image_header.metadata.ec_info.iter())
        {
            let ec_upsampling_shift = ec_upsampling.trailing_zeros();
            let dim_shift = ec_info.dim_shift;

            if ec_upsampling_shift + dim_shift < color_upsampling_shift {
                return Err(jxl_bitstream::Error::ValidationFailed(
                    "EC upsampling < color upsampling, which is invalid",
                )
                .into());
            }

            if ec_upsampling_shift + dim_shift > 6 {
                tracing::error!(
                    ec_upsampling,
                    dim_shift = ec_info.dim_shift,
                    "Cumulative EC upsampling factor is too large"
                );
                return Err(jxl_bitstream::Error::ValidationFailed(
                    "cumulative EC upsampling factor is too large",
                )
                .into());
            }

            let actual_dim_shift = ec_upsampling_shift + dim_shift - color_upsampling_shift;

            if actual_dim_shift > 7 + header.group_size_shift {
                return Err(jxl_bitstream::Error::ValidationFailed("dim_shift too large").into());
            }
        }

        if header.width == 0 || header.height == 0 {
            return Err(jxl_bitstream::Error::ValidationFailed(
                "Invalid crop dimensions for frame: zero width or height",
            )
            .into());
        }

        let mut toc = Toc::parse(bitstream, &header)?;
        toc.adjust_offsets(base_offset);
        let data = toc.iter_bitstream_order().map(GroupData::from).collect();

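        // Compute the (min_shift, max_shift) range of each progressive pass from its
        // downsampling factor; the final pass always reaches shift 0.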
        let passes = &header.passes;
        let mut pass_shifts = BTreeMap::new();
        let mut maxshift = 3i32;
        for (&downsample, &last_pass) in passes.downsample.iter().zip(&passes.last_pass) {
            let minshift = downsample.trailing_zeros() as i32;
            pass_shifts.insert(last_pass, (minshift, maxshift));
            maxshift = minshift;
        }
        pass_shifts.insert(header.passes.num_passes - 1, (0i32, maxshift));

        Ok(Self {
            pool,
            tracker,
            image_header,
            header,
            toc,
            data,
            all_group_offsets: AllGroupOffsets::default(),
            reading_data_index: 0,
            pass_shifts,
        })
    }
}

impl Frame {
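    /// Returns the allocation tracker of this frame, if any.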
    #[inline]
    pub fn alloc_tracker(&self) -> Option<&AllocTracker> {
        self.tracker.as_ref()
    }

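    /// Returns the image header of the image this frame belongs to.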
    pub fn image_header(&self) -> &ImageHeader {
        &self.image_header
    }

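    /// Returns a cloned `Arc` of the image header.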
    pub fn clone_image_header(&self) -> Arc<ImageHeader> {
        Arc::clone(&self.image_header)
    }

    /// Returns the frame header.
    pub fn header(&self) -> &FrameHeader {
        &self.header
    }

    /// Returns the TOC.
    ///
    /// See the documentation of [`Toc`] for details.
    pub fn toc(&self) -> &Toc {
        &self.toc
    }

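    /// Returns the `(min_shift, max_shift)` ranges of the passes, keyed by pass index.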
    pub fn pass_shifts(&self) -> &BTreeMap<u32, (i32, i32)> {
        &self.pass_shifts
    }

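    /// Returns the bytes loaded so far for the given TOC group, if the frame has such a group.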
    pub fn data(&self, group: TocGroupKind) -> Option<&[u8]> {
        let idx = self.toc.group_index_bitstream_order(group);
        self.data.get(idx).map(|b| &*b.bytes)
    }
}

impl Frame {
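    /// Feeds bytes into the frame, filling the group buffers in bitstream order.
    ///
    /// Returns the unused portion of `buf`, which is non-empty only after every group of this
    /// frame has been fully loaded.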
    pub fn feed_bytes<'buf>(&mut self, mut buf: &'buf [u8]) -> Result<&'buf [u8]> {
        while let Some(group_data) = self.data.get_mut(self.reading_data_index) {
            group_data.ensure_allocated(self.tracker.as_ref())?;
            let bytes_left = group_data.toc_group.size as usize - group_data.bytes.len();
            if buf.len() < bytes_left {
                group_data.bytes.extend_from_slice(buf);
                return Ok(&[]);
            }
            let (l, r) = buf.split_at(bytes_left);
            group_data.bytes.extend_from_slice(l);
            buf = r;
            self.reading_data_index += 1;
        }
        Ok(buf)
    }

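    /// Returns the TOC group that is currently being loaded, or `None` if every group has been
    /// loaded.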
    #[inline]
    pub fn current_loading_group(&self) -> Option<TocGroup> {
        self.toc.iter_bitstream_order().nth(self.reading_data_index)
    }

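    /// Returns whether every group of this frame has been fully loaded.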
    #[inline]
    pub fn is_loading_done(&self) -> bool {
        self.reading_data_index >= self.data.len()
    }
}

impl Frame {
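    /// Attempts to parse the [`LfGlobal`] section of this frame from the data loaded so far.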
    pub fn try_parse_lf_global<S: Sample>(&self) -> Option<Result<LfGlobal<S>>> {
        Some(if self.toc.is_single_entry() {
            if self.all_group_offsets.has_error.load(Ordering::Relaxed) != 0 {
                return Some(Err(Error::HadError));
            }

            let group = self.data.first()?;
            let loaded = self.reading_data_index != 0;
            let mut bitstream = Bitstream::new(&group.bytes);
            let lf_global = LfGlobal::parse(
                &mut bitstream,
                LfGlobalParams::new(
                    &self.image_header,
                    &self.header,
                    self.tracker.as_ref(),
                    !loaded,
                ),
            );
            match lf_global {
                Ok(lf_global) => {
                    self.all_group_offsets
                        .lf_group
                        .store(bitstream.num_read_bits(), Ordering::Relaxed);
                    Ok(lf_global)
                }
                Err(e) if !loaded && e.unexpected_eof() => Err(e),
                Err(e) => {
                    self.all_group_offsets.has_error.store(1, Ordering::Relaxed);
                    Err(e)
                }
            }
        } else {
            let idx = self.toc.group_index_bitstream_order(TocGroupKind::LfGlobal);
            let group = self.data.get(idx)?;
            let allow_partial = group.bytes.len() < group.toc_group.size as usize;

            let mut bitstream = Bitstream::new(&group.bytes);
            LfGlobal::parse(
                &mut bitstream,
                LfGlobalParams::new(
                    &self.image_header,
                    &self.header,
                    self.tracker.as_ref(),
                    allow_partial,
                ),
            )
        })
    }

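    /// Attempts to parse the [`LfGroup`] with the given index from the data loaded so far.
    ///
    /// Returns `None` if there is not enough data to parse the group yet.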
    pub fn try_parse_lf_group<S: Sample>(
        &self,
        lf_global_vardct: Option<&LfGlobalVarDct>,
        global_ma_config: Option<&MaConfig>,
        mlf_group: Option<TransformedModularSubimage<S>>,
        lf_group_idx: u32,
    ) -> Option<Result<LfGroup<S>>> {
        if self.toc.is_single_entry() {
            if self.all_group_offsets.has_error.load(Ordering::Relaxed) != 0 {
                return Some(Err(Error::HadError));
            }

            if lf_group_idx != 0 {
                return None;
            }

            let group = self.data.first()?;
            let loaded = self.reading_data_index != 0;
            let mut bitstream = Bitstream::new(&group.bytes);
            let offset = self.all_group_offsets.lf_group.load(Ordering::Relaxed);
            if offset == 0 {
                let lf_global = self.try_parse_lf_global::<S>().unwrap();
                if let Err(e) = lf_global {
                    return Some(Err(e));
                }
            }
            let offset = self.all_group_offsets.lf_group.load(Ordering::Relaxed);
            bitstream.skip_bits(offset).unwrap();

            let result = LfGroup::parse(
                &mut bitstream,
                LfGroupParams {
                    frame_header: &self.header,
                    quantizer: lf_global_vardct.map(|x| &x.quantizer),
                    global_ma_config,
                    mlf_group,
                    lf_group_idx,
                    allow_partial: !loaded,
                    tracker: self.tracker.as_ref(),
                    pool: &self.pool,
                },
            );

            match result {
                Ok(result) => {
                    self.all_group_offsets
                        .hf_global
                        .store(bitstream.num_read_bits(), Ordering::Relaxed);
                    Some(Ok(result))
                }
                Err(e) if !loaded && e.unexpected_eof() => None,
                Err(e) => {
                    self.all_group_offsets.has_error.store(2, Ordering::Relaxed);
                    Some(Err(e))
                }
            }
        } else {
            let idx = self
                .toc
                .group_index_bitstream_order(TocGroupKind::LfGroup(lf_group_idx));
            let group = self.data.get(idx)?;
            let allow_partial = group.bytes.len() < group.toc_group.size as usize;

            let mut bitstream = Bitstream::new(&group.bytes);
            let result = LfGroup::parse(
                &mut bitstream,
                LfGroupParams {
                    frame_header: &self.header,
                    quantizer: lf_global_vardct.map(|x| &x.quantizer),
                    global_ma_config,
                    mlf_group,
                    lf_group_idx,
                    allow_partial,
                    tracker: self.tracker.as_ref(),
                    pool: &self.pool,
                },
            );
            if allow_partial && result.is_err() {
                return None;
            }
            Some(result)
        }
    }

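    /// Attempts to parse the [`HfGlobal`] section of this frame from the data loaded so far.
    ///
    /// Returns `None` for Modular frames, which do not have an HfGlobal section. A previously
    /// parsed [`LfGlobal`] can be passed in to avoid parsing it again.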
    pub fn try_parse_hf_global<S: Sample>(
        &self,
        cached_lf_global: Option<&LfGlobal<S>>,
    ) -> Option<Result<HfGlobal>> {
        let is_modular = self.header.encoding == header::Encoding::Modular;

        if self.toc.is_single_entry() {
            if self.all_group_offsets.has_error.load(Ordering::Relaxed) != 0 {
                return Some(Err(Error::HadError));
            }

            let group = self.data.first()?;
            let loaded = self.reading_data_index != 0;
            let mut bitstream = Bitstream::new(&group.bytes);
            let offset = self.all_group_offsets.hf_global.load(Ordering::Relaxed);
            let lf_global = if cached_lf_global.is_none() && (offset == 0 || !is_modular) {
                match self.try_parse_lf_global()? {
                    Ok(lf_global) => Some(lf_global),
                    Err(e) => return Some(Err(e)),
                }
            } else {
                None
            };
            let lf_global = cached_lf_global.or(lf_global.as_ref());

            if offset == 0 {
                let lf_global = lf_global.unwrap();
                let mut gmodular = match lf_global.gmodular.try_clone() {
                    Ok(gmodular) => gmodular,
                    Err(e) => return Some(Err(e)),
                };
                let groups = gmodular
                    .modular
                    .image_mut()
                    .map(|x| x.prepare_groups(&self.pass_shifts))
                    .transpose();
                let groups = match groups {
                    Ok(groups) => groups,
                    Err(e) => return Some(Err(e.into())),
                };
                let mlf_group = groups.and_then(|mut x| x.lf_groups.pop());
                let lf_group = self
                    .try_parse_lf_group(
                        lf_global.vardct.as_ref(),
                        lf_global.gmodular.ma_config(),
                        mlf_group,
                        0,
                    )
                    .ok_or(
                        jxl_bitstream::Error::Io(std::io::ErrorKind::UnexpectedEof.into()).into(),
                    )
                    .and_then(|x| x);
                if let Err(e) = lf_group {
                    return Some(Err(e));
                }
            }
            let offset = self.all_group_offsets.hf_global.load(Ordering::Relaxed);

            if self.header.encoding == header::Encoding::Modular {
                self.all_group_offsets
                    .pass_group
                    .store(offset, Ordering::Relaxed);
                return None;
            }

            bitstream.skip_bits(offset).unwrap();
            let lf_global = lf_global.unwrap();
            let result = HfGlobal::parse(
                &mut bitstream,
                HfGlobalParams::new(
                    &self.image_header.metadata,
                    &self.header,
                    lf_global,
                    self.tracker.as_ref(),
                    &self.pool,
                ),
            );

            Some(match result {
                Ok(result) => {
                    self.all_group_offsets
                        .pass_group
                        .store(bitstream.num_read_bits(), Ordering::Relaxed);
                    Ok(result)
                }
                Err(e) if !loaded && e.unexpected_eof() => Err(e),
                Err(e) => {
                    self.all_group_offsets.has_error.store(3, Ordering::Relaxed);
                    Err(e)
                }
            })
        } else {
            if self.header.encoding == header::Encoding::Modular {
                return None;
            }

            let idx = self.toc.group_index_bitstream_order(TocGroupKind::HfGlobal);
            let group = self.data.get(idx)?;
            if group.bytes.len() < group.toc_group.size as usize {
                return None;
            }

            let mut bitstream = Bitstream::new(&group.bytes);
            let lf_global = if cached_lf_global.is_none() {
                match self.try_parse_lf_global()? {
                    Ok(lf_global) => Some(lf_global),
                    Err(e) => return Some(Err(e)),
                }
            } else {
                None
            };
            let lf_global = cached_lf_global.or(lf_global.as_ref()).unwrap();
            let params = HfGlobalParams::new(
                &self.image_header.metadata,
                &self.header,
                lf_global,
                self.tracker.as_ref(),
                &self.pool,
            );
            Some(HfGlobal::parse(&mut bitstream, params))
        }
    }

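    /// Returns a bitstream positioned at the start of the given pass group, along with a flag
    /// indicating whether the group data is only partially loaded.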
    pub fn pass_group_bitstream(
        &self,
        pass_idx: u32,
        group_idx: u32,
    ) -> Option<Result<PassGroupBitstream>> {
        Some(if self.toc.is_single_entry() {
            if self.all_group_offsets.has_error.load(Ordering::Relaxed) != 0 {
                return Some(Err(Error::HadError));
            }

            if pass_idx != 0 || group_idx != 0 {
                return None;
            }

            let group = self.data.first()?;
            let loaded = self.reading_data_index != 0;
            let mut bitstream = Bitstream::new(&group.bytes);
            let mut offset = self.all_group_offsets.pass_group.load(Ordering::Relaxed);
            if offset == 0 {
                let hf_global = self.try_parse_hf_global::<i32>(None)?;
                if let Err(e) = hf_global {
                    return Some(Err(e));
                }
                offset = self.all_group_offsets.pass_group.load(Ordering::Relaxed);
            }
            bitstream.skip_bits(offset).unwrap();

            Ok(PassGroupBitstream {
                bitstream,
                partial: !loaded,
            })
        } else {
            let idx = self
                .toc
                .group_index_bitstream_order(TocGroupKind::GroupPass {
                    pass_idx,
                    group_idx,
                });
            let group = self.data.get(idx)?;
            let partial = group.bytes.len() < group.toc_group.size as usize;

            let bitstream = Bitstream::new(&group.bytes);
            Ok(PassGroupBitstream { bitstream, partial })
        })
    }
}

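/// Bitstream of a single pass group, together with a flag indicating whether the group data is
/// only partially loaded.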
#[derive(Debug)]
pub struct PassGroupBitstream<'buf> {
    pub bitstream: Bitstream<'buf>,
    pub partial: bool,
}

impl Frame {
    /// Adjusts the cropping region of the image to the actual decoding region of the frame.
    ///
    /// The cropping region of the *image* needs to be adjusted before it can be used in a
    /// *frame*, for a few reasons:
    /// - A frame may be blended onto the canvas with an offset, which makes the image and the
    ///   frame have different coordinates.
    /// - Some filters reference neighboring samples, which requires padding around the region.
    ///
    /// This method takes care of both and adjusts the given region accordingly.
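    ///
    /// A minimal sketch of the intended usage, assuming `frame` is a parsed [`Frame`] and the
    /// region is given in image coordinates as `(left, top, width, height)`:
    ///
    /// ```ignore
    /// let mut region = (0u32, 0u32, 256u32, 256u32);
    /// frame.adjust_region(&mut region);
    /// // `region` now describes the frame-local area to decode, including any padding
    /// // required by the restoration filters.
    /// ```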
    pub fn adjust_region(&self, (left, top, width, height): &mut (u32, u32, u32, u32)) {
        if self.header.have_crop {
            *left = left.saturating_add_signed(-self.header.x0);
            *top = top.saturating_add_signed(-self.header.y0);
        }

        let mut padding = 0u32;
        if self.header.restoration_filter.gab.enabled() {
            tracing::debug!("Gabor-like filter requires padding of 1 pixel");
            padding = 1;
        }
        if self.header.restoration_filter.epf.enabled() {
            tracing::debug!("Edge-preserving filter requires padding of 3 pixels");
            padding = 3;
        }
        if padding > 0 {
            let delta_w = (*left).min(padding);
            let delta_h = (*top).min(padding);
            *left -= delta_w;
            *top -= delta_h;
            *width += delta_w + padding;
            *height += delta_h + padding;
        }
    }
}