use super::data::*;
use super::jbrd::{encode_jbrd, extract_exif, extract_icc, extract_xmp};
use crate::BLOCK_SIZE;
use crate::bit_writer::BitWriter;
use crate::container::wrap_in_container_jxlp;
use crate::entropy_coding::encode::{
OwnedAnsEntropyCode, build_entropy_code_ans, write_entropy_code_ans, write_tokens_ans,
};
use crate::entropy_coding::token::Token;
use crate::error::Result;
use crate::headers::color_encoding::ColorEncoding;
use crate::headers::file_header::{BitDepth, FileHeader, ImageMetadata};
use crate::headers::frame_header::{Encoding, FrameHeader};
use crate::vardct::ac_context;
use crate::vardct::ac_group::{collect_ac_coefficients_into, predict_from_top_and_left};
use crate::vardct::ac_strategy::AcStrategyMap;
use crate::vardct::chroma_from_luma::CflMap;
use crate::vardct::common::*;
use crate::vardct::dc_coding::{
NUM_DC_CONTEXTS, collect_ac_metadata_tokens_region, collect_dc_tokens_region,
};
use crate::vardct::frame::{
assemble_frame_sections, write_dc_group_from_tokens, write_quant_scales,
};
// Number of dequant-table slots in the JXL DequantMatrices header. Only the
// first (DCT8) slot gets custom weights here; the other 16 are written in
// default mode (see `write_quant_matrices_jpeg`).
const NUM_QUANT_TABLES: usize = 17;
/// Losslessly re-encodes a parsed JPEG into a bare JXL codestream.
///
/// Thin wrapper over [`encode_jpeg_to_jxl_inner`] that discards the
/// file-header split offset (only the container writer needs it).
pub fn encode_jpeg_to_jxl(jpeg: &JpegData) -> Result<Vec<u8>> {
    encode_jpeg_to_jxl_inner(jpeg).map(|(codestream, _file_header_size)| codestream)
}
/// Core JPEG->JXL transcode: emits one VarDCT frame carrying the JPEG's
/// quantized DCT coefficients verbatim (no requantization).
///
/// Returns `(codestream bytes, byte length of the file-header prefix)`;
/// the container writer uses the prefix length to split the codestream
/// across two `jxlp` boxes.
fn encode_jpeg_to_jxl_inner(jpeg: &JpegData) -> Result<(Vec<u8>, usize)> {
    let width = jpeg.width as usize;
    let height = jpeg.height as usize;
    // JXL channel -> JPEG component index. YCbCr maps channels (0,1,2) to
    // JPEG components (Cb, Y, Cr); grayscale reuses component 0 for all three
    // slots (the aliased chroma planes are zeroed out below).
    let jpeg_c_map: [usize; 3] = if jpeg.components.len() == 1 {
        [0, 0, 0]
    } else {
        match jpeg.component_type {
            JpegComponentType::YCbCr => [1, 0, 2],
            _ => [0, 1, 2],
        }
    };
    let num_components = jpeg.components.len();
    if num_components != 3 && num_components != 1 {
        return Err(crate::error::Error::InvalidInput(format!(
            "JPEG reencoding requires 1 or 3 components, got {num_components}"
        )));
    }
    // Per-channel subsampling code (see `compute_jpeg_upsampling`);
    // grayscale is always code 0.
    let jpeg_upsampling = if num_components == 3 {
        compute_jpeg_upsampling(jpeg, &jpeg_c_map)
    } else {
        [0; 3]
    };
    // The channel with the largest raw shift (typically luma) defines the
    // finest block grid; the per-channel shifts below are relative to it.
    let max_raw_hs = jpeg_upsampling
        .iter()
        .map(|&u| JPEG_UPSAMPLING_H_SHIFT[u as usize])
        .max()
        .unwrap_or(0);
    let max_raw_vs = jpeg_upsampling
        .iter()
        .map(|&u| JPEG_UPSAMPLING_V_SHIFT[u as usize])
        .max()
        .unwrap_or(0);
    // (hshift, vshift) per channel: log2 of how much coarser that channel's
    // block grid is than the finest grid (0 for the finest channel).
    let channel_shifts: [(usize, usize); 3] = [
        (
            max_raw_hs - JPEG_UPSAMPLING_H_SHIFT[jpeg_upsampling[0] as usize],
            max_raw_vs - JPEG_UPSAMPLING_V_SHIFT[jpeg_upsampling[0] as usize],
        ),
        (
            max_raw_hs - JPEG_UPSAMPLING_H_SHIFT[jpeg_upsampling[1] as usize],
            max_raw_vs - JPEG_UPSAMPLING_V_SHIFT[jpeg_upsampling[1] as usize],
        ),
        (
            max_raw_hs - JPEG_UPSAMPLING_H_SHIFT[jpeg_upsampling[2] as usize],
            max_raw_vs - JPEG_UPSAMPLING_V_SHIFT[jpeg_upsampling[2] as usize],
        ),
    ];
    let max_hs = channel_shifts.iter().map(|&(hs, _)| hs).max().unwrap_or(0);
    let max_vs = channel_shifts.iter().map(|&(_, vs)| vs).max().unwrap_or(0);
    // Block-grid dimensions in 8x8 blocks, rounded up so the coarsest
    // channel's grid divides evenly.
    let xsize_blocks = div_ceil(width, 8 << max_hs) << max_hs;
    let ysize_blocks = div_ceil(height, 8 << max_vs) << max_vs;
    // Pull the quantized coefficients out of the JPEG: DC planes, transposed
    // AC blocks, and nonzero-AC counts (u8 and raw u16 variants).
    let (mut quant_dc, mut quant_ac, mut nzeros, mut raw_nzeros) =
        map_jpeg_coefficients(jpeg, &jpeg_c_map)?;
    let is_gray = num_components == 1;
    if is_gray {
        // `jpeg_c_map` aliased component 0 into channels 0 and 2; blank those
        // so only the luma channel carries data.
        for c in [0, 2] {
            for row in &mut quant_dc[c] {
                row.fill(0);
            }
            for row in &mut quant_ac[c] {
                for block in row.iter_mut() {
                    block.fill(0);
                }
            }
            for row in &mut nzeros[c] {
                row.fill(0);
            }
            for row in &mut raw_nzeros[c] {
                row.fill(0);
            }
        }
    }
    // JPEG uses only 8x8 DCTs, no chroma-from-luma, and a flat quant field
    // of 1 (the real quantization lives in the dequant tables).
    let ac_strategy = AcStrategyMap::new_dct8(xsize_blocks, ysize_blocks);
    let xsize_tiles = div_ceil(xsize_blocks, TILE_DIM_IN_BLOCKS);
    let ysize_tiles = div_ceil(ysize_blocks, TILE_DIM_IN_BLOCKS);
    let cfl_map = CflMap::zeros(xsize_tiles, ysize_tiles);
    let quant_field = vec![1u8; xsize_blocks * ysize_blocks];
    let xsize_groups = div_ceil(width, GROUP_DIM);
    let ysize_groups = div_ceil(height, GROUP_DIM);
    let xsize_dc_groups = div_ceil(width, DC_GROUP_DIM);
    let ysize_dc_groups = div_ceil(height, DC_GROUP_DIM);
    let num_groups = xsize_groups * ysize_groups;
    let num_dc_groups = xsize_dc_groups * ysize_dc_groups;
    let raw_qtables = build_raw_qtables(jpeg, &jpeg_c_map)?;
    let dc_dequant = build_dc_dequant(jpeg, &jpeg_c_map)?;
    // Tokenize every DC group (DC coefficients + per-block AC metadata)
    // up front so one shared histogram can be built over all groups.
    let mut dc_tokens_per_group: Vec<Vec<Token>> = Vec::with_capacity(num_dc_groups);
    let mut ac_metadata_tokens_per_group: Vec<Vec<Token>> = Vec::with_capacity(num_dc_groups);
    for dc_group_idx in 0..num_dc_groups {
        let dc_gx = dc_group_idx % xsize_dc_groups;
        let dc_gy = dc_group_idx / xsize_dc_groups;
        let start_bx = dc_gx * DC_GROUP_DIM_IN_BLOCKS;
        let start_by = dc_gy * DC_GROUP_DIM_IN_BLOCKS;
        let end_bx = (start_bx + DC_GROUP_DIM_IN_BLOCKS).min(xsize_blocks);
        let end_by = (start_by + DC_GROUP_DIM_IN_BLOCKS).min(ysize_blocks);
        let region_xsize = end_bx - start_bx;
        let region_ysize = end_by - start_by;
        let dc_tokens = collect_dc_tokens_region(
            &quant_dc,
            start_bx,
            start_by,
            end_bx,
            end_by,
            &channel_shifts,
        );
        let md_tokens = collect_ac_metadata_tokens_region(
            region_xsize,
            region_ysize,
            &quant_field,
            xsize_blocks,
            start_bx,
            start_by,
            &cfl_map,
            &ac_strategy,
            None,
        );
        dc_tokens_per_group.push(dc_tokens);
        ac_metadata_tokens_per_group.push(md_tokens);
    }
    // Tokenize the AC coefficients per group; channels are interleaved per
    // block position in Y-first order [1, 0, 2].
    let block_ctx_map = ac_context::BlockCtxMap::default();
    let mut ac_section_tokens: Vec<Vec<Token>> = Vec::with_capacity(num_groups);
    for group_idx in 0..num_groups {
        let group_x = group_idx % xsize_groups;
        let group_y = group_idx / xsize_groups;
        let start_bx = group_x * GROUP_DIM_IN_BLOCKS;
        let start_by = group_y * GROUP_DIM_IN_BLOCKS;
        let end_bx = (start_bx + GROUP_DIM_IN_BLOCKS).min(xsize_blocks);
        let end_by = (start_by + GROUP_DIM_IN_BLOCKS).min(ysize_blocks);
        let mut tokens = Vec::new();
        for by in start_by..end_by {
            for bx in start_bx..end_bx {
                // Every block is a plain DCT8 (strategy 0).
                let strategy_code = 0u8;
                let raw_strategy = 0u8;
                for &c in &[1usize, 0, 2] {
                    let (hs, vs) = channel_shifts[c];
                    // A subsampled channel only owns the top-left block of
                    // each (1<<hs) x (1<<vs) cell; skip the others.
                    if hs > 0 && (bx & ((1 << hs) - 1)) != 0 {
                        continue;
                    }
                    if vs > 0 && (by & ((1 << vs) - 1)) != 0 {
                        continue;
                    }
                    let ch_bx = bx >> hs;
                    let ch_by = by >> vs;
                    let ch_start_bx = start_bx >> hs;
                    let ch_start_by = start_by >> vs;
                    let nz = raw_nzeros[c][ch_by][ch_bx];
                    let local_bx = ch_bx - ch_start_bx;
                    // Predict the nonzero count from top/left neighbours
                    // within this group; 32 is the no-neighbour fallback.
                    let row_top = if ch_by > ch_start_by {
                        Some(nzeros[c][ch_by - 1].as_slice())
                    } else {
                        None
                    };
                    let predicted_nz = if local_bx == 0 {
                        match row_top {
                            Some(top) => top[ch_bx] as i32,
                            None => 32,
                        }
                    } else {
                        predict_from_top_and_left(row_top, &nzeros[c][ch_by], ch_bx, 32)
                    };
                    let qf_val = quant_field[by * xsize_blocks + bx] as u32;
                    let block_ctx = block_ctx_map.block_context(c, strategy_code, qf_val);
                    collect_ac_coefficients_into(
                        &mut tokens,
                        &quant_ac[c][ch_by][ch_bx],
                        raw_strategy,
                        nz,
                        predicted_nz,
                        block_ctx,
                        block_ctx_map.num_ctxs,
                        None,
                    );
                }
            }
        }
        ac_section_tokens.push(tokens);
    }
    // One shared ANS code over all DC-section tokens (DC + AC metadata)...
    let dc_num_contexts = NUM_DC_CONTEXTS;
    let total_dc_tokens: usize = dc_tokens_per_group.iter().map(|t| t.len()).sum::<usize>()
        + ac_metadata_tokens_per_group
            .iter()
            .map(|t| t.len())
            .sum::<usize>();
    let mut all_dc_tokens = Vec::with_capacity(total_dc_tokens);
    for section in &dc_tokens_per_group {
        all_dc_tokens.extend_from_slice(section);
    }
    for section in &ac_metadata_tokens_per_group {
        all_dc_tokens.extend_from_slice(section);
    }
    let dc_code = build_entropy_code_ans(&all_dc_tokens, dc_num_contexts);
    // ...and one over all AC tokens.
    let ac_num_contexts = block_ctx_map.num_ac_contexts();
    let total_ac_tokens: usize = ac_section_tokens.iter().map(|t| t.len()).sum();
    let mut all_ac_tokens = Vec::with_capacity(total_ac_tokens);
    for section in &ac_section_tokens {
        all_ac_tokens.extend_from_slice(section);
    }
    let ac_code = build_entropy_code_ans(&all_ac_tokens, ac_num_contexts);
    // --- Bitstream assembly ---
    let mut writer = BitWriter::with_capacity(width * height * 4);
    let icc_profile = extract_icc(jpeg);
    let mut file_header = build_jpeg_file_header(width, height, is_gray);
    if icc_profile.is_some() {
        file_header.metadata.color_encoding.want_icc = true;
    }
    file_header.write(&mut writer)?;
    if let Some(ref icc) = icc_profile {
        crate::icc::write_icc(icc, &mut writer)?;
    }
    writer.zero_pad_to_byte();
    // Everything written so far is the "file header" part; the container
    // writer splits the codestream at this byte offset.
    let file_header_bytes = writer.bytes_written();
    let frame_header = build_jpeg_frame_header(jpeg, jpeg_upsampling);
    frame_header.write(&mut writer)?;
    // All DC groups share `dc_code`; this closure writes a token slice with it.
    let write_tok = |tokens: &[Token], w: &mut BitWriter| -> Result<()> {
        write_tokens_ans(tokens, &dc_code, None, w)
    };
    let mut dc_global = BitWriter::new();
    write_dc_global_jpeg(&dc_dequant, &dc_code, num_dc_groups, &mut dc_global)?;
    let mut dc_groups = Vec::with_capacity(num_dc_groups);
    for dc_group_idx in 0..num_dc_groups {
        let mut dc_group = BitWriter::new();
        write_dc_group_from_tokens(
            dc_group_idx,
            xsize_blocks,
            ysize_blocks,
            xsize_dc_groups,
            &dc_tokens_per_group[dc_group_idx],
            &ac_metadata_tokens_per_group[dc_group_idx],
            &ac_strategy,
            &write_tok,
            &mut dc_group,
        )?;
        dc_groups.push(dc_group);
    }
    let mut ac_global = BitWriter::new();
    write_ac_global_jpeg(&raw_qtables, num_groups, &ac_code, &mut ac_global)?;
    let mut ac_groups = Vec::with_capacity(num_groups);
    for ac_tokens in &ac_section_tokens {
        let mut ac_group_writer = BitWriter::new();
        write_tokens_ans(ac_tokens, &ac_code, None, &mut ac_group_writer)?;
        ac_groups.push(ac_group_writer);
    }
    // Stitch the sections into the frame (presumably writing the TOC as
    // well — see `assemble_frame_sections`) and finalize the codestream.
    assemble_frame_sections(dc_global, dc_groups, ac_global, ac_groups, &mut writer)?;
    Ok((writer.finish_with_padding(), file_header_bytes))
}
/// Losslessly re-encodes a parsed JPEG into a full JXL container, embedding
/// the JPEG-bitstream-reconstruction data (`jbrd`) plus any Exif/XMP payloads
/// so the original JPEG file can be reconstructed byte-exactly.
pub fn encode_jpeg_to_jxl_container(jpeg: &JpegData) -> Result<Vec<u8>> {
    let (codestream, file_header_size) = encode_jpeg_to_jxl_inner(jpeg)?;
    let jbrd = encode_jbrd(jpeg)?;
    let exif = extract_exif(jpeg);
    let xmp = extract_xmp(jpeg);
    // The codestream is split right after the file header; presumably the
    // metadata boxes go between the two `jxlp` partial-codestream boxes
    // (see `wrap_in_container_jxlp`).
    let (header_part, frames_part) = codestream.split_at(file_header_size);
    Ok(wrap_in_container_jxlp(
        header_part,
        frames_part,
        &jbrd,
        exif.as_deref(),
        xmp.as_deref(),
    ))
}
#[allow(clippy::type_complexity)]
/// Reorganizes the JPEG's quantized DCT coefficients into the per-channel
/// layouts the VarDCT writer consumes.
///
/// For each JXL channel (selected via `jpeg_c_map`) this returns, indexed as
/// `[channel][block_y][block_x]`:
/// - the DC coefficients (`i16`),
/// - the AC coefficients as transposed 8x8 blocks (`[i32; BLOCK_SIZE]`,
///   entry `x*8+y` holds JPEG coefficient `y*8+x`; the DC slot stays 0),
/// - the nonzero-AC counts, narrowed to `u8` and as raw `u16`.
fn map_jpeg_coefficients(
    jpeg: &JpegData,
    jpeg_c_map: &[usize; 3],
) -> Result<(
    [Vec<Vec<i16>>; 3],
    [Vec<Vec<[i32; BLOCK_SIZE]>>; 3],
    [Vec<Vec<u8>>; 3],
    [Vec<Vec<u16>>; 3],
)> {
    let mut quant_dc: [Vec<Vec<i16>>; 3] = Default::default();
    let mut quant_ac: [Vec<Vec<[i32; BLOCK_SIZE]>>; 3] = Default::default();
    let mut nzeros: [Vec<Vec<u8>>; 3] = Default::default();
    let mut raw_nzeros: [Vec<Vec<u16>>; 3] = Default::default();
    for (jxl_c, &jpeg_c) in jpeg_c_map.iter().enumerate() {
        let comp = &jpeg.components[jpeg_c];
        let xb = comp.width_in_blocks as usize;
        let yb = comp.height_in_blocks as usize;
        let mut dc_rows = Vec::with_capacity(yb);
        let mut ac_rows = Vec::with_capacity(yb);
        let mut nz_rows = Vec::with_capacity(yb);
        let mut raw_nz_rows = Vec::with_capacity(yb);
        for by in 0..yb {
            let mut dc_row = Vec::with_capacity(xb);
            let mut ac_row: Vec<[i32; BLOCK_SIZE]> = Vec::with_capacity(xb);
            let mut nz_row = Vec::with_capacity(xb);
            let mut raw_nz_row = Vec::with_capacity(xb);
            for bx in 0..xb {
                // Blocks are stored row-major, 64 coefficients per block.
                let coeffs = &comp.coeffs[(by * xb + bx) * 64..][..64];
                dc_row.push(coeffs[0]);
                let mut ac_block = [0i32; BLOCK_SIZE];
                let mut nz_count = 0u16;
                // Skip index 0 (DC); store the AC coefficients transposed.
                for natural_idx in 1..64 {
                    let (y, x) = (natural_idx / 8, natural_idx % 8);
                    let v = coeffs[natural_idx] as i32;
                    ac_block[x * 8 + y] = v;
                    if v != 0 {
                        nz_count += 1;
                    }
                }
                ac_row.push(ac_block);
                // At most 63 nonzero AC coefficients, so u8 never truncates.
                nz_row.push(nz_count as u8);
                raw_nz_row.push(nz_count);
            }
            dc_rows.push(dc_row);
            ac_rows.push(ac_row);
            nz_rows.push(nz_row);
            raw_nz_rows.push(raw_nz_row);
        }
        quant_dc[jxl_c] = dc_rows;
        quant_ac[jxl_c] = ac_rows;
        nzeros[jxl_c] = nz_rows;
        raw_nzeros[jxl_c] = raw_nz_rows;
    }
    Ok((quant_dc, quant_ac, nzeros, raw_nzeros))
}
/// Collects the three active JPEG quantization tables into one flat buffer,
/// transposed into the `x*8+y` order used for the JXL DCT8 weights.
///
/// Output layout: `[channel * 64 + x * 8 + y]` holds the JPEG table entry
/// `y * 8 + x` of the component mapped to that JXL channel.
fn build_raw_qtables(jpeg: &JpegData, jpeg_c_map: &[usize; 3]) -> Result<Vec<i32>> {
    let mut qtables = vec![0i32; 3 * 64];
    for (jxl_c, &jpeg_c) in jpeg_c_map.iter().enumerate() {
        let quant_idx = jpeg.components[jpeg_c].quant_idx as usize;
        let values = &jpeg.quant[quant_idx].values;
        let dst = &mut qtables[jxl_c * 64..(jxl_c + 1) * 64];
        for natural_idx in 0..64 {
            let (y, x) = (natural_idx / 8, natural_idx % 8);
            // Transpose: JPEG tables are row-major.
            dst[x * 8 + y] = values[natural_idx];
        }
    }
    Ok(qtables)
}
/// Derives the per-channel DC dequantization factor from each mapped JPEG
/// component's quant table: DC step / (255 * 8), matching the denominator
/// used for the AC weights in `write_quant_matrices_jpeg`.
fn build_dc_dequant(jpeg: &JpegData, jpeg_c_map: &[usize; 3]) -> Result<[f32; 3]> {
    let mut dc_dequant = [0.0f32; 3];
    for (slot, &jpeg_c) in dc_dequant.iter_mut().zip(jpeg_c_map.iter()) {
        let quant_idx = jpeg.components[jpeg_c].quant_idx as usize;
        // values[0] is the DC entry of the 8x8 quant table.
        *slot = jpeg.quant[quant_idx].values[0] as f32 / (255.0 * 8.0);
    }
    Ok(dc_dequant)
}
/// Builds the JXL file header for a transcoded JPEG: 8-bit, non-XYB, with
/// either a grayscale (relative-intent) or sRGB color encoding.
fn build_jpeg_file_header(width: usize, height: usize, is_gray: bool) -> FileHeader {
    let color_encoding = match is_gray {
        true => ColorEncoding {
            rendering_intent: crate::headers::color_encoding::RenderingIntent::Relative,
            ..ColorEncoding::gray()
        },
        false => ColorEncoding::srgb(),
    };
    let metadata = ImageMetadata {
        bit_depth: BitDepth::uint8(),
        color_encoding,
        extra_channels: Vec::new(),
        // Coefficients stay in the JPEG's YCbCr/grayscale space, not XYB.
        xyb_encoded: false,
        ..ImageMetadata::default()
    };
    FileHeader {
        width: width as u32,
        height: height as u32,
        metadata,
    }
}
/// Maps each JXL channel's JPEG sampling factors to a 2-bit code:
/// 0 = (odd h, odd v), 1 = (even h, even v), 2 = (even h, odd v),
/// 3 = (odd h, even v). For 4:2:0 YCbCr this yields `[0, 1, 0]`
/// (exercised by the unit test below).
fn compute_jpeg_upsampling(jpeg: &JpegData, jpeg_c_map: &[usize; 3]) -> [u8; 3] {
    let mut upsampling = [0u8; 3];
    for (slot, &jpeg_c) in upsampling.iter_mut().zip(jpeg_c_map.iter()) {
        let comp = &jpeg.components[jpeg_c];
        // trailing_zeros > 0 <=> the sampling factor is even.
        let h_even = comp.h_samp_factor.trailing_zeros() > 0;
        let v_even = comp.v_samp_factor.trailing_zeros() > 0;
        *slot = match (h_even, v_even) {
            (false, false) => 0,
            (true, true) => 1,
            (true, false) => 2,
            (false, true) => 3,
        };
    }
    upsampling
}
/// Builds the frame header for the transcoded frame: VarDCT encoding,
/// non-XYB, YCbCr transform (grayscale also takes the YCbCr path), no
/// gaborish/EPF filtering, and quant-matrix scales of 2.
fn build_jpeg_frame_header(jpeg: &JpegData, jpeg_upsampling: [u8; 3]) -> FrameHeader {
    let single_component = jpeg.components.len() == 1;
    let do_ycbcr = single_component || jpeg.component_type == JpegComponentType::YCbCr;
    FrameHeader {
        encoding: Encoding::VarDct,
        xyb_encoded: false,
        do_ycbcr,
        jpeg_upsampling,
        // NOTE(review): flag 0x80 presumably means "skip adaptive DC
        // smoothing", required for bit-exact reconstruction — confirm.
        flags: 0x80,
        gaborish: false,
        epf_iters: 0,
        x_qm_scale: 2,
        b_qm_scale: 2,
        ..FrameHeader::default()
    }
}
/// Writes the DcGlobal section for the transcoded frame: DC dequant factors,
/// quantizer scales, the block context map, color-correlation defaults, the
/// context tree, and the shared DC ANS histograms.
fn write_dc_global_jpeg(
    dc_dequant: &[f32; 3],
    dc_code: &OwnedAnsEntropyCode,
    num_dc_groups: usize,
    writer: &mut BitWriter,
) -> Result<()> {
    // Leading 0 bit, then the three custom DC dequant factors as f16.
    // NOTE(review): the *128.0 scale presumably matches the decoder-side
    // f16 encoding of DC dequant values — confirm against the spec.
    writer.write(1, 0)?;
    for &dcq in dc_dequant.iter() {
        write_f16(dcq * 128.0, writer)?;
    }
    write_quant_scales(65536, 1, writer)?;
    // 1 zero bit + 16 zero bits before the block context map —
    // NOTE(review): presumably "not all-default" plus a zero field; confirm
    // against `write_block_context_map`.
    writer.write(1, 0)?;
    writer.write(16, 0)?;
    crate::vardct::context_tree::write_block_context_map(writer)?;
    // Color-correlation header: zeroed selectors, 0.0/0.0 base factors as
    // f16, and two bytes of 128 — NOTE(review): 128 presumably encodes a
    // neutral (zero) correlation offset; confirm.
    writer.write(1, 0)?;
    writer.write(2, 0)?;
    write_f16(0.0, writer)?;
    write_f16(0.0, writer)?;
    writer.write(8, 128)?;
    writer.write(8, 128)?;
    crate::vardct::context_tree::write_context_tree(num_dc_groups, writer)?;
    // DC/metadata histograms; the leading 0 bit is presumably the
    // LZ77-enabled flag (disabled).
    writer.write(1, 0)?;
    write_entropy_code_ans(dc_code, writer)?;
    Ok(())
}
/// Writes the AcGlobal section: custom dequant matrices, per-group histogram
/// selectors, a fixed 2-bit field, and the shared AC ANS histograms.
fn write_ac_global_jpeg(
    raw_qtables: &[i32],
    num_groups: usize,
    ac_code: &OwnedAnsEntropyCode,
    writer: &mut BitWriter,
) -> Result<()> {
    // Leading 0 bit (custom matrices follow, not all-default), then the
    // DCT8 matrices built from the JPEG quant tables.
    writer.write(1, 0)?;
    write_quant_matrices_jpeg(raw_qtables, writer)?;
    // Every group uses histogram set 0: ceil(log2(num_groups)) zero bits.
    // Skipped entirely when there is a single group (bit count 0).
    let num_histo_bits = ceil_log2_nonzero(num_groups);
    if num_histo_bits != 0 {
        writer.write(num_histo_bits as usize, 0)?;
    }
    // NOTE(review): the 2-bit value 2 presumably selects the default
    // coefficient order for the used AC strategies — confirm.
    writer.write(2, 2)?;
    // AC histograms; leading 0 bit presumably the LZ77-enabled flag.
    writer.write(1, 0)?;
    write_entropy_code_ans(ac_code, writer)?;
    Ok(())
}
/// Writes the `NUM_QUANT_TABLES` dequant-table headers. The first table
/// (DCT8) is written in raw mode (selector 7) with denominator 1/(8*255)
/// followed by the modular-coded JPEG tables; every other table uses
/// selector 0 (default weights).
fn write_quant_matrices_jpeg(raw_qtables: &[i32], writer: &mut BitWriter) -> Result<()> {
    // Table 0: raw-weights mode with the shared denominator.
    writer.write(3, 7)?;
    let qtable_den = 1.0f32 / (8.0 * 255.0);
    write_f16(qtable_den, writer)?;
    write_raw_quant_table_modular(raw_qtables, writer)?;
    // Tables 1..NUM_QUANT_TABLES: default mode, 3-bit selector 0 each.
    for _ in 1..NUM_QUANT_TABLES {
        writer.write(3, 0)?;
    }
    Ok(())
}
/// Encodes the three raw 8x8 quant tables as a small modular sub-image
/// (gradient-predictor tree + ANS residuals) — the payload of the raw
/// (mode 7) dequant table written by `write_quant_matrices_jpeg`.
fn write_raw_quant_table_modular(qtables: &[i32], writer: &mut BitWriter) -> Result<()> {
    use crate::modular::channel::{Channel, ModularImage};
    use crate::modular::section::collect_all_residuals;
    // One 8x8 channel per JXL color channel, holding the (already
    // transposed) weights from `build_raw_qtables`.
    let mut channels = Vec::with_capacity(3);
    for c in 0..3 {
        let data: Vec<i32> = (0..64).map(|i| qtables[c * 64 + i]).collect();
        channels.push(Channel::from_vec(data, 8, 8)?);
    }
    let image = ModularImage {
        channels,
        bit_depth: 8,
        is_grayscale: false,
        has_alpha: false,
    };
    let (residuals, _max_residual) = collect_all_residuals(&image);
    // Modular sub-bitstream header bits (1+1+2) — NOTE(review): presumably
    // use-global-tree / predictor / transform selectors; confirm against the
    // modular encoder.
    writer.write(1, 0)?;
    writer.write(1, 1)?;
    writer.write(2, 0)?;
    // Inline gradient-predictor tree (histogram + tokens), then the
    // ANS-coded residual stream.
    let (tree_depths, tree_codes) =
        crate::modular::encode::write_tree_histogram_for_gradient(writer)?;
    crate::modular::encode::write_gradient_tree_tokens(writer, &tree_depths, &tree_codes)?;
    let (tokens, code) = crate::modular::encode::build_ans_modular_code(&residuals);
    crate::modular::encode::write_ans_modular_header(writer, &code)?;
    crate::modular::encode::write_ans_modular_tokens(writer, &tokens, &code)?;
    Ok(())
}
#[cfg(not(test))]
use crate::f16::write_f16;
#[cfg(test)]
use crate::f16::{f32_to_f16_bits, write_f16};
#[cfg(test)]
mod tests {
    use super::*;
    // Sanity-checks the f16 encoder on well-known bit patterns, including
    // the quant-table denominator written by `write_quant_matrices_jpeg`
    // (1/2040 == 1/(8*255)).
    #[test]
    fn test_f16_conversion() {
        assert_eq!(f32_to_f16_bits(1.0).unwrap(), 0x3C00);
        assert_eq!(f32_to_f16_bits(0.0).unwrap(), 0x0000);
        assert_eq!(f32_to_f16_bits(-1.0).unwrap(), 0xBC00);
        let qtable_den = 1.0f32 / 2040.0;
        let bits = f32_to_f16_bits(qtable_den).unwrap();
        // Any small positive f16 is acceptable; just guard against 0/huge.
        assert!(
            bits > 0 && bits < 0x4000,
            "qtable_den f16 bits = 0x{bits:04X}"
        );
    }
    // End-to-end smoke test on a corpus JPEG; verifies the bare-codestream
    // signature bytes (0xFF 0x0A) and saves the output for inspection.
    #[test]
    fn test_encode_real_jpeg() {
        crate::skip_without_corpus!();
        let path = format!(
            "{}/imageflow/test_inputs/orientation/Landscape_1.jpg",
            crate::test_helpers::corpus_dir().display()
        );
        let data = std::fs::read(path).expect("failed to read test JPEG");
        let jpeg = super::super::parse::read_jpeg(&data).expect("failed to parse JPEG");
        let jxl = encode_jpeg_to_jxl(&jpeg).expect("failed to encode JPEG to JXL");
        assert!(jxl.len() > 10, "JXL output too short: {} bytes", jxl.len());
        assert_eq!(jxl[0], 0xFF);
        assert_eq!(jxl[1], 0x0A);
        eprintln!(
            "Encoded {}x{} JPEG to {} bytes JXL",
            jpeg.width,
            jpeg.height,
            jxl.len()
        );
        crate::test_helpers::save_test_output("jpeg-reencoding", "landscape1.jxl", &jxl);
    }
    // Exercises the subsampled (4:2:0) path end to end.
    // NOTE(review): reads a fixture from the shared test output dir, so this
    // depends on another test having produced `test128_420.jpg` — confirm
    // the ordering/setup is guaranteed.
    #[test]
    fn test_encode_420_jpeg() {
        let path =
            crate::test_helpers::output_dir_for("jpeg-reencoding", "").join("test128_420.jpg");
        let data = std::fs::read(&path).expect("failed to read test JPEG");
        let jpeg = super::super::parse::read_jpeg(&data).expect("failed to parse JPEG");
        // Confirm the fixture really is 4:2:0 (luma 2x2, chroma 1x1).
        assert_eq!(jpeg.components[0].h_samp_factor, 2);
        assert_eq!(jpeg.components[0].v_samp_factor, 2);
        assert_eq!(jpeg.components[1].h_samp_factor, 1);
        assert_eq!(jpeg.components[1].v_samp_factor, 1);
        let jxl = encode_jpeg_to_jxl(&jpeg).expect("failed to encode 4:2:0 JPEG to JXL");
        assert!(jxl.len() > 10, "JXL output too short: {} bytes", jxl.len());
        assert_eq!(jxl[0], 0xFF);
        assert_eq!(jxl[1], 0x0A);
        eprintln!(
            "Encoded {}x{} 4:2:0 JPEG to {} bytes JXL",
            jpeg.width,
            jpeg.height,
            jxl.len()
        );
    }
    // Checks the sampling-factor -> jpeg_upsampling code mapping for the
    // four common chroma layouts (4:2:0, 4:2:2, 4:4:0, 4:4:4).
    #[test]
    fn test_compute_jpeg_upsampling() {
        // Minimal synthetic JpegData: only sampling factors matter here.
        let jpeg = JpegData {
            width: 128,
            height: 128,
            restart_interval: 0,
            app_data: Vec::new(),
            app_marker_type: Vec::new(),
            com_data: Vec::new(),
            quant: Vec::new(),
            huffman_code: Vec::new(),
            components: vec![
                JpegComponent {
                    id: 1,
                    h_samp_factor: 2,
                    v_samp_factor: 2,
                    quant_idx: 0,
                    width_in_blocks: 16,
                    height_in_blocks: 16,
                    coeffs: Vec::new(),
                },
                JpegComponent {
                    id: 2,
                    h_samp_factor: 1,
                    v_samp_factor: 1,
                    quant_idx: 1,
                    width_in_blocks: 8,
                    height_in_blocks: 8,
                    coeffs: Vec::new(),
                },
                JpegComponent {
                    id: 3,
                    h_samp_factor: 1,
                    v_samp_factor: 1,
                    quant_idx: 1,
                    width_in_blocks: 8,
                    height_in_blocks: 8,
                    coeffs: Vec::new(),
                },
            ],
            scan_info: Vec::new(),
            marker_order: Vec::new(),
            inter_marker_data: Vec::new(),
            tail_data: Vec::new(),
            has_zero_padding_bit: false,
            padding_bits: Vec::new(),
            component_type: JpegComponentType::YCbCr,
        };
        // Standard YCbCr channel mapping (Cb, Y, Cr).
        let c_map = [1usize, 0, 2];
        let up = compute_jpeg_upsampling(&jpeg, &c_map);
        assert_eq!(up, [0, 1, 0], "expected [0,1,0] for 4:2:0 YCbCr");
        let mut jpeg_422 = jpeg.clone();
        jpeg_422.components[0].v_samp_factor = 1;
        let up_422 = compute_jpeg_upsampling(&jpeg_422, &c_map);
        assert_eq!(up_422, [0, 2, 0], "expected [0,2,0] for 4:2:2 YCbCr");
        let mut jpeg_440 = jpeg.clone();
        jpeg_440.components[0].h_samp_factor = 1;
        let up_440 = compute_jpeg_upsampling(&jpeg_440, &c_map);
        assert_eq!(up_440, [0, 3, 0], "expected [0,3,0] for 4:4:0 YCbCr");
        let mut jpeg_444 = jpeg.clone();
        jpeg_444.components[0].h_samp_factor = 1;
        jpeg_444.components[0].v_samp_factor = 1;
        let up_444 = compute_jpeg_upsampling(&jpeg_444, &c_map);
        assert_eq!(up_444, [0, 0, 0], "expected [0,0,0] for 4:4:4 YCbCr");
    }
}