#![allow(dead_code)]
extern crate std;
use alloc::borrow::Cow;
use alloc::vec::Vec;

use rgb::alt::Gray;
use rgb::{Rgb, Rgba};
use whereat::{At, at};
use zencodec::decode::{AnimationFrame, DecodeCapabilities, DecodeOutput, OutputInfo};
use zencodec::encode::{EncodeCapabilities, EncodeOutput};
use zencodec::{ImageFormat, ImageInfo, Metadata, ResourceLimits};
use zenpixels::{Pixel, PixelDescriptor, PixelSlice, PixelSliceMut};

use crate::decode::PngDecodeConfig;
use crate::encode::EncodeConfig;
use crate::error::PngError;
const DEFAULT_TIMEOUT_MS: u64 = 120_000;
static ENCODE_DESCRIPTORS: &[PixelDescriptor] = &[
PixelDescriptor::RGB8_SRGB,
PixelDescriptor::RGBA8_SRGB,
PixelDescriptor::GRAY8_SRGB,
PixelDescriptor::BGRA8_SRGB,
PixelDescriptor::RGB16_SRGB,
PixelDescriptor::RGBA16_SRGB,
PixelDescriptor::GRAY16_SRGB,
PixelDescriptor::RGBF32_LINEAR,
PixelDescriptor::RGBAF32_LINEAR,
PixelDescriptor::GRAYF32_LINEAR,
];
static DECODE_DESCRIPTORS: &[PixelDescriptor] = &[
PixelDescriptor::RGB8_SRGB,
PixelDescriptor::RGBA8_SRGB,
PixelDescriptor::GRAY8_SRGB,
PixelDescriptor::BGRA8_SRGB,
PixelDescriptor::RGB16_SRGB,
PixelDescriptor::RGBA16_SRGB,
PixelDescriptor::GRAY16_SRGB,
PixelDescriptor::RGBF32_LINEAR,
PixelDescriptor::RGBAF32_LINEAR,
PixelDescriptor::GRAYF32_LINEAR,
];
#[derive(Clone, Debug)]
pub struct PngEncoderConfig {
config: EncodeConfig,
effort: Option<i32>,
quality: Option<f32>,
lossless: bool,
}
impl PngEncoderConfig {
#[must_use]
pub fn new() -> Self {
Self {
config: EncodeConfig::default(),
effort: None,
quality: None,
lossless: true,
}
}
#[must_use]
pub fn with_compression(mut self, compression: crate::Compression) -> Self {
self.config.compression = compression;
self
}
#[must_use]
pub fn with_filter(mut self, filter: crate::Filter) -> Self {
self.config.filter = filter;
self
}
#[must_use]
pub fn with_near_lossless_bits(mut self, bits: u8) -> Self {
self.config.near_lossless_bits = bits;
self
}
pub fn encode_rgb8(
&self,
img: imgref::ImgRef<'_, Rgb<u8>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_rgba8(
&self,
img: imgref::ImgRef<'_, Rgba<u8>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_gray8(
&self,
img: imgref::ImgRef<'_, Gray<u8>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_rgb16(
&self,
img: imgref::ImgRef<'_, Rgb<u16>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_rgba16(
&self,
img: imgref::ImgRef<'_, Rgba<u16>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_gray16(
&self,
img: imgref::ImgRef<'_, Gray<u16>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_rgb_f32(
&self,
img: imgref::ImgRef<'_, Rgb<f32>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_rgba_f32(
&self,
img: imgref::ImgRef<'_, Rgba<f32>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_gray_f32(
&self,
img: imgref::ImgRef<'_, Gray<f32>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
pub fn encode_bgra8(
&self,
img: imgref::ImgRef<'_, rgb::alt::BGRA<u8>>,
) -> Result<EncodeOutput, At<PngError>> {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
self.clone()
.job()
.encoder()?
.encode(PixelSlice::from(img).erase())
}
}
impl Default for PngEncoderConfig {
fn default() -> Self {
Self::new()
}
}
fn effort_to_compression(effort: i32) -> crate::Compression {
use crate::Compression;
match effort {
..=0 => Compression::None,
1 => Compression::Fastest,
2 => Compression::Turbo,
3 => Compression::Fast,
4 => Compression::Balanced,
5 => Compression::Thorough,
6 => Compression::High,
7 => Compression::Aggressive,
8 => Compression::Intense,
9 => Compression::Crush,
10 => Compression::Maniac,
11 => Compression::Brag,
_ => Compression::Minutes,
}
}
fn quality_to_mpe(quality: f32) -> f32 {
const TABLE: [(f32, f32); 14] = [
(100.0, 0.000),
(99.0, 0.003),
(95.0, 0.007),
(90.0, 0.011),
(85.0, 0.015),
(80.0, 0.020),
(75.0, 0.026),
(70.0, 0.031),
(60.0, 0.037),
(50.0, 0.044),
(40.0, 0.052),
(30.0, 0.060),
(10.0, 0.085),
(0.0, 0.100),
];
let quality = quality.clamp(0.0, 100.0);
if quality >= TABLE[0].0 {
return TABLE[0].1;
}
let last = TABLE.len() - 1;
if quality <= TABLE[last].0 {
return TABLE[last].1;
}
for i in 0..last {
let (q_hi, mpe_hi) = TABLE[i];
let (q_lo, mpe_lo) = TABLE[i + 1];
if quality >= q_lo {
let t = (q_hi - quality) / (q_hi - q_lo);
return mpe_hi + t * (mpe_lo - mpe_hi);
}
}
TABLE[last].1
}
fn threading_to_count(policy: zencodec::ThreadingPolicy) -> usize {
match policy {
zencodec::ThreadingPolicy::SingleThread => 1,
zencodec::ThreadingPolicy::LimitOrSingle { max_threads } => max_threads as usize,
zencodec::ThreadingPolicy::LimitOrAny {
preferred_max_threads,
} => preferred_max_threads as usize,
zencodec::ThreadingPolicy::Balanced => {
std::thread::available_parallelism().map_or(1, |n| (n.get() / 2).max(1))
}
zencodec::ThreadingPolicy::Unlimited => 0, _ => 0, }
}
static PNG_ENCODE_CAPS: EncodeCapabilities = EncodeCapabilities::new()
.with_icc(true)
.with_exif(true)
.with_xmp(true)
.with_cicp(true)
.with_stop(true)
.with_animation(true)
.with_lossless(true)
.with_lossy(true)
.with_native_gray(true)
.with_native_16bit(true)
.with_native_f32(true)
.with_native_alpha(true)
.with_push_rows(true)
.with_enforces_max_pixels(true)
.with_enforces_max_memory(true)
.with_effort_range(0, 12)
.with_quality_range(0.0, 100.0)
.with_threads_supported_range(1, 16);
impl zencodec::encode::EncoderConfig for PngEncoderConfig {
type Error = At<PngError>;
type Job = PngEncodeJob;
fn format() -> ImageFormat {
ImageFormat::Png
}
fn supported_descriptors() -> &'static [PixelDescriptor] {
ENCODE_DESCRIPTORS
}
fn capabilities() -> &'static EncodeCapabilities {
&PNG_ENCODE_CAPS
}
fn with_generic_effort(mut self, effort: i32) -> Self {
self.effort = Some(effort);
self.config.compression = effort_to_compression(effort);
self
}
fn generic_effort(&self) -> Option<i32> {
self.effort
}
fn with_generic_quality(mut self, quality: f32) -> Self {
self.quality = Some(quality);
if quality < 100.0 {
self.lossless = false;
}
self
}
fn generic_quality(&self) -> Option<f32> {
self.quality
}
fn with_lossless(mut self, lossless: bool) -> Self {
self.lossless = lossless;
self
}
fn is_lossless(&self) -> Option<bool> {
Some(self.lossless)
}
fn job(self) -> PngEncodeJob {
PngEncodeJob {
config: self,
stop: None,
metadata: None,
limits: None,
policy: None,
canvas_width: 0,
canvas_height: 0,
loop_count: None,
}
}
}
pub struct PngEncodeJob {
config: PngEncoderConfig,
stop: Option<zencodec::StopToken>,
metadata: Option<Metadata>,
limits: Option<ResourceLimits>,
policy: Option<zencodec::encode::EncodePolicy>,
canvas_width: u32,
canvas_height: u32,
loop_count: Option<u32>,
}
impl zencodec::encode::EncodeJob for PngEncodeJob {
type Error = At<PngError>;
type Enc = PngEncoder;
type AnimationFrameEnc = PngAnimationFrameEncoder;
fn with_stop(mut self, stop: zencodec::StopToken) -> Self {
self.stop = Some(stop);
self
}
fn with_metadata(mut self, meta: Metadata) -> Self {
self.metadata = Some(meta);
self
}
fn with_limits(mut self, limits: ResourceLimits) -> Self {
self.limits = Some(limits);
self
}
fn with_policy(mut self, policy: zencodec::encode::EncodePolicy) -> Self {
self.policy = Some(policy);
self
}
fn with_canvas_size(mut self, width: u32, height: u32) -> Self {
self.canvas_width = width;
self.canvas_height = height;
self
}
fn with_loop_count(mut self, count: Option<u32>) -> Self {
self.loop_count = count;
self
}
fn encoder(self) -> Result<PngEncoder, At<PngError>> {
Ok(PngEncoder {
config: self.config,
stop: self.stop,
metadata: self.metadata,
limits: self.limits,
policy: self.policy,
canvas_width: self.canvas_width,
canvas_height: self.canvas_height,
streaming: None,
})
}
fn animation_frame_encoder(self) -> Result<PngAnimationFrameEncoder, At<PngError>> {
let effective_meta = apply_encode_policy(self.metadata.as_ref(), self.policy.as_ref());
let mut config = self.config.config.clone();
if let Some(ref limits) = self.limits {
let thread_count = threading_to_count(limits.threading());
config.max_threads = thread_count;
if thread_count == 1 {
config.parallel = false;
}
}
let mut enc = PngAnimationFrameEncoder::new(
config,
self.canvas_width,
self.canvas_height,
effective_meta,
);
enc.loop_count = self.loop_count.unwrap_or(0);
enc.limits = self.limits;
Ok(enc)
}
}
pub struct PngEncoder {
config: PngEncoderConfig,
stop: Option<zencodec::StopToken>,
metadata: Option<Metadata>,
limits: Option<ResourceLimits>,
policy: Option<zencodec::encode::EncodePolicy>,
canvas_width: u32,
canvas_height: u32,
streaming: Option<StreamingMode>,
}
enum StreamingMode {
Buffered(BufferedStreamingState),
TrueStreaming(TrueStreamingState),
PreFiltered(PreFilteredState),
}
struct BufferedStreamingState {
pixel_data: Vec<u8>,
color_type: crate::encode::ColorType,
bit_depth: crate::encode::BitDepth,
row_bytes: usize,
rows_pushed: u32,
}
struct TrueStreamingState {
output: Vec<u8>,
convert_buf: Vec<u8>,
row_bytes: usize,
rows_pushed: u32,
adler: u32,
idat_len_pos: usize,
block_remaining: usize,
filtered_remaining: usize,
}
struct PreFilteredState {
preamble: Vec<u8>,
filtered_data: Vec<u8>,
prev_row: Vec<u8>,
convert_buf: Vec<u8>,
filter_type: u8,
bpp: usize,
row_bytes: usize,
rows_pushed: u32,
color_type: crate::encode::ColorType,
bit_depth: crate::encode::BitDepth,
zenflate_effort: u32,
}
impl PngEncoder {
fn config_with_threading(&self) -> EncodeConfig {
let mut config = self.config.config.clone();
if let Some(ref limits) = self.limits {
let thread_count = threading_to_count(limits.threading());
config.max_threads = thread_count;
if thread_count == 1 {
config.parallel = false;
}
}
config
}
fn do_encode(
&self,
bytes: &[u8],
w: u32,
h: u32,
color_type: crate::encode::ColorType,
) -> Result<EncodeOutput, At<PngError>> {
self.do_encode_with_depth(bytes, w, h, color_type, crate::encode::BitDepth::Eight)
}
fn do_encode_with_depth(
&self,
bytes: &[u8],
w: u32,
h: u32,
color_type: crate::encode::ColorType,
bit_depth: crate::encode::BitDepth,
) -> Result<EncodeOutput, At<PngError>> {
let cancel: &dyn enough::Stop = match self.stop {
Some(ref s) => s as &dyn enough::Stop,
None => &enough::Unstoppable,
};
cancel.check().map_err(|e| at!(PngError::from(e)))?;
if let Some(ref limits) = self.limits {
limits
.check_dimensions(w, h)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
let channels: u64 = match color_type {
crate::encode::ColorType::Grayscale => 1,
crate::encode::ColorType::Rgb => 3,
crate::encode::ColorType::GrayscaleAlpha => 2,
crate::encode::ColorType::Rgba => 4,
};
let depth_bytes: u64 = match bit_depth {
crate::encode::BitDepth::Eight => 1,
crate::encode::BitDepth::Sixteen => 2,
};
let bpp = channels * depth_bytes;
let estimated_mem = w as u64 * h as u64 * bpp;
limits
.check_memory(estimated_mem)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
}
let config = self.config_with_threading();
let timeout = std::time::Duration::from_millis(DEFAULT_TIMEOUT_MS);
let deadline = almost_enough::time::WithTimeout::new(enough::Unstoppable, timeout);
let effective_meta = apply_encode_policy(self.metadata.as_ref(), self.policy.as_ref());
let meta_ref = effective_meta.as_ref();
let data = crate::encode::encode_raw(
bytes, w, h, color_type, bit_depth, meta_ref, &config, cancel, &deadline,
)?;
if let Some(ref limits) = self.limits {
limits
.check_output_size(data.len() as u64)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
}
Ok(EncodeOutput::new(data, ImageFormat::Png))
}
}
impl zencodec::encode::Encoder for PngEncoder {
type Error = At<PngError>;
fn reject(op: zencodec::UnsupportedOperation) -> At<PngError> {
at!(PngError::from(op))
}
fn preferred_strip_height(&self) -> u32 {
1 }
fn encode(self, pixels: PixelSlice<'_>) -> Result<EncodeOutput, At<PngError>> {
use linear_srgb::default::{linear_to_srgb_u8_rgba_slice, linear_to_srgb_u8_slice};
use zenpixels::PixelFormat;
let w = pixels.width();
let h = pixels.rows();
#[cfg(any(feature = "quantize", feature = "imagequant", feature = "quantette"))]
let effective_meta = apply_encode_policy(self.metadata.as_ref(), self.policy.as_ref());
#[cfg(any(feature = "quantize", feature = "imagequant", feature = "quantette"))]
let meta_ref = effective_meta.as_ref();
match pixels.descriptor().pixel_format() {
PixelFormat::Rgb8 => {
let bytes = contiguous_bytes(&pixels);
self.do_encode(&bytes, w, h, crate::encode::ColorType::Rgb)
}
PixelFormat::Rgba8 => {
#[cfg(any(feature = "quantize", feature = "imagequant", feature = "quantette"))]
if !self.config.lossless
&& let Some(q) = self.config.quality
&& q < 100.0
{
let bytes = contiguous_bytes(&pixels);
let rgba: &[Rgba<u8>] = bytemuck::cast_slice(&bytes);
let img = imgref::Img::new(rgba, w as usize, h as usize);
let mpe = quality_to_mpe(q);
let cancel: &dyn enough::Stop = match self.stop {
Some(ref s) => s as &dyn enough::Stop,
None => &enough::Unstoppable,
};
let timeout = std::time::Duration::from_millis(DEFAULT_TIMEOUT_MS);
let deadline =
almost_enough::time::WithTimeout::new(enough::Unstoppable, timeout);
let quantizer = crate::default_quantizer();
let config = self.config_with_threading();
let result = crate::encode_auto(
img,
&config,
&*quantizer,
crate::QualityGate::MaxMpe(mpe),
meta_ref,
cancel,
&deadline,
)?;
return Ok(EncodeOutput::new(result.data, ImageFormat::Png));
}
let bytes = contiguous_bytes(&pixels);
self.do_encode(&bytes, w, h, crate::encode::ColorType::Rgba)
}
PixelFormat::Gray8 => {
let bytes = contiguous_bytes(&pixels);
self.do_encode(&bytes, w, h, crate::encode::ColorType::Grayscale)
}
PixelFormat::Rgb16 => {
let bytes = contiguous_bytes(&pixels);
let be = native_to_be_16(&bytes);
self.do_encode_with_depth(
&be,
w,
h,
crate::encode::ColorType::Rgb,
crate::encode::BitDepth::Sixteen,
)
}
PixelFormat::Rgba16 => {
let bytes = contiguous_bytes(&pixels);
let be = native_to_be_16(&bytes);
self.do_encode_with_depth(
&be,
w,
h,
crate::encode::ColorType::Rgba,
crate::encode::BitDepth::Sixteen,
)
}
PixelFormat::Gray16 => {
let bytes = contiguous_bytes(&pixels);
let be = native_to_be_16(&bytes);
self.do_encode_with_depth(
&be,
w,
h,
crate::encode::ColorType::Grayscale,
crate::encode::BitDepth::Sixteen,
)
}
PixelFormat::RgbF32 => {
let src = contiguous_bytes(&pixels);
let floats: &[f32] = bytemuck::cast_slice(&src);
let mut srgb = vec![0u8; floats.len()];
linear_to_srgb_u8_slice(floats, &mut srgb);
self.do_encode(&srgb, w, h, crate::encode::ColorType::Rgb)
}
PixelFormat::RgbaF32 => {
let src = contiguous_bytes(&pixels);
let floats: &[f32] = bytemuck::cast_slice(&src);
let mut srgb = vec![0u8; floats.len()];
linear_to_srgb_u8_rgba_slice(floats, &mut srgb);
#[cfg(any(feature = "quantize", feature = "imagequant", feature = "quantette"))]
if !self.config.lossless
&& let Some(q) = self.config.quality
&& q < 100.0
{
let rgba: &[Rgba<u8>] = bytemuck::cast_slice(&srgb);
let img = imgref::Img::new(rgba, w as usize, h as usize);
let mpe = quality_to_mpe(q);
let cancel: &dyn enough::Stop = match self.stop {
Some(ref s) => s as &dyn enough::Stop,
None => &enough::Unstoppable,
};
let timeout = std::time::Duration::from_millis(DEFAULT_TIMEOUT_MS);
let deadline =
almost_enough::time::WithTimeout::new(enough::Unstoppable, timeout);
let quantizer = crate::default_quantizer();
let config = self.config_with_threading();
let result = crate::encode_auto(
img,
&config,
&*quantizer,
crate::QualityGate::MaxMpe(mpe),
meta_ref,
cancel,
&deadline,
)?;
return Ok(EncodeOutput::new(result.data, ImageFormat::Png));
}
self.do_encode(&srgb, w, h, crate::encode::ColorType::Rgba)
}
PixelFormat::GrayF32 => {
let src = contiguous_bytes(&pixels);
let floats: &[f32] = bytemuck::cast_slice(&src);
let mut srgb = vec![0u8; floats.len()];
linear_to_srgb_u8_slice(floats, &mut srgb);
self.do_encode(&srgb, w, h, crate::encode::ColorType::Grayscale)
}
PixelFormat::Bgra8 => {
let raw = contiguous_bytes(&pixels);
let rgba: Vec<u8> = raw
.chunks_exact(4)
.flat_map(|c| [c[2], c[1], c[0], c[3]])
.collect();
self.do_encode(&rgba, w, h, crate::encode::ColorType::Rgba)
}
_ => Err(at!(PngError::from(
zencodec::UnsupportedOperation::PixelFormat
))),
}
}
fn push_rows(&mut self, rows: PixelSlice<'_>) -> Result<(), At<PngError>> {
use linear_srgb::default::{linear_to_srgb_u8_rgba_slice, linear_to_srgb_u8_slice};
use zenpixels::PixelFormat;
let w = rows.width();
let h = rows.rows();
if h == 0 {
return Ok(());
}
if self.streaming.is_none() {
let (color_type, bit_depth) = pixel_format_to_png(rows.descriptor().pixel_format())
.ok_or_else(|| at!(PngError::from(zencodec::UnsupportedOperation::PixelFormat)))?;
if self.canvas_width == 0 {
self.canvas_width = w;
}
let channels: usize = match color_type {
crate::encode::ColorType::Grayscale => 1,
crate::encode::ColorType::Rgb => 3,
crate::encode::ColorType::GrayscaleAlpha => 2,
crate::encode::ColorType::Rgba => 4,
};
let depth_bytes: usize = match bit_depth {
crate::encode::BitDepth::Eight => 1,
crate::encode::BitDepth::Sixteen => 2,
};
let row_bytes = self.canvas_width as usize * channels * depth_bytes;
let effort = self.config.config.compression.effort();
let bpp = channels * depth_bytes;
if effort == 0 && self.canvas_height > 0 {
self.streaming = Some(StreamingMode::TrueStreaming(TrueStreamingState::new(
self.canvas_width,
self.canvas_height,
color_type,
bit_depth,
row_bytes,
self.metadata.as_ref(),
self.policy.as_ref(),
&self.config.config,
)?));
} else if effort == 1 && self.canvas_height > 0 {
self.streaming = Some(StreamingMode::PreFiltered(PreFilteredState::new(
self.canvas_width,
self.canvas_height,
color_type,
bit_depth,
row_bytes,
bpp,
self.metadata.as_ref(),
self.policy.as_ref(),
&self.config.config,
)?));
} else {
let capacity = if self.canvas_height > 0 {
row_bytes * self.canvas_height as usize
} else {
row_bytes * h as usize
};
self.streaming = Some(StreamingMode::Buffered(BufferedStreamingState {
pixel_data: Vec::with_capacity(capacity),
color_type,
bit_depth,
row_bytes,
rows_pushed: 0,
}));
}
}
if w != self.canvas_width && self.canvas_width > 0 {
return Err(at!(PngError::InvalidInput(alloc::format!(
"push_rows: width {} does not match canvas width {}",
w,
self.canvas_width
))));
}
let format = rows.descriptor().pixel_format();
let mode = self.streaming.as_mut().unwrap();
match mode {
StreamingMode::Buffered(state) => {
if self.canvas_height > 0 && state.rows_pushed + h > self.canvas_height {
return Err(at!(PngError::InvalidInput(alloc::format!(
"push_rows: would exceed canvas height {} (already pushed {}, pushing {})",
self.canvas_height,
state.rows_pushed,
h
))));
}
state.pixel_data.reserve(state.row_bytes * h as usize);
for y in 0..h {
let src = rows.row(y);
match format {
PixelFormat::Rgb8 | PixelFormat::Rgba8 | PixelFormat::Gray8 => {
state.pixel_data.extend_from_slice(&src[..state.row_bytes]);
}
PixelFormat::Rgb16 | PixelFormat::Rgba16 | PixelFormat::Gray16 => {
let samples: &[u16] = bytemuck::cast_slice(&src[..state.row_bytes]);
for &val in samples {
state.pixel_data.extend_from_slice(&val.to_be_bytes());
}
}
PixelFormat::RgbF32 | PixelFormat::GrayF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
let start = state.pixel_data.len();
state.pixel_data.resize(start + floats.len(), 0);
linear_to_srgb_u8_slice(floats, &mut state.pixel_data[start..]);
}
PixelFormat::RgbaF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
let start = state.pixel_data.len();
state.pixel_data.resize(start + floats.len(), 0);
linear_to_srgb_u8_rgba_slice(floats, &mut state.pixel_data[start..]);
}
PixelFormat::Bgra8 => {
for c in src.chunks_exact(4) {
state
.pixel_data
.extend_from_slice(&[c[2], c[1], c[0], c[3]]);
}
}
_ => {
return Err(at!(PngError::from(
zencodec::UnsupportedOperation::PixelFormat
)));
}
}
}
state.rows_pushed += h;
}
StreamingMode::TrueStreaming(state) => {
if state.rows_pushed + h > self.canvas_height {
return Err(at!(PngError::InvalidInput(alloc::format!(
"push_rows: would exceed canvas height {} (already pushed {}, pushing {})",
self.canvas_height,
state.rows_pushed,
h
))));
}
for y in 0..h {
let src = rows.row(y);
match format {
PixelFormat::Rgb8 | PixelFormat::Rgba8 | PixelFormat::Gray8 => {
state.push_raw_row(&src[..state.row_bytes]);
}
PixelFormat::Rgb16 | PixelFormat::Rgba16 | PixelFormat::Gray16 => {
let samples: &[u16] = bytemuck::cast_slice(&src[..state.row_bytes]);
for (i, &val) in samples.iter().enumerate() {
let be = val.to_be_bytes();
state.convert_buf[i * 2] = be[0];
state.convert_buf[i * 2 + 1] = be[1];
}
state.push_converted_row();
}
PixelFormat::RgbF32 | PixelFormat::GrayF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
linear_to_srgb_u8_slice(floats, &mut state.convert_buf);
state.push_converted_row();
}
PixelFormat::RgbaF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
linear_to_srgb_u8_rgba_slice(floats, &mut state.convert_buf);
state.push_converted_row();
}
PixelFormat::Bgra8 => {
for (i, c) in src.chunks_exact(4).enumerate() {
state.convert_buf[i * 4] = c[2];
state.convert_buf[i * 4 + 1] = c[1];
state.convert_buf[i * 4 + 2] = c[0];
state.convert_buf[i * 4 + 3] = c[3];
}
state.push_converted_row();
}
_ => {
return Err(at!(PngError::from(
zencodec::UnsupportedOperation::PixelFormat
)));
}
}
}
}
StreamingMode::PreFiltered(state) => {
if state.rows_pushed + h > self.canvas_height {
return Err(at!(PngError::InvalidInput(alloc::format!(
"push_rows: would exceed canvas height {} (already pushed {}, pushing {})",
self.canvas_height,
state.rows_pushed,
h
))));
}
for y in 0..h {
let src = rows.row(y);
match format {
PixelFormat::Rgb8 | PixelFormat::Rgba8 | PixelFormat::Gray8 => {
state.push_raw_row(&src[..state.row_bytes]);
}
PixelFormat::Rgb16 | PixelFormat::Rgba16 | PixelFormat::Gray16 => {
let samples: &[u16] = bytemuck::cast_slice(&src[..state.row_bytes]);
for (i, &val) in samples.iter().enumerate() {
let be = val.to_be_bytes();
state.convert_buf[i * 2] = be[0];
state.convert_buf[i * 2 + 1] = be[1];
}
state.push_converted_row();
}
PixelFormat::RgbF32 | PixelFormat::GrayF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
linear_to_srgb_u8_slice(floats, &mut state.convert_buf);
state.push_converted_row();
}
PixelFormat::RgbaF32 => {
let floats: &[f32] = bytemuck::cast_slice(src);
linear_to_srgb_u8_rgba_slice(floats, &mut state.convert_buf);
state.push_converted_row();
}
PixelFormat::Bgra8 => {
for (i, c) in src.chunks_exact(4).enumerate() {
state.convert_buf[i * 4] = c[2];
state.convert_buf[i * 4 + 1] = c[1];
state.convert_buf[i * 4 + 2] = c[0];
state.convert_buf[i * 4 + 3] = c[3];
}
state.push_converted_row();
}
_ => {
return Err(at!(PngError::from(
zencodec::UnsupportedOperation::PixelFormat
)));
}
}
}
}
}
Ok(())
}
fn finish(mut self) -> Result<EncodeOutput, At<PngError>> {
let mode = self.streaming.take().ok_or_else(|| {
at!(PngError::InvalidInput(
"finish() called without any push_rows() calls".into()
))
})?;
match mode {
StreamingMode::Buffered(state) => {
let h = state.rows_pushed;
let w = self.canvas_width;
if w == 0 || h == 0 {
return Err(at!(PngError::InvalidInput("no pixel data pushed".into())));
}
let expected = state.row_bytes * h as usize;
if state.pixel_data.len() != expected {
return Err(at!(PngError::InvalidInput(alloc::format!(
"finish: pixel data size {} does not match expected {} ({}×{} rows)",
state.pixel_data.len(),
expected,
state.row_bytes,
h
))));
}
self.do_encode_with_depth(
&state.pixel_data,
w,
h,
state.color_type,
state.bit_depth,
)
}
StreamingMode::TrueStreaming(state) => {
if state.rows_pushed == 0 {
return Err(at!(PngError::InvalidInput("no pixel data pushed".into())));
}
let data = state.finish();
if let Some(ref limits) = self.limits {
limits
.check_output_size(data.len() as u64)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
}
Ok(EncodeOutput::new(data, ImageFormat::Png))
}
StreamingMode::PreFiltered(state) => {
if state.rows_pushed == 0 {
return Err(at!(PngError::InvalidInput("no pixel data pushed".into())));
}
let cancel: &dyn enough::Stop = match self.stop {
Some(ref s) => s as &dyn enough::Stop,
None => &enough::Unstoppable,
};
let data = state.finish(cancel)?;
if let Some(ref limits) = self.limits {
limits
.check_output_size(data.len() as u64)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
}
Ok(EncodeOutput::new(data, ImageFormat::Png))
}
}
}
}
struct AccumulatedFrame {
pixels: Vec<u8>, duration_ms: u32,
}
pub struct PngAnimationFrameEncoder {
frames: Vec<AccumulatedFrame>,
canvas_width: u32,
canvas_height: u32,
config: crate::encode::EncodeConfig,
metadata: Option<Metadata>,
loop_count: u32,
building_frame: Option<BuildingFrame>,
limits: Option<ResourceLimits>,
cumulative_pixel_bytes: u64,
}
struct BuildingFrame {
pixels: Vec<u8>,
duration_ms: u32,
rows_pushed: u32,
}
impl PngAnimationFrameEncoder {
fn new(
config: crate::encode::EncodeConfig,
canvas_width: u32,
canvas_height: u32,
metadata: Option<Metadata>,
) -> Self {
Self {
frames: Vec::new(),
canvas_width,
canvas_height,
config,
metadata,
loop_count: 0,
building_frame: None,
limits: None,
cumulative_pixel_bytes: 0,
}
}
fn pixels_to_rgba8(pixels: &PixelSlice<'_>) -> Result<Vec<u8>, At<PngError>> {
let desc = pixels.descriptor();
match (desc.channel_type(), desc.layout()) {
(zenpixels::ChannelType::U8, zenpixels::ChannelLayout::Rgba) => {
Ok(contiguous_bytes(pixels).into_owned())
}
(zenpixels::ChannelType::U8, zenpixels::ChannelLayout::Bgra) => {
let src = contiguous_bytes(pixels);
Ok(src
.chunks_exact(4)
.flat_map(|c| [c[2], c[1], c[0], c[3]])
.collect())
}
(zenpixels::ChannelType::U8, zenpixels::ChannelLayout::Rgb) => {
let src = contiguous_bytes(pixels);
Ok(src
.chunks_exact(3)
.flat_map(|c| [c[0], c[1], c[2], 255])
.collect())
}
(zenpixels::ChannelType::U8, zenpixels::ChannelLayout::Gray) => {
let src = contiguous_bytes(pixels);
Ok(src.iter().flat_map(|&g| [g, g, g, 255]).collect())
}
_ => Err(at!(PngError::InvalidInput(alloc::format!(
"APNG frame encoder: unsupported pixel format {:?}; \
supported formats are RGBA8, BGRA8, RGB8, and Gray8",
desc
)))),
}
}
}
impl zencodec::encode::AnimationFrameEncoder for PngAnimationFrameEncoder {
type Error = At<PngError>;
fn reject(op: zencodec::UnsupportedOperation) -> At<PngError> {
at!(PngError::from(op))
}
fn push_frame(
&mut self,
pixels: PixelSlice<'_>,
duration_ms: u32,
_stop: Option<&dyn enough::Stop>,
) -> Result<(), At<PngError>> {
let rgba = Self::pixels_to_rgba8(&pixels)?;
if let Some(ref limits) = self.limits {
limits
.check_frames(self.frames.len() as u32 + 1)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
let new_cumulative = self.cumulative_pixel_bytes + rgba.len() as u64;
limits
.check_memory(new_cumulative)
.map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
}
self.cumulative_pixel_bytes += rgba.len() as u64;
self.frames.push(AccumulatedFrame {
pixels: rgba,
duration_ms,
});
Ok(())
}
fn finish(self, stop: Option<&dyn enough::Stop>) -> Result<EncodeOutput, At<PngError>> {
self.do_finish(stop)
}
}
impl PngAnimationFrameEncoder {
fn do_finish(self, stop: Option<&dyn enough::Stop>) -> Result<EncodeOutput, At<PngError>> {
let cancel: &dyn enough::Stop = stop.unwrap_or(&enough::Unstoppable);
cancel.check().map_err(|e| at!(PngError::from(e)))?;
if self.frames.is_empty() {
return Err(at!(PngError::InvalidInput(
"APNG frame encoder: no frames pushed".into(),
)));
}
let inputs: Vec<crate::encode::ApngFrameInput<'_>> = self
.frames
.iter()
.map(|f| {
crate::encode::ApngFrameInput {
pixels: &f.pixels,
delay_num: f.duration_ms.min(65535) as u16,
delay_den: 1000,
}
})
.collect();
let apng_config = crate::encode::ApngEncodeConfig {
encode: self.config.clone(),
num_plays: self.loop_count,
};
let timeout = std::time::Duration::from_millis(DEFAULT_TIMEOUT_MS);
let deadline = almost_enough::time::WithTimeout::new(enough::Unstoppable, timeout);
let data = crate::encode::encode_apng(
&inputs,
self.canvas_width,
self.canvas_height,
&apng_config,
self.metadata.as_ref(),
cancel,
&deadline,
)
.map_err(|e| e.decompose().0)?;
Ok(EncodeOutput::new(data, ImageFormat::Png))
}
}
#[derive(Clone, Debug)]
pub struct PngDecoderConfig {
limits: ResourceLimits,
}
impl PngDecoderConfig {
#[must_use]
pub fn new() -> Self {
Self {
limits: ResourceLimits::none()
.with_max_pixels(PngDecodeConfig::DEFAULT_MAX_PIXELS)
.with_max_memory(PngDecodeConfig::DEFAULT_MAX_MEMORY),
}
}
}
impl PngDecoderConfig {
#[must_use]
pub fn job_static(self) -> PngDecodeJob {
PngDecodeJob {
config: self,
stop: None,
limits: None,
policy: None,
start_frame_index: 0,
}
}
pub fn decode(&self, data: &[u8]) -> Result<DecodeOutput, At<PngError>> {
use zencodec::decode::{Decode, DecodeJob, DecoderConfig};
self.clone()
.job()
.decoder(Cow::Borrowed(data), &[])?
.decode()
}
pub fn probe(&self, data: &[u8]) -> Result<ImageInfo, At<PngError>> {
use zencodec::decode::{DecodeJob, DecoderConfig};
self.clone().job().probe(data)
}
pub fn probe_header(&self, data: &[u8]) -> Result<ImageInfo, At<PngError>> {
self.probe(data)
}
pub fn decode_into_rgb8(
&self,
data: &[u8],
dst: imgref::ImgRefMut<'_, Rgb<u8>>,
) -> Result<ImageInfo, At<PngError>> {
let mut dst: PixelSliceMut<'_> = PixelSliceMut::from(dst).erase();
let output = self.decode(data)?;
let info = output.info().clone();
let pixels = output.into_buffer();
let src = to_rgb8(pixels);
copy_rows_u8(&src, &mut dst);
Ok(info)
}
pub fn decode_into_rgb16(
&self,
data: &[u8],
dst: imgref::ImgRefMut<'_, Rgb<u16>>,
) -> Result<ImageInfo, At<PngError>> {
let mut dst: PixelSliceMut<'_> = PixelSliceMut::from(dst).erase();
let output = self.decode(data)?;
let info = output.info().clone();
let pixels = output.into_buffer();
decode_into_rgb16(pixels, &mut dst);
Ok(info)
}
pub fn decode_into_rgb_f32(
&self,
data: &[u8],
dst: imgref::ImgRefMut<'_, Rgb<f32>>,
) -> Result<ImageInfo, At<PngError>> {
let mut dst: PixelSliceMut<'_> = PixelSliceMut::from(dst).erase();
let output = self.decode(data)?;
let info = output.info().clone();
let pixels = output.into_buffer();
decode_into_rgb_f32(pixels, &mut dst);
Ok(info)
}
pub fn decode_into_rgba_f32(
&self,
data: &[u8],
dst: imgref::ImgRefMut<'_, Rgba<f32>>,
) -> Result<ImageInfo, At<PngError>> {
let mut dst: PixelSliceMut<'_> = PixelSliceMut::from(dst).erase();
let output = self.decode(data)?;
let info = output.info().clone();
let pixels = output.into_buffer();
decode_into_rgba_f32(pixels, &mut dst);
Ok(info)
}
pub fn decode_into_gray_f32(
&self,
data: &[u8],
dst: imgref::ImgRefMut<'_, Gray<f32>>,
) -> Result<ImageInfo, At<PngError>> {
let mut dst: PixelSliceMut<'_> = PixelSliceMut::from(dst).erase();
let output = self.decode(data)?;
let info = output.info().clone();
let pixels = output.into_buffer();
decode_into_gray_f32(pixels, &mut dst);
Ok(info)
}
}
impl Default for PngDecoderConfig {
fn default() -> Self {
Self::new()
}
}
static PNG_DECODE_CAPS: DecodeCapabilities = DecodeCapabilities::new()
.with_icc(true)
.with_exif(true)
.with_xmp(true)
.with_cicp(true)
.with_stop(true)
.with_animation(true)
.with_cheap_probe(true)
.with_native_gray(true)
.with_native_16bit(true)
.with_native_alpha(true)
.with_streaming(true)
.with_decode_into(true)
.with_enforces_max_pixels(true)
.with_enforces_max_memory(true)
.with_enforces_max_input_bytes(true);
impl zencodec::decode::DecoderConfig for PngDecoderConfig {
    type Error = At<PngError>;
    type Job<'a> = PngDecodeJob;

    /// PNG is the only container this codec handles.
    fn formats() -> &'static [ImageFormat] {
        &[ImageFormat::Png]
    }

    /// Pixel layouts a decode output can be negotiated into.
    fn supported_descriptors() -> &'static [PixelDescriptor] {
        DECODE_DESCRIPTORS
    }

    fn capabilities() -> &'static DecodeCapabilities {
        &PNG_DECODE_CAPS
    }

    /// Start a decode job with no stop token, no limit override, no policy,
    /// and animation playback from frame 0.
    fn job<'a>(self) -> Self::Job<'a> {
        PngDecodeJob {
            config: self,
            stop: None,
            limits: None,
            policy: None,
            start_frame_index: 0,
        }
    }
}
/// A configured decode job: the immutable decoder config plus the per-job
/// overrides accumulated through the `with_*` builder methods.
pub struct PngDecodeJob {
    config: PngDecoderConfig,
    // Optional cooperative-cancellation token, checked during decoding.
    stop: Option<zencodec::StopToken>,
    // When set, overrides `config.limits` for this job.
    limits: Option<ResourceLimits>,
    policy: Option<zencodec::decode::DecodePolicy>,
    // First animation frame to yield; earlier frames are decoded but skipped.
    start_frame_index: u32,
}
impl<'a> zencodec::decode::DecodeJob<'a> for PngDecodeJob {
    type Error = At<PngError>;
    type Dec = PngDecoder<'a>;
    type StreamDec = PngStreamingDecoder<'a>;
    type AnimationFrameDec = PngAnimationFrameDecoder;

    /// Attach a cooperative-cancellation token, checked during decoding.
    fn with_stop(mut self, stop: zencodec::StopToken) -> Self {
        self.stop = Some(stop);
        self
    }

    /// Override the config's resource limits for this job only.
    fn with_limits(mut self, limits: ResourceLimits) -> Self {
        self.limits = Some(limits);
        self
    }

    fn with_policy(mut self, policy: zencodec::decode::DecodePolicy) -> Self {
        self.policy = Some(policy);
        self
    }

    /// Set the first animation frame index to yield; earlier frames are
    /// still decoded (to advance decoder state) but not returned.
    fn with_start_frame_index(mut self, index: u32) -> Self {
        self.start_frame_index = index;
        self
    }

    /// Cheap header-only probe. Attaches source-encoding details when
    /// detection succeeds and applies the policy's metadata filtering.
    fn probe(&self, data: &[u8]) -> Result<ImageInfo, At<PngError>> {
        let info = crate::decode::probe(data)?;
        let mut image_info = convert_info(&info);
        // Best-effort: a detection failure just leaves the details unset.
        if let Ok(probe) = crate::detect::probe(data) {
            image_info = image_info.with_source_encoding_details(probe);
        }
        apply_policy_to_info(&mut image_info, self.policy.as_ref());
        Ok(image_info)
    }

    /// Describe what a full decode would produce, without decoding pixels.
    fn output_info(&self, data: &[u8]) -> Result<OutputInfo, At<PngError>> {
        let info = crate::decode::probe(data)?;
        // Color types 4 (gray+alpha) and 6 (RGBA) carry alpha in the pixel
        // data itself; any other reported alpha must come from a tRNS chunk.
        let intrinsic_alpha = info.color_type == 4 || info.color_type == 6;
        let has_trns = info.has_alpha && !intrinsic_alpha;
        let native_format = native_output_descriptor(info.color_type, info.bit_depth, has_trns);
        Ok(
            OutputInfo::full_decode(info.width, info.height, native_format)
                .with_alpha(info.has_alpha),
        )
    }

    /// Build a one-shot whole-image decoder over `data`.
    fn decoder(
        self,
        data: Cow<'a, [u8]>,
        preferred: &[PixelDescriptor],
    ) -> Result<PngDecoder<'a>, At<PngError>> {
        Ok(PngDecoder {
            config: self.config,
            stop: self.stop,
            limits: self.limits,
            policy: self.policy,
            data,
            preferred: preferred.to_vec(),
        })
    }

    /// Build a row-at-a-time streaming decoder over `data`.
    fn streaming_decoder(
        self,
        data: Cow<'a, [u8]>,
        preferred: &[PixelDescriptor],
    ) -> Result<PngStreamingDecoder<'a>, At<PngError>> {
        PngStreamingDecoder::new(
            data,
            &self.config,
            self.stop,
            self.limits.as_ref(),
            self.policy.as_ref(),
            preferred,
        )
    }

    /// Build an APNG frame-by-frame decoder over `data`.
    fn animation_frame_decoder(
        self,
        data: Cow<'a, [u8]>,
        preferred: &[PixelDescriptor],
    ) -> Result<PngAnimationFrameDecoder, At<PngError>> {
        // Reject up front if the policy forbids animation decoding.
        if let Some(ref policy) = self.policy
            && !policy.resolve_animation(true)
        {
            return Err(at!(PngError::InvalidInput(
                "animation rejected by decode policy".into()
            )));
        }
        let effective_limits = self.limits.as_ref().unwrap_or(&self.config.limits);
        effective_limits
            .check_input_size(data.len() as u64)
            .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
        // NOTE(review): only this input-size check honors the per-job
        // `limits` override; `PngAnimationFrameDecoder::new` reads its
        // pixel/memory caps from `config.limits` — confirm this asymmetry
        // is intended.
        PngAnimationFrameDecoder::new(
            &data,
            &self.config,
            self.stop,
            self.policy.as_ref(),
            preferred,
            self.start_frame_index,
        )
    }

    /// Row-sink decode entry point; see [`push_decoder_native`].
    fn push_decoder(
        self,
        data: Cow<'a, [u8]>,
        sink: &mut dyn zencodec::decode::DecodeRowSink,
        preferred: &[PixelDescriptor],
    ) -> Result<OutputInfo, At<PngError>> {
        push_decoder_native(self, data, sink, preferred)
    }
}
/// One-shot whole-image decoder produced by [`PngDecodeJob::decoder`].
pub struct PngDecoder<'a> {
    config: PngDecoderConfig,
    stop: Option<zencodec::StopToken>,
    // Per-job override of `config.limits`, when set.
    limits: Option<ResourceLimits>,
    policy: Option<zencodec::decode::DecodePolicy>,
    data: Cow<'a, [u8]>,
    // Caller-preferred output layouts; empty means keep the native format.
    preferred: Vec<PixelDescriptor>,
}
impl PngDecoder<'_> {
    /// Build the low-level decode configuration: per-job limit overrides win
    /// over the config's limits, and a strict decode policy re-enables the
    /// checksums that are skipped by default for speed.
    fn effective_config(&self) -> PngDecodeConfig {
        let limits = self.limits.as_ref().unwrap_or(&self.config.limits);
        apply_decode_policy(
            PngDecodeConfig {
                max_pixels: limits.max_pixels,
                max_memory_bytes: limits.max_memory_bytes,
                skip_decompression_checksum: true,
                skip_critical_chunk_crc: true,
            },
            self.policy.as_ref(),
        )
    }
}
impl zencodec::decode::Decode for PngDecoder<'_> {
    type Error = At<PngError>;

    /// One-shot decode of the whole image.
    ///
    /// Check order matters: cancellation, progressive policy, input-size
    /// limit, then a cheap header probe so oversized dimensions are rejected
    /// before the expensive full decode runs.
    fn decode(self) -> Result<DecodeOutput, At<PngError>> {
        // Fall back to an unstoppable token when the caller supplied none.
        let cancel: &dyn enough::Stop = match self.stop {
            Some(ref s) => s as &dyn enough::Stop,
            None => &enough::Unstoppable,
        };
        cancel.check().map_err(|e| at!(PngError::from(e)))?;
        check_progressive_policy(&self.data, self.policy.as_ref())?;
        let effective_limits = self.limits.as_ref().unwrap_or(&self.config.limits);
        effective_limits
            .check_input_size(self.data.len() as u64)
            .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
        let probe_info = crate::decode::probe(&self.data)?;
        effective_limits
            .check_dimensions(probe_info.width, probe_info.height)
            .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
        let png_config = self.effective_config();
        let result = crate::decode::decode(&self.data, &png_config, cancel)?;
        let mut info = convert_info(&result.info);
        apply_policy_to_info(&mut info, self.policy.as_ref());
        // Convert to the caller's preferred pixel layout, if any was given.
        let pixels = if self.preferred.is_empty() {
            result.pixels
        } else {
            negotiate_and_convert(result.pixels, &self.preferred)
        };
        let mut output = DecodeOutput::new(pixels, info);
        // Best-effort enrichment; detection failure is not a decode error.
        if let Ok(probe) = crate::detect::probe(&self.data) {
            output = output.with_source_encoding_details(probe);
        }
        Ok(output)
    }
}
/// Choose the decoder's native output pixel layout from the IHDR color type
/// and bit depth, plus whether a tRNS chunk supplies transparency.
///
/// Arm order is significant: the 16-bit patterns must precede the `_` depth
/// fallbacks. Unknown combinations fall back to RGBA8.
fn native_output_descriptor(color_type: u8, bit_depth: u8, has_trns: bool) -> PixelDescriptor {
    match (color_type, bit_depth, has_trns) {
        // Grayscale (0): tRNS promotes the output to an alpha-carrying form.
        (0, 16, false) => PixelDescriptor::GRAY16_SRGB,
        (0, 16, true) => GrayAlpha16::DESCRIPTOR,
        (0, _, false) => PixelDescriptor::GRAY8_SRGB,
        (0, _, true) => PixelDescriptor::RGBA8_SRGB,
        // Truecolor (2).
        (2, 16, false) => PixelDescriptor::RGB16_SRGB,
        (2, 16, true) => PixelDescriptor::RGBA16_SRGB,
        (2, 8, false) => PixelDescriptor::RGB8_SRGB,
        (2, 8, true) => PixelDescriptor::RGBA8_SRGB,
        // Palette (3): always expanded to 8-bit RGB(A).
        (3, _, true) => PixelDescriptor::RGBA8_SRGB,
        (3, _, false) => PixelDescriptor::RGB8_SRGB,
        // Gray+alpha (4) and RGBA (6) carry alpha intrinsically.
        (4, 16, _) => GrayAlpha16::DESCRIPTOR,
        (4, 8, _) => PixelDescriptor::RGBA8_SRGB,
        (6, 16, _) => PixelDescriptor::RGBA16_SRGB,
        (6, 8, _) => PixelDescriptor::RGBA8_SRGB,
        _ => PixelDescriptor::RGBA8_SRGB,
    }
}
/// Decode straight into a caller-supplied row sink in the image's native
/// pixel layout, avoiding an intermediate full-image buffer where possible.
fn push_decoder_native<'a>(
    job: PngDecodeJob,
    data: Cow<'a, [u8]>,
    sink: &mut dyn zencodec::decode::DecodeRowSink,
    preferred: &[PixelDescriptor],
) -> Result<OutputInfo, At<PngError>> {
    use crate::decoder::postprocess::post_process_row;
    use crate::decoder::row::RowDecoder;
    let wrap_sink = |e: zencodec::decode::SinkError| -> At<PngError> {
        at!(PngError::InvalidInput(alloc::format!("sink error: {e}")))
    };
    check_progressive_policy(&data, job.policy.as_ref())?;
    let effective_limits = job.limits.as_ref().unwrap_or(&job.config.limits);
    effective_limits
        .check_input_size(data.len() as u64)
        .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
    // Byte 28 is the IHDR interlace flag (1 = Adam7). Interlaced images
    // cannot be produced row-by-row in order, so fall back to a full decode
    // copied into the sink.
    if data.len() >= 29 && data[..8] == crate::chunk::PNG_SIGNATURE && data[28] == 1 {
        return zencodec::helpers::copy_decode_to_sink(job, data, sink, preferred, |e| {
            at!(PngError::InvalidInput(alloc::format!("sink error: {e}")))
        });
    }
    let limits = job.limits.as_ref().unwrap_or(&job.config.limits);
    let png_config = PngDecodeConfig {
        max_pixels: limits.max_pixels,
        max_memory_bytes: limits.max_memory_bytes,
        // Checksums skipped by default for speed; a strict policy turns
        // them back on via `apply_decode_policy` below.
        skip_decompression_checksum: true,
        skip_critical_chunk_crc: true,
    };
    let png_config = apply_decode_policy(png_config, job.policy.as_ref());
    let cancel: &dyn enough::Stop = match &job.stop {
        Some(s) => s,
        None => &enough::Unstoppable,
    };
    cancel.check().map_err(|e| at!(PngError::from(e)))?;
    let mut reader = RowDecoder::new(data, &png_config)?;
    let ihdr = *reader.ihdr();
    let has_trns = reader.ancillary().trns.is_some();
    let w = ihdr.width;
    let h = ihdr.height;
    limits
        .check_dimensions(w, h)
        .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
    let descriptor = native_output_descriptor(ihdr.color_type, ihdr.bit_depth, has_trns);
    sink.begin(w, h, descriptor).map_err(wrap_sink)?;
    // Fast path: 8-bit RGB/RGBA with no tRNS needs no post-processing —
    // defiltered raw rows already match the output layout, so rows are
    // unfiltered directly into the sink's buffer.
    let is_passthrough =
        !has_trns && ihdr.bit_depth == 8 && (ihdr.color_type == 6 || ihdr.color_type == 2);
    if is_passthrough {
        let raw_row_bytes = ihdr.raw_row_bytes()?;
        let mut dst = sink
            .provide_next_buffer(0, h, w, descriptor)
            .map_err(wrap_sink)?;
        if h > 0 {
            // PNG filtering treats the row above the first row as all zeros.
            let zeros = alloc::vec![0u8; raw_row_bytes];
            let row_slice = dst.row_mut(0);
            match reader.next_raw_row_direct(&mut row_slice[..raw_row_bytes], &zeros) {
                Some(Ok(())) => {}
                Some(Err(e)) => return Err(e),
                None => {
                    return Err(at!(PngError::Decode(
                        "unexpected end of image data at row 0".into()
                    )));
                }
            }
            cancel.check().map_err(|e| at!(PngError::from(e)))?;
        }
        // Each later row unfilters against the previously written row; it
        // is copied out first because `dst` cannot be borrowed twice.
        let mut prev_buf = alloc::vec![0u8; raw_row_bytes];
        for y in 1..h {
            prev_buf.copy_from_slice(&dst.row_mut(y - 1)[..raw_row_bytes]);
            let row_slice = dst.row_mut(y);
            match reader.next_raw_row_direct(&mut row_slice[..raw_row_bytes], &prev_buf) {
                Some(Ok(())) => {}
                Some(Err(e)) => return Err(e),
                None => {
                    return Err(at!(PngError::Decode(alloc::format!(
                        "unexpected end of image data at row {y}"
                    ))));
                }
            }
            cancel.check().map_err(|e| at!(PngError::from(e)))?;
        }
        drop(dst);
    } else {
        // General path: defilter into a scratch buffer, post-process into
        // the output layout, then copy into the sink (clamped to both
        // buffers' lengths so neither side is overrun).
        let out_bpp = descriptor.bytes_per_pixel();
        let out_row_bytes = w as usize * out_bpp;
        let mut row_buf = Vec::new();
        let mut raw_copy = alloc::vec![0u8; ihdr.raw_row_bytes()?];
        let mut dst = sink
            .provide_next_buffer(0, h, w, descriptor)
            .map_err(wrap_sink)?;
        let mut y = 0u32;
        // NOTE(review): the loop trusts the reader to yield at most `h`
        // rows; `dst.row_mut(y)` with y >= h would be out of range.
        while let Some(result) = reader.next_raw_row() {
            let raw = result?;
            cancel.check().map_err(|e| at!(PngError::from(e)))?;
            // Copy out of the reader's borrow before calling back into it.
            raw_copy[..raw.len()].copy_from_slice(raw);
            post_process_row(
                &raw_copy[..ihdr.raw_row_bytes()?],
                &ihdr,
                reader.ancillary(),
                &mut row_buf,
            );
            let sink_row = dst.row_mut(y);
            let copy_len = out_row_bytes.min(row_buf.len()).min(sink_row.len());
            sink_row[..copy_len].copy_from_slice(&row_buf[..copy_len]);
            y += 1;
        }
        drop(dst);
    }
    // Finalize metadata parsing on the reader (chunks after the image data).
    reader.finish_metadata();
    sink.finish().map_err(wrap_sink)?;
    let has_alpha = descriptor.has_alpha();
    Ok(OutputInfo::full_decode(w, h, descriptor).with_alpha(has_alpha))
}
/// Row-at-a-time PNG decoder; yields one scanline per `next_batch` call.
pub struct PngStreamingDecoder<'a> {
    reader: crate::decoder::row::RowDecoder<'a>,
    info: ImageInfo,
    // Output pixel layout (always the image's native descriptor).
    descriptor: PixelDescriptor,
    // Holds the current post-processed output row; returned slices borrow it.
    row_buf: Vec<u8>,
    // Copy of the current defiltered raw row, decoupled from the reader's borrow.
    raw_copy: Vec<u8>,
    // Next row index to produce.
    y: u32,
    width: u32,
    height: u32,
    // True when raw rows already match the output layout
    // (8-bit RGB/RGBA without tRNS).
    is_passthrough: bool,
    stop: Option<zencodec::StopToken>,
}
impl<'a> PngStreamingDecoder<'a> {
    /// Validate the input and set up row-by-row decoding.
    ///
    /// Interlaced (Adam7) files are rejected — their rows cannot be emitted
    /// one at a time in display order.
    ///
    /// NOTE(review): `_preferred` is ignored; streaming output is always the
    /// native descriptor — confirm this matches the advertised capabilities.
    fn new(
        data: Cow<'a, [u8]>,
        config: &PngDecoderConfig,
        stop: Option<zencodec::StopToken>,
        limits: Option<&ResourceLimits>,
        policy: Option<&zencodec::decode::DecodePolicy>,
        _preferred: &[PixelDescriptor],
    ) -> Result<Self, At<PngError>> {
        check_progressive_policy(&data, policy)?;
        // Byte 28 is the IHDR interlace flag (1 = Adam7).
        if data.len() >= 29 && data[..8] == crate::chunk::PNG_SIGNATURE && data[28] == 1 {
            return Err(at!(PngError::from(
                zencodec::UnsupportedOperation::RowLevelDecode
            )));
        }
        let cancel: &dyn enough::Stop = match &stop {
            Some(s) => s,
            None => &enough::Unstoppable,
        };
        cancel.check().map_err(|e| at!(PngError::from(e)))?;
        let effective_limits = limits.unwrap_or(&config.limits);
        effective_limits
            .check_input_size(data.len() as u64)
            .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
        let png_config = PngDecodeConfig {
            max_pixels: effective_limits.max_pixels,
            max_memory_bytes: effective_limits.max_memory_bytes,
            // Skipped for speed; a strict policy re-enables them below.
            skip_decompression_checksum: true,
            skip_critical_chunk_crc: true,
        };
        let png_config = apply_decode_policy(png_config, policy);
        // Probe before constructing the reader so `info` is available even
        // though `data` is moved into the reader.
        let probe_info = crate::decode::probe(&data)?;
        let mut info = convert_info(&probe_info);
        apply_policy_to_info(&mut info, policy);
        let reader = crate::decoder::row::RowDecoder::new(data, &png_config)?;
        let ihdr = *reader.ihdr();
        let has_trns = reader.ancillary().trns.is_some();
        let w = ihdr.width;
        let h = ihdr.height;
        effective_limits
            .check_dimensions(w, h)
            .map_err(|e| at!(PngError::LimitExceeded(alloc::format!("{e}"))))?;
        let descriptor = native_output_descriptor(ihdr.color_type, ihdr.bit_depth, has_trns);
        // 8-bit RGB/RGBA with no tRNS: rows need no post-processing.
        let is_passthrough =
            !has_trns && ihdr.bit_depth == 8 && (ihdr.color_type == 6 || ihdr.color_type == 2);
        let raw_row_bytes = ihdr.raw_row_bytes()?;
        let out_row_bytes = w as usize * descriptor.bytes_per_pixel();
        Ok(Self {
            reader,
            info,
            descriptor,
            row_buf: alloc::vec![0u8; out_row_bytes],
            raw_copy: alloc::vec![0u8; raw_row_bytes],
            y: 0,
            width: w,
            height: h,
            is_passthrough,
            stop,
        })
    }
}
impl zencodec::decode::StreamingDecode for PngStreamingDecoder<'_> {
    type Error = At<PngError>;

    /// Decode and return the next scanline as a 1-row pixel slice borrowing
    /// this decoder's internal buffer.
    ///
    /// Returns `Ok(None)` once all rows have been produced or the reader
    /// runs out of rows.
    fn next_batch(&mut self) -> Result<Option<(u32, PixelSlice<'_>)>, At<PngError>> {
        use crate::decoder::postprocess::post_process_row;
        if self.y >= self.height {
            return Ok(None);
        }
        if let Some(ref stop) = self.stop {
            let cancel: &dyn enough::Stop = stop;
            cancel.check().map_err(|e| at!(PngError::from(e)))?;
        }
        let raw = match self.reader.next_raw_row() {
            Some(Ok(row)) => row,
            Some(Err(e)) => return Err(e),
            None => return Ok(None),
        };
        let y = self.y;
        self.y += 1;
        if self.is_passthrough {
            // Raw row already matches the output layout; clamp defensively.
            let copy_len = raw.len().min(self.row_buf.len());
            self.row_buf[..copy_len].copy_from_slice(&raw[..copy_len]);
        } else {
            // Copy `raw` out first so `self.reader` can be re-borrowed below.
            self.raw_copy[..raw.len()].copy_from_slice(raw);
            let raw_len = self.reader.ihdr().raw_row_bytes()?;
            // Move `row_buf` out temporarily to avoid aliasing borrows of
            // `self` while the reader is also borrowed.
            let mut tmp = core::mem::take(&mut self.row_buf);
            post_process_row(
                &self.raw_copy[..raw_len],
                self.reader.ihdr(),
                self.reader.ancillary(),
                &mut tmp,
            );
            self.row_buf = tmp;
        }
        let stride = self.width as usize * self.descriptor.bytes_per_pixel();
        let slice = PixelSlice::new(
            &self.row_buf[..stride],
            self.width,
            1,
            stride,
            self.descriptor,
        )
        .map_err(|e| at!(PngError::InvalidInput(alloc::format!("pixel slice: {e}"))))?;
        Ok(Some((y, slice)))
    }

    fn info(&self) -> &ImageInfo {
        &self.info
    }
}
/// Frame-by-frame APNG decoder. Owns a copy of the encoded file plus a
/// resumable decoder state; a borrowing `ApngDecoder` is rebuilt from that
/// state for each frame.
pub struct PngAnimationFrameDecoder {
    file_data: Vec<u8>,
    info: ImageInfo,
    decoder_state: crate::decoder::apng::ApngDecoderState,
    // Caller-preferred output layouts; empty means native.
    preferred: Vec<PixelDescriptor>,
    // Most recently decoded frame, kept so returned slices can borrow it.
    canvas: Option<PixelBuffer>,
    stop: Option<zencodec::StopToken>,
    // Frames before this index are decoded but not yielded.
    start_frame_index: u32,
    frames_decoded: u32,
}
impl PngAnimationFrameDecoder {
    /// Probe the file, construct an APNG decoder to validate the stream, and
    /// keep only its resumable state (the decoder itself borrows `data`).
    ///
    /// NOTE(review): pixel/memory caps are taken from `config.limits`; any
    /// per-job `ResourceLimits` override is not threaded through here —
    /// confirm that is intended.
    fn new(
        data: &[u8],
        config: &PngDecoderConfig,
        stop: Option<zencodec::StopToken>,
        policy: Option<&zencodec::decode::DecodePolicy>,
        preferred: &[PixelDescriptor],
        start_frame_index: u32,
    ) -> Result<Self, At<PngError>> {
        let probe_info = crate::decode::probe(data)?;
        let mut image_info = convert_info(&probe_info);
        apply_policy_to_info(&mut image_info, policy);
        let decode_config = PngDecodeConfig {
            max_pixels: config.limits.max_pixels,
            max_memory_bytes: config.limits.max_memory_bytes,
            skip_decompression_checksum: true,
            skip_critical_chunk_crc: true,
        };
        let decoder = crate::decoder::apng::ApngDecoder::new(data, &decode_config)?;
        let decoder_state = decoder.save_state();
        Ok(Self {
            // Owned copy so `Self` is independent of the caller's buffer.
            file_data: data.to_vec(),
            info: image_info,
            decoder_state,
            preferred: preferred.to_vec(),
            canvas: None,
            stop,
            start_frame_index,
            frames_decoded: 0,
        })
    }
}
impl zencodec::decode::AnimationFrameDecoder for PngAnimationFrameDecoder {
    type Error = At<PngError>;

    fn wrap_sink_error(err: zencodec::decode::SinkError) -> At<PngError> {
        at!(PngError::InvalidInput(alloc::format!("sink error: {err}")))
    }

    fn info(&self) -> &ImageInfo {
        &self.info
    }

    /// Total frame count recorded in the saved decoder state.
    fn frame_count(&self) -> Option<u32> {
        Some(self.decoder_state.num_frames)
    }

    /// Number of animation plays recorded in the saved decoder state.
    fn loop_count(&self) -> Option<u32> {
        Some(self.decoder_state.num_plays)
    }

    fn render_next_frame_to_sink(
        &mut self,
        stop: Option<&dyn enough::Stop>,
        sink: &mut dyn zencodec::decode::DecodeRowSink,
    ) -> Result<Option<OutputInfo>, At<PngError>> {
        zencodec::helpers::copy_frame_to_sink(self, stop, sink)
    }

    /// Decode frames until one at/after `start_frame_index` is produced;
    /// returns `Ok(None)` when the animation is exhausted.
    fn render_next_frame(
        &mut self,
        stop: Option<&dyn enough::Stop>,
    ) -> Result<Option<AnimationFrame<'_>>, At<PngError>> {
        // A per-call stop token wins over the one stored at construction.
        let cancel: &dyn enough::Stop = if let Some(s) = stop {
            s
        } else if let Some(ref s) = self.stop {
            s as &dyn enough::Stop
        } else {
            &enough::Unstoppable
        };
        loop {
            // Rebuild a borrowing decoder from the saved state on each
            // iteration so `Self` never holds a self-referential borrow.
            let mut decoder = crate::decoder::apng::ApngDecoder::from_state(
                &self.file_data,
                self.decoder_state.clone(),
            );
            let raw = match decoder.next_frame(cancel)? {
                Some(f) => f,
                None => return Ok(None),
            };
            // Capture the frame index *before* saving the advanced state.
            let idx = self.decoder_state.current_frame;
            self.decoder_state = decoder.save_state();
            self.frames_decoded += 1;
            // Frames before the start index are decoded (to advance state)
            // but skipped.
            if idx < self.start_frame_index {
                continue;
            }
            let delay_ms = raw.fctl.delay_ms();
            let pixels = if self.preferred.is_empty() {
                raw.pixels
            } else {
                negotiate_and_convert(raw.pixels, &self.preferred)
            };
            // Stash the frame in `canvas` so the returned slice can borrow
            // from `self` for the caller's lifetime.
            self.canvas = Some(pixels);
            let canvas = self.canvas.as_ref().unwrap();
            let pixel_slice = canvas.as_slice();
            let frame = AnimationFrame::new(pixel_slice, delay_ms, idx);
            return Ok(Some(frame));
        }
    }
}
use rgb::{Gray, Rgb, Rgba};
use zenpixels::{ChannelLayout, ChannelType, GrayAlpha16, PixelBuffer};
use zenpixels_convert::{PixelBufferConvertExt as _, PixelBufferConvertTypedExt as _};
/// Convert `pixels` to the best match between the caller's preferences and
/// this codec's supported descriptors. Best-effort: when no target can be
/// negotiated, the target equals the native layout, or conversion fails, the
/// native buffer is returned unchanged.
fn negotiate_and_convert(pixels: PixelBuffer, preferred: &[PixelDescriptor]) -> PixelBuffer {
    let Some(target) = zencodec::decode::negotiate_pixel_format(preferred, DECODE_DESCRIPTORS)
    else {
        return pixels;
    };
    if pixels.descriptor() == target {
        return pixels;
    }
    match pixels.convert_to(target) {
        Ok(converted) => converted,
        Err(_) => pixels,
    }
}
#[allow(unused_imports)]
use zenpixels_convert::PixelBufferConvertTypedExt as _;
/// Convert any pixel buffer to a tightly-packed RGB8 image.
fn to_rgb8(pixels: PixelBuffer) -> imgref::ImgVec<Rgb<u8>> {
    let rgb = pixels.to_rgb8();
    let (w, h) = (rgb.width() as usize, rgb.height() as usize);
    let collected: Vec<Rgb<u8>> = rgb.as_imgref().pixels().collect();
    imgref::ImgVec::new(collected, w, h)
}
/// Convert any pixel buffer to a tightly-packed RGBA8 image.
fn to_rgba8(pixels: PixelBuffer) -> imgref::ImgVec<Rgba<u8>> {
    let rgba = pixels.to_rgba8();
    let (w, h) = (rgba.width() as usize, rgba.height() as usize);
    let collected: Vec<Rgba<u8>> = rgba.as_imgref().pixels().collect();
    imgref::ImgVec::new(collected, w, h)
}
/// Convert any pixel buffer to a tightly-packed 8-bit grayscale image.
fn to_gray8(pixels: PixelBuffer) -> imgref::ImgVec<Gray<u8>> {
    let gray = pixels.to_gray8();
    let (w, h) = (gray.width() as usize, gray.height() as usize);
    let collected: Vec<Gray<u8>> = gray.as_imgref().pixels().collect();
    imgref::ImgVec::new(collected, w, h)
}
/// Convert any pixel buffer to a tightly-packed BGRA8 image.
fn to_bgra8(pixels: PixelBuffer) -> imgref::ImgVec<rgb::alt::BGRA<u8>> {
    let bgra = pixels.to_bgra8();
    let (w, h) = (bgra.width() as usize, bgra.height() as usize);
    let collected: Vec<rgb::alt::BGRA<u8>> = bgra.as_imgref().pixels().collect();
    imgref::ImgVec::new(collected, w, h)
}
fn convert_info(info: &crate::decode::PngInfo) -> ImageInfo {
let mut zi = ImageInfo::new(info.width, info.height, ImageFormat::Png);
if info.has_alpha {
zi = zi.with_alpha(true);
}
zi = zi.with_sequence(info.sequence.clone());
zi = zi.with_bit_depth(info.bit_depth);
let channel_count = match info.color_type {
0 => 1,
2 => 3,
3 => 1,
4 => 2,
6 => 4,
_ => 1,
};
zi = zi.with_channel_count(channel_count);
if let (Some(ppx), Some(ppy)) = (info.pixels_per_unit_x, info.pixels_per_unit_y) {
let unit = match info.phys_unit {
Some(crate::decode::PhysUnit::Meter) => zencodec::ResolutionUnit::Meter,
_ => zencodec::ResolutionUnit::Unknown,
};
zi = zi.with_resolution(zencodec::Resolution {
x: ppx as f64,
y: ppy as f64,
unit,
});
}
if let Some(ref icc) = info.icc_profile {
zi = zi.with_icc_profile(icc.clone());
}
if let Some(ref exif) = info.exif {
zi = zi.with_exif(exif.clone());
}
if let Some(ref xmp) = info.xmp {
zi = zi.with_xmp(xmp.clone());
}
if let Some(cicp) = info.cicp {
zi = zi.with_cicp(cicp);
}
if let Some(clli) = info.content_light_level {
zi = zi.with_content_light_level(clli);
}
if let Some(mdcv) = info.mastering_display {
zi = zi.with_mastering_display(mdcv);
}
zi = zi.with_progressive(info.interlaced);
zi
}
/// Clone `metadata` with any items the encode policy explicitly disables
/// (ICC / EXIF / XMP) stripped out. `None` metadata stays `None`; absent
/// policy means keep everything.
fn apply_encode_policy(
    metadata: Option<&Metadata>,
    policy: Option<&zencodec::encode::EncodePolicy>,
) -> Option<Metadata> {
    let mut meta = metadata?.clone();
    if let Some(policy) = policy {
        if policy.embed_icc == Some(false) {
            meta.icc_profile = None;
        }
        if policy.embed_exif == Some(false) {
            meta.exif = None;
        }
        if policy.embed_xmp == Some(false) {
            meta.xmp = None;
        }
    }
    Some(meta)
}
/// When the decode policy demands strict parsing, re-enable the CRC and
/// decompression-checksum validation that the fast path skips by default.
fn apply_decode_policy(
    mut config: PngDecodeConfig,
    policy: Option<&zencodec::decode::DecodePolicy>,
) -> PngDecodeConfig {
    if policy.is_some_and(|p| p.strict == Some(true)) {
        config.skip_critical_chunk_crc = false;
        config.skip_decompression_checksum = false;
    }
    config
}
/// Strip metadata out of `info` that the decode policy asks not to surface
/// (ICC profile, EXIF, XMP). No policy means no filtering.
fn apply_policy_to_info(info: &mut ImageInfo, policy: Option<&zencodec::decode::DecodePolicy>) {
    if let Some(policy) = policy {
        if !policy.resolve_icc(true) {
            info.source_color.icc_profile = None;
        }
        if !policy.resolve_exif(true) {
            info.embedded_metadata.exif = None;
        }
        if !policy.resolve_xmp(true) {
            info.embedded_metadata.xmp = None;
        }
    }
}
/// Reject interlaced (progressive) PNGs when the decode policy forbids them.
fn check_progressive_policy(
    data: &[u8],
    policy: Option<&zencodec::decode::DecodePolicy>,
) -> Result<(), At<PngError>> {
    // No policy, or a policy that allows progressive input: nothing to do.
    if policy.map_or(true, |p| p.resolve_progressive(true)) {
        return Ok(());
    }
    // Byte 28 is the IHDR interlace flag (8-byte signature + 8-byte chunk
    // header + 12 bytes into the 13-byte IHDR payload); 1 means Adam7.
    let interlaced =
        data.len() >= 29 && data[..8] == crate::chunk::PNG_SIGNATURE && data[28] == 1;
    if interlaced {
        return Err(at!(PngError::InvalidInput(
            "interlaced (progressive) PNG rejected by decode policy".into()
        )));
    }
    Ok(())
}
/// Forward to `PixelSlice::contiguous_bytes`. Returns a `Cow` — presumably
/// borrowed when the slice is already contiguous and owned otherwise;
/// verify against the `zenpixels` documentation.
fn contiguous_bytes<'a>(pixels: &'a PixelSlice<'a>) -> alloc::borrow::Cow<'a, [u8]> {
    pixels.contiguous_bytes()
}
fn pixel_format_to_png(
format: zenpixels::PixelFormat,
) -> Option<(crate::encode::ColorType, crate::encode::BitDepth)> {
use zenpixels::PixelFormat;
match format {
PixelFormat::Rgb8 => Some((
crate::encode::ColorType::Rgb,
crate::encode::BitDepth::Eight,
)),
PixelFormat::Rgba8 => Some((
crate::encode::ColorType::Rgba,
crate::encode::BitDepth::Eight,
)),
PixelFormat::Gray8 => Some((
crate::encode::ColorType::Grayscale,
crate::encode::BitDepth::Eight,
)),
PixelFormat::Rgb16 => Some((
crate::encode::ColorType::Rgb,
crate::encode::BitDepth::Sixteen,
)),
PixelFormat::Rgba16 => Some((
crate::encode::ColorType::Rgba,
crate::encode::BitDepth::Sixteen,
)),
PixelFormat::Gray16 => Some((
crate::encode::ColorType::Grayscale,
crate::encode::BitDepth::Sixteen,
)),
PixelFormat::RgbF32 => Some((
crate::encode::ColorType::Rgb,
crate::encode::BitDepth::Eight,
)),
PixelFormat::RgbaF32 => Some((
crate::encode::ColorType::Rgba,
crate::encode::BitDepth::Eight,
)),
PixelFormat::GrayF32 => Some((
crate::encode::ColorType::Grayscale,
crate::encode::BitDepth::Eight,
)),
PixelFormat::Bgra8 => Some((
crate::encode::ColorType::Rgba,
crate::encode::BitDepth::Eight,
)),
_ => None,
}
}
impl TrueStreamingState {
    /// Streaming PNG encoder state for the "effort 0" path: scanlines are
    /// wrapped in a zlib stream of *stored* (uncompressed) DEFLATE blocks
    /// inside a single IDAT chunk, so bytes can be appended incrementally
    /// with no compressor involved.
    #[allow(clippy::too_many_arguments)]
    fn new(
        width: u32,
        height: u32,
        color_type: crate::encode::ColorType,
        bit_depth: crate::encode::BitDepth,
        row_bytes: usize,
        metadata: Option<&Metadata>,
        policy: Option<&zencodec::encode::EncodePolicy>,
        config: &EncodeConfig,
    ) -> Result<Self, At<PngError>> {
        use crate::chunk::{PNG_SIGNATURE, write::write_chunk};
        use crate::encoder::{PngWriteMetadata, metadata_size_estimate, write_all_metadata};
        // Each scanline is prefixed by one filter-type byte.
        let filtered_row = row_bytes + 1;
        let total_filtered = filtered_row * height as usize;
        // A stored DEFLATE block holds at most 65535 bytes of payload.
        let num_blocks = if total_filtered == 0 {
            1
        } else {
            total_filtered.div_ceil(65535)
        };
        // zlib framing: 2-byte header + 5-byte stored-block header per block
        // + payload + 4-byte Adler-32 trailer.
        let idat_data_len = 2 + 5 * num_blocks + total_filtered + 4;
        // PNG chunk lengths are u32 and this path emits exactly one IDAT.
        if idat_data_len > u32::MAX as usize {
            return Err(at!(PngError::LimitExceeded(
                "image too large for single IDAT chunk at effort 0".into(),
            )));
        }
        // Apply the encode policy's metadata filtering, then merge in the
        // config-level PNG metadata (gamma, chromaticities, pHYs, text, ...).
        let effective_meta = apply_encode_policy(metadata, policy);
        let mut write_meta = PngWriteMetadata::from_metadata(effective_meta.as_ref());
        write_meta.source_gamma = config.source_gamma;
        write_meta.srgb_intent = config.srgb_intent;
        write_meta.chromaticities = config.chromaticities;
        write_meta.pixels_per_unit_x = config.pixels_per_unit_x;
        write_meta.pixels_per_unit_y = config.pixels_per_unit_y;
        write_meta.phys_unit = config.phys_unit;
        write_meta.text_chunks.clone_from(&config.text_chunks);
        write_meta.last_modified = config.last_modified;
        // Capacity estimate: signature + IHDR chunk + IDAT + IEND + metadata.
        let est = 8 + 25 + (12 + idat_data_len) + 12 + metadata_size_estimate(&write_meta);
        let mut output = Vec::with_capacity(est);
        output.extend_from_slice(&PNG_SIGNATURE);
        // 13-byte IHDR payload: width, height (big-endian), bit depth,
        // color type, then compression/filter/interlace left at 0.
        let mut ihdr = [0u8; 13];
        ihdr[0..4].copy_from_slice(&width.to_be_bytes());
        ihdr[4..8].copy_from_slice(&height.to_be_bytes());
        ihdr[8] = match bit_depth {
            crate::encode::BitDepth::Eight => 8,
            crate::encode::BitDepth::Sixteen => 16,
        };
        // IHDR color-type codes per the PNG spec.
        ihdr[9] = match color_type {
            crate::encode::ColorType::Grayscale => 0,
            crate::encode::ColorType::Rgb => 2,
            crate::encode::ColorType::GrayscaleAlpha => 4,
            crate::encode::ColorType::Rgba => 6,
        };
        write_chunk(&mut output, b"IHDR", &ihdr);
        write_all_metadata(&mut output, &write_meta)?;
        // Remember where the IDAT length field starts so `finish` can
        // compute the chunk CRC over the type+data bytes that follow it.
        let idat_len_pos = output.len();
        output.extend_from_slice(&(idat_data_len as u32).to_be_bytes());
        output.extend_from_slice(b"IDAT");
        // zlib stream header bytes (deflate method, default window).
        output.extend_from_slice(&[0x78, 0x01]);
        Ok(Self {
            output,
            convert_buf: vec![0u8; row_bytes],
            row_bytes,
            rows_pushed: 0,
            adler: 1, // Adler-32 initial value
            idat_len_pos,
            block_remaining: 0,
            filtered_remaining: total_filtered,
        })
    }

    /// Append `convert_buf` as one scanline (filter type 0 = None),
    /// splitting across stored-block boundaries as needed.
    fn push_converted_row(&mut self) {
        // Open a new stored block if the previous one is exhausted.
        if self.block_remaining == 0 {
            let block_len = self.filtered_remaining.min(65535);
            let is_final = block_len >= self.filtered_remaining;
            write_stored_block_header(&mut self.output, block_len, is_final);
            self.block_remaining = block_len;
        }
        // Filter-type byte: 0 (None).
        self.output.push(0u8);
        self.block_remaining -= 1;
        self.filtered_remaining -= 1;
        let mut pos = 0;
        let row_bytes = self.row_bytes;
        while pos < row_bytes {
            if self.block_remaining == 0 {
                let block_len = self.filtered_remaining.min(65535);
                let is_final = block_len >= self.filtered_remaining;
                write_stored_block_header(&mut self.output, block_len, is_final);
                self.block_remaining = block_len;
            }
            let n = (row_bytes - pos).min(self.block_remaining);
            self.output
                .extend_from_slice(&self.convert_buf[pos..pos + n]);
            pos += n;
            self.block_remaining -= n;
            self.filtered_remaining -= n;
        }
        // Fold the filter byte (0) into the running Adler-32 by hand:
        // s1 += 0 (unchanged), s2 += s1. Assumes zenflate::adler32 keeps
        // s1 reduced below 65521, as a conforming Adler-32 does.
        let s1 = self.adler & 0xFFFF;
        let s2 = ((self.adler >> 16) + s1) % 65521;
        self.adler = (s2 << 16) | s1;
        self.adler = zenflate::adler32(self.adler, &self.convert_buf[..row_bytes]);
        self.rows_pushed += 1;
    }

    /// Same as `push_converted_row` but for a caller-supplied row slice.
    fn push_raw_row(&mut self, row: &[u8]) {
        debug_assert_eq!(row.len(), self.row_bytes);
        if self.block_remaining == 0 {
            let block_len = self.filtered_remaining.min(65535);
            let is_final = block_len >= self.filtered_remaining;
            write_stored_block_header(&mut self.output, block_len, is_final);
            self.block_remaining = block_len;
        }
        // Filter-type byte: 0 (None).
        self.output.push(0u8);
        self.block_remaining -= 1;
        self.filtered_remaining -= 1;
        let mut data = row;
        while !data.is_empty() {
            if self.block_remaining == 0 {
                let block_len = self.filtered_remaining.min(65535);
                let is_final = block_len >= self.filtered_remaining;
                write_stored_block_header(&mut self.output, block_len, is_final);
                self.block_remaining = block_len;
            }
            let n = data.len().min(self.block_remaining);
            self.output.extend_from_slice(&data[..n]);
            data = &data[n..];
            self.block_remaining -= n;
            self.filtered_remaining -= n;
        }
        // Fold the filter byte (0) into Adler-32 by hand (see above).
        let s1 = self.adler & 0xFFFF;
        let s2 = ((self.adler >> 16) + s1) % 65521;
        self.adler = (s2 << 16) | s1;
        self.adler = zenflate::adler32(self.adler, row);
        self.rows_pushed += 1;
    }

    /// Terminate the stream: Adler-32 trailer closes the zlib data, the
    /// IDAT CRC is computed over the chunk type + data (per the PNG spec),
    /// and an empty IEND chunk ends the file.
    fn finish(mut self) -> Vec<u8> {
        self.output.extend_from_slice(&self.adler.to_be_bytes());
        // idat_len_pos + 4 skips the 4-byte length field; the CRC covers
        // the "IDAT" tag plus all chunk data.
        let crc_start = self.idat_len_pos + 4;
        let crc = zenflate::crc32(0, &self.output[crc_start..]);
        self.output.extend_from_slice(&crc.to_be_bytes());
        crate::chunk::write::write_chunk(&mut self.output, b"IEND", &[]);
        self.output
    }
}
impl PreFilteredState {
    /// Buffered PNG encoder state: scanlines are filtered as they arrive
    /// into `filtered_data`, and the whole filtered stream is compressed
    /// once in `finish`.
    #[allow(clippy::too_many_arguments)]
    fn new(
        width: u32,
        height: u32,
        color_type: crate::encode::ColorType,
        bit_depth: crate::encode::BitDepth,
        row_bytes: usize,
        bpp: usize,
        metadata: Option<&Metadata>,
        policy: Option<&zencodec::encode::EncodePolicy>,
        config: &EncodeConfig,
    ) -> Result<Self, At<PngError>> {
        use crate::chunk::{PNG_SIGNATURE, write::write_chunk};
        use crate::encoder::{PngWriteMetadata, metadata_size_estimate, write_all_metadata};
        // Apply the encode policy's metadata filtering, then merge in the
        // config-level PNG metadata (gamma, chromaticities, pHYs, text, ...).
        let effective_meta = apply_encode_policy(metadata, policy);
        let mut write_meta = PngWriteMetadata::from_metadata(effective_meta.as_ref());
        write_meta.source_gamma = config.source_gamma;
        write_meta.srgb_intent = config.srgb_intent;
        write_meta.chromaticities = config.chromaticities;
        write_meta.pixels_per_unit_x = config.pixels_per_unit_x;
        write_meta.pixels_per_unit_y = config.pixels_per_unit_y;
        write_meta.phys_unit = config.phys_unit;
        write_meta.text_chunks.clone_from(&config.text_chunks);
        write_meta.last_modified = config.last_modified;
        // Capacity estimate: signature + IHDR chunk + metadata chunks.
        let est = 8 + 25 + metadata_size_estimate(&write_meta);
        let mut preamble = Vec::with_capacity(est);
        preamble.extend_from_slice(&PNG_SIGNATURE);
        // 13-byte IHDR payload; compression/filter/interlace remain 0.
        let mut ihdr = [0u8; 13];
        ihdr[0..4].copy_from_slice(&width.to_be_bytes());
        ihdr[4..8].copy_from_slice(&height.to_be_bytes());
        ihdr[8] = match bit_depth {
            crate::encode::BitDepth::Eight => 8,
            crate::encode::BitDepth::Sixteen => 16,
        };
        // IHDR color-type codes per the PNG spec.
        ihdr[9] = match color_type {
            crate::encode::ColorType::Grayscale => 0,
            crate::encode::ColorType::Rgb => 2,
            crate::encode::ColorType::GrayscaleAlpha => 4,
            crate::encode::ColorType::Rgba => 6,
        };
        write_chunk(&mut preamble, b"IHDR", &ihdr);
        write_all_metadata(&mut preamble, &write_meta)?;
        // One filter-type byte per scanline.
        let filtered_row = row_bytes + 1;
        let total_filtered = filtered_row * height as usize;
        Ok(Self {
            preamble,
            filtered_data: Vec::with_capacity(total_filtered),
            // First row filters against an all-zero predecessor.
            prev_row: vec![0u8; row_bytes],
            convert_buf: vec![0u8; row_bytes],
            filter_type: 4, // fixed PNG filter 4 (Paeth)
            bpp,
            row_bytes,
            rows_pushed: 0,
            color_type,
            bit_depth,
            zenflate_effort: 1, // low compression effort, favoring speed
        })
    }

    /// Filter `row` against the previous row and append it (with its
    /// filter-type byte) to the buffered filtered stream.
    fn push_raw_row(&mut self, row: &[u8]) {
        debug_assert_eq!(row.len(), self.row_bytes);
        self.filtered_data.push(self.filter_type);
        let start = self.filtered_data.len();
        // Reserve the destination bytes, then filter in place.
        self.filtered_data.resize(start + self.row_bytes, 0);
        crate::encoder::filter::apply_filter(
            self.filter_type,
            row,
            &self.prev_row,
            self.bpp,
            &mut self.filtered_data[start..],
        );
        // This row becomes the predecessor for the next one.
        self.prev_row.copy_from_slice(row);
        self.rows_pushed += 1;
    }

    /// Same as `push_raw_row` but sourcing the scanline from `convert_buf`.
    fn push_converted_row(&mut self) {
        self.filtered_data.push(self.filter_type);
        let start = self.filtered_data.len();
        let row_bytes = self.row_bytes;
        self.filtered_data.resize(start + row_bytes, 0);
        crate::encoder::filter::apply_filter(
            self.filter_type,
            &self.convert_buf[..row_bytes],
            &self.prev_row,
            self.bpp,
            &mut self.filtered_data[start..],
        );
        self.prev_row
            .copy_from_slice(&self.convert_buf[..row_bytes]);
        self.rows_pushed += 1;
    }

    /// Compress the buffered filtered stream into a single IDAT chunk and
    /// append IEND. Compression is cancellable via `cancel`.
    fn finish(self, cancel: &dyn enough::Stop) -> Result<Vec<u8>, At<PngError>> {
        use crate::chunk::write::write_chunk;
        let Self {
            preamble,
            filtered_data,
            zenflate_effort,
            ..
        } = self;
        let level = zenflate::CompressionLevel::new(zenflate_effort);
        let mut compressor = zenflate::Compressor::new(level);
        // Worst-case zlib output size, so one allocation suffices.
        let bound = zenflate::Compressor::zlib_compress_bound(filtered_data.len());
        let mut compressed = vec![0u8; bound];
        let len = compressor
            .zlib_compress(&filtered_data, &mut compressed, cancel)
            .map_err(|e| match e {
                // Preserve the cancellation reason for the caller.
                zenflate::CompressionError::Stopped(reason) => PngError::Stopped(reason),
                other => PngError::InvalidInput(alloc::format!("compression failed: {other}")),
            })?;
        compressed.truncate(len);
        // Release the (possibly large) filtered buffer before assembling
        // the final output.
        drop(filtered_data);
        let mut out = preamble;
        out.reserve(12 + compressed.len() + 12);
        write_chunk(&mut out, b"IDAT", &compressed);
        write_chunk(&mut out, b"IEND", &[]);
        Ok(out)
    }
}
/// Append a DEFLATE *stored* (uncompressed) block header: one byte carrying
/// BFINAL (BTYPE=00 for stored), then LEN and NLEN (one's complement of LEN)
/// as little-endian u16s. Only the low 16 bits of `len` are encoded.
fn write_stored_block_header(out: &mut Vec<u8>, len: usize, is_final: bool) {
    out.push(u8::from(is_final));
    let len16 = (len & 0xFFFF) as u16;
    out.extend_from_slice(&len16.to_le_bytes());
    out.extend_from_slice(&(!len16).to_le_bytes());
}
/// Copy a typed pixel image into an erased pixel slice row by row.
/// Stops at the shorter of the two heights and clamps every row to the
/// shorter byte length so neither buffer is overrun.
fn copy_rows_u8<P: Copy>(src: &imgref::ImgVec<P>, dst: &mut PixelSliceMut<'_>)
where
    [P]: rgb::ComponentBytes<u8>,
{
    use rgb::ComponentBytes;
    let rows = src.height().min(dst.rows() as usize);
    for row in 0..rows {
        let pixels = &src.buf()[row * src.stride()..][..src.width()];
        let bytes = pixels.as_bytes();
        let out = dst.row_mut(row as u32);
        let len = bytes.len().min(out.len());
        out[..len].copy_from_slice(&bytes[..len]);
    }
}
/// Expand decoded pixels to linear f32 RGB (native-endian bytes, 12 bytes
/// per pixel) directly into `dst`, clamped to `dst`'s bounds.
fn decode_into_rgb_f32(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    use linear_srgb::default::srgb_u8_to_linear;
    let src = to_rgb8(pixels);
    let rows = src.height().min(dst.rows() as usize);
    for y in 0..rows {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        for (i, px) in src_row.iter().enumerate() {
            let off = i * 12;
            // Stop when the destination row can't hold another pixel.
            let Some(out) = dst_row.get_mut(off..off + 12) else {
                break;
            };
            out[0..4].copy_from_slice(&srgb_u8_to_linear(px.r).to_ne_bytes());
            out[4..8].copy_from_slice(&srgb_u8_to_linear(px.g).to_ne_bytes());
            out[8..12].copy_from_slice(&srgb_u8_to_linear(px.b).to_ne_bytes());
        }
    }
}
/// Expand decoded pixels to linear f32 RGBA (native-endian bytes, 16 bytes
/// per pixel) directly into `dst`, clamped to `dst`'s bounds. Alpha is
/// rescaled linearly to 0.0..=1.0 (no sRGB transfer applied).
fn decode_into_rgba_f32(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    use linear_srgb::default::srgb_u8_to_linear;
    let src = to_rgba8(pixels);
    let rows = src.height().min(dst.rows() as usize);
    for y in 0..rows {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        for (i, px) in src_row.iter().enumerate() {
            let off = i * 16;
            // Stop when the destination row can't hold another pixel.
            let Some(out) = dst_row.get_mut(off..off + 16) else {
                break;
            };
            out[0..4].copy_from_slice(&srgb_u8_to_linear(px.r).to_ne_bytes());
            out[4..8].copy_from_slice(&srgb_u8_to_linear(px.g).to_ne_bytes());
            out[8..12].copy_from_slice(&srgb_u8_to_linear(px.b).to_ne_bytes());
            let alpha = px.a as f32 / 255.0;
            out[12..16].copy_from_slice(&alpha.to_ne_bytes());
        }
    }
}
/// Converts the decoded pixels to 8-bit grayscale, then writes each sample
/// as a linearized native-endian f32 into `dst` (4 bytes per pixel).
/// Rows or pixels that do not fit the destination are silently skipped.
fn decode_into_gray_f32(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    use linear_srgb::default::srgb_u8_to_linear;
    let src = to_gray8(pixels);
    let row_count = src.height().min(dst.rows() as usize);
    for y in 0..row_count {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        // 4 bytes per destination pixel: one native-endian f32 sample.
        for (chunk, px) in dst_row.chunks_exact_mut(4).zip(src_row.iter()) {
            chunk.copy_from_slice(&srgb_u8_to_linear(px.value()).to_ne_bytes());
        }
    }
}
/// Re-encodes a buffer of native-endian 16-bit samples as big-endian
/// (the byte order PNG uses for 16-bit channels). On big-endian targets
/// this is a plain copy; otherwise every byte pair is swapped in place.
/// A trailing odd byte, if any, passes through unchanged.
fn native_to_be_16(native: &[u8]) -> Vec<u8> {
    let mut bytes = native.to_vec();
    if !cfg!(target_endian = "big") {
        for pair in bytes.chunks_exact_mut(2) {
            pair.swap(0, 1);
        }
    }
    bytes
}
/// Converts an arbitrary `PixelBuffer` into an owned 16-bit RGB image.
/// Native u16 layouts are converted directly (gray replicated across
/// channels, alpha dropped); anything else falls back through `to_rgb8`
/// and widens each 8-bit channel with `* 257` (0xFF -> 0xFFFF).
fn to_rgb16(pixels: PixelBuffer) -> imgref::ImgVec<Rgb<u16>> {
    let descriptor = pixels.descriptor();
    let (width, height) = (pixels.width() as usize, pixels.height() as usize);
    let data: Vec<Rgb<u16>> = match (descriptor.channel_type(), descriptor.layout()) {
        (ChannelType::U16, ChannelLayout::Rgb) => {
            pixels.try_as_imgref::<Rgb<u16>>().unwrap().pixels().collect()
        }
        (ChannelType::U16, ChannelLayout::Rgba) => pixels
            .try_as_imgref::<Rgba<u16>>()
            .unwrap()
            .pixels()
            .map(|px| Rgb { r: px.r, g: px.g, b: px.b })
            .collect(),
        (ChannelType::U16, ChannelLayout::Gray) => pixels
            .try_as_imgref::<Gray<u16>>()
            .unwrap()
            .pixels()
            .map(|px| {
                let v = px.value();
                Rgb { r: v, g: v, b: v }
            })
            .collect(),
        (ChannelType::U16, ChannelLayout::GrayAlpha) => pixels
            .try_as_imgref::<GrayAlpha16>()
            .unwrap()
            .pixels()
            .map(|px| {
                let v = px.v;
                Rgb { r: v, g: v, b: v }
            })
            .collect(),
        _ => to_rgb8(pixels)
            .into_buf()
            .into_iter()
            .map(|px| Rgb {
                r: px.r as u16 * 257,
                g: px.g as u16 * 257,
                b: px.b as u16 * 257,
            })
            .collect(),
    };
    imgref::ImgVec::new(data, width, height)
}
/// Converts an arbitrary `PixelBuffer` into an owned 16-bit RGBA image.
/// Native u16 layouts are converted directly (opaque alpha 65535 is
/// synthesized where the source has none); anything else falls back
/// through `to_rgba8` and widens each 8-bit channel with `* 257`.
fn to_rgba16(pixels: PixelBuffer) -> imgref::ImgVec<Rgba<u16>> {
    let descriptor = pixels.descriptor();
    let (width, height) = (pixels.width() as usize, pixels.height() as usize);
    let data: Vec<Rgba<u16>> = match (descriptor.channel_type(), descriptor.layout()) {
        (ChannelType::U16, ChannelLayout::Rgba) => {
            pixels.try_as_imgref::<Rgba<u16>>().unwrap().pixels().collect()
        }
        (ChannelType::U16, ChannelLayout::Rgb) => pixels
            .try_as_imgref::<Rgb<u16>>()
            .unwrap()
            .pixels()
            .map(|px| Rgba { r: px.r, g: px.g, b: px.b, a: 65535 })
            .collect(),
        (ChannelType::U16, ChannelLayout::Gray) => pixels
            .try_as_imgref::<Gray<u16>>()
            .unwrap()
            .pixels()
            .map(|px| {
                let v = px.value();
                Rgba { r: v, g: v, b: v, a: 65535 }
            })
            .collect(),
        (ChannelType::U16, ChannelLayout::GrayAlpha) => pixels
            .try_as_imgref::<GrayAlpha16>()
            .unwrap()
            .pixels()
            .map(|px| Rgba { r: px.v, g: px.v, b: px.v, a: px.a })
            .collect(),
        _ => to_rgba8(pixels)
            .into_buf()
            .into_iter()
            .map(|px| Rgba {
                r: px.r as u16 * 257,
                g: px.g as u16 * 257,
                b: px.b as u16 * 257,
                a: px.a as u16 * 257,
            })
            .collect(),
    };
    imgref::ImgVec::new(data, width, height)
}
/// Converts an arbitrary `PixelBuffer` into an owned 16-bit grayscale image.
/// Native u16 layouts are converted directly (alpha dropped; color reduced
/// via an integer luma); anything else falls back through `to_gray8` and
/// widens the 8-bit sample with `* 257`.
fn to_gray16(pixels: PixelBuffer) -> imgref::ImgVec<Gray<u16>> {
    // Integer luma with weights 77/150/29 (sum 256, so `>> 8` keeps the
    // result in u16 range); approximately BT.601 coefficients.
    let luma = |r: u16, g: u16, b: u16| {
        ((r as u32 * 77 + g as u32 * 150 + b as u32 * 29 + 128) >> 8) as u16
    };
    let descriptor = pixels.descriptor();
    let (width, height) = (pixels.width() as usize, pixels.height() as usize);
    let data: Vec<Gray<u16>> = match (descriptor.channel_type(), descriptor.layout()) {
        (ChannelType::U16, ChannelLayout::Gray) => {
            pixels.try_as_imgref::<Gray<u16>>().unwrap().pixels().collect()
        }
        (ChannelType::U16, ChannelLayout::GrayAlpha) => pixels
            .try_as_imgref::<GrayAlpha16>()
            .unwrap()
            .pixels()
            .map(|px| Gray(px.v))
            .collect(),
        (ChannelType::U16, ChannelLayout::Rgb) => pixels
            .try_as_imgref::<Rgb<u16>>()
            .unwrap()
            .pixels()
            .map(|px| Gray(luma(px.r, px.g, px.b)))
            .collect(),
        (ChannelType::U16, ChannelLayout::Rgba) => pixels
            .try_as_imgref::<Rgba<u16>>()
            .unwrap()
            .pixels()
            .map(|px| Gray(luma(px.r, px.g, px.b)))
            .collect(),
        _ => to_gray8(pixels)
            .into_buf()
            .into_iter()
            .map(|px| Gray(px.value() as u16 * 257))
            .collect(),
    };
    imgref::ImgVec::new(data, width, height)
}
/// Converts the decoded pixels to 16-bit RGB and writes native-endian u16
/// triples into `dst` (6 bytes per pixel). Rows or pixels that do not fit
/// the destination are silently skipped.
fn decode_into_rgb16(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    let src = to_rgb16(pixels);
    let row_count = src.height().min(dst.rows() as usize);
    for y in 0..row_count {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        // 6 bytes per destination pixel: three native-endian u16 channels.
        for (chunk, px) in dst_row.chunks_exact_mut(6).zip(src_row.iter()) {
            chunk[0..2].copy_from_slice(&px.r.to_ne_bytes());
            chunk[2..4].copy_from_slice(&px.g.to_ne_bytes());
            chunk[4..6].copy_from_slice(&px.b.to_ne_bytes());
        }
    }
}
/// Converts the decoded pixels to 16-bit RGBA and writes native-endian u16
/// quads into `dst` (8 bytes per pixel). Rows or pixels that do not fit
/// the destination are silently skipped.
fn decode_into_rgba16(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    let src = to_rgba16(pixels);
    let row_count = src.height().min(dst.rows() as usize);
    for y in 0..row_count {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        // 8 bytes per destination pixel: four native-endian u16 channels.
        for (chunk, px) in dst_row.chunks_exact_mut(8).zip(src_row.iter()) {
            chunk[0..2].copy_from_slice(&px.r.to_ne_bytes());
            chunk[2..4].copy_from_slice(&px.g.to_ne_bytes());
            chunk[4..6].copy_from_slice(&px.b.to_ne_bytes());
            chunk[6..8].copy_from_slice(&px.a.to_ne_bytes());
        }
    }
}
/// Converts the decoded pixels to 16-bit grayscale and writes native-endian
/// u16 samples into `dst` (2 bytes per pixel). Rows or pixels that do not
/// fit the destination are silently skipped.
fn decode_into_gray16(pixels: PixelBuffer, dst: &mut PixelSliceMut<'_>) {
    let src = to_gray16(pixels);
    let row_count = src.height().min(dst.rows() as usize);
    for y in 0..row_count {
        let src_row = &src.buf()[y * src.stride()..][..src.width()];
        let dst_row = dst.row_mut(y as u32);
        // 2 bytes per destination pixel: one native-endian u16 sample.
        for (chunk, px) in dst_row.chunks_exact_mut(2).zip(src_row.iter()) {
            chunk.copy_from_slice(&px.value().to_ne_bytes());
        }
    }
}
/// Returns the descriptor identifying the buffer's channel type and layout.
fn pixel_descriptor_for_data(pixels: &PixelBuffer) -> PixelDescriptor {
    pixels.descriptor()
}
/// Flattens the pixel data into a single contiguous byte vector using the
/// buffer's own copy routine.
fn pixel_data_bytes(pixels: &PixelBuffer) -> Vec<u8> {
    pixels.copy_to_contiguous_bytes()
}
#[cfg(test)]
mod tests {
use super::*;
use alloc::vec;
use imgref::Img;
use rgb::{Gray, Rgb, Rgba};
use zencodec::decode::{Decode, DecodeJob, DecoderConfig};
use zencodec::encode::{EncodeJob, EncoderConfig};
#[test]
fn encoding_rgb8() {
let enc = PngEncoderConfig::new();
let pixels: Vec<Rgb<u8>> = vec![
Rgb {
r: 128,
g: 64,
b: 32
};
64
];
let img = Img::new(pixels, 8, 8);
let output = enc.encode_rgb8(img.as_ref()).unwrap();
assert!(!output.data().is_empty());
assert_eq!(output.format(), ImageFormat::Png);
assert_eq!(
&output.data()[0..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
}
#[test]
fn encoding_rgba8() {
let enc = PngEncoderConfig::new();
let pixels: Vec<Rgba<u8>> = vec![
Rgba {
r: 100,
g: 150,
b: 200,
a: 128,
};
64
];
let img = Img::new(pixels, 8, 8);
let output = enc.encode_rgba8(img.as_ref()).unwrap();
assert!(!output.data().is_empty());
}
#[test]
fn encoding_gray8() {
let enc = PngEncoderConfig::new();
let pixels = vec![Gray::new(128u8); 64];
let img = Img::new(pixels, 8, 8);
let output = enc.encode_gray8(img.as_ref()).unwrap();
assert!(!output.data().is_empty());
}
#[test]
fn decode_roundtrip() {
let enc = PngEncoderConfig::new();
let pixels: Vec<Rgb<u8>> = vec![
Rgb {
r: 200,
g: 100,
b: 50
};
64
];
let img = Img::new(pixels, 8, 8);
let encoded = enc.encode_rgb8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let output = dec.decode(encoded.data()).unwrap();
assert_eq!(output.info().width, 8);
assert_eq!(output.info().height, 8);
assert_eq!(output.info().format, ImageFormat::Png);
}
#[test]
fn probe_header_info() {
let enc = PngEncoderConfig::new();
let pixels = vec![Rgb { r: 0u8, g: 0, b: 0 }; 100];
let img = Img::new(pixels, 10, 10);
let encoded = enc.encode_rgb8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let info = dec.probe_header(encoded.data()).unwrap();
assert_eq!(info.width, 10);
assert_eq!(info.height, 10);
assert_eq!(info.format, ImageFormat::Png);
}
#[test]
fn decode_into_rgb8_roundtrip() {
let enc = PngEncoderConfig::new();
let pixels = vec![
Rgb {
r: 128u8,
g: 64,
b: 32
};
64
];
let img = Img::new(pixels.clone(), 8, 8);
let encoded = enc.encode_rgb8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let mut buf = vec![Rgb { r: 0u8, g: 0, b: 0 }; 64];
let mut dst = imgref::ImgVec::new(buf.clone(), 8, 8);
let info = dec.decode_into_rgb8(encoded.data(), dst.as_mut()).unwrap();
assert_eq!(info.width, 8);
assert_eq!(info.height, 8);
buf = dst.into_buf();
assert_eq!(buf[0], pixels[0]);
}
#[test]
fn encode_bgra8_roundtrip() {
let enc = PngEncoderConfig::new();
let pixels = vec![
rgb::alt::BGRA {
b: 0,
g: 0,
r: 255,
a: 255,
},
rgb::alt::BGRA {
b: 0,
g: 255,
r: 0,
a: 200,
},
rgb::alt::BGRA {
b: 255,
g: 0,
r: 0,
a: 128,
},
rgb::alt::BGRA {
b: 128,
g: 128,
r: 128,
a: 255,
},
];
let img = Img::new(pixels, 2, 2);
let output = enc.encode_bgra8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let decoded = dec.decode(output.data()).unwrap();
let rgba = to_rgba8(decoded.into_buffer());
let buf = rgba.buf();
assert_eq!(
buf[0],
Rgba {
r: 255,
g: 0,
b: 0,
a: 255
}
);
assert_eq!(
buf[1],
Rgba {
r: 0,
g: 255,
b: 0,
a: 200
}
);
}
#[test]
fn f32_conversion_all_simd_tiers() {
use archmage::testing::{CompileTimePolicy, for_each_token_permutation};
use linear_srgb::default::{linear_to_srgb_u8, srgb_u8_to_linear};
let report = for_each_token_permutation(CompileTimePolicy::Warn, |_perm| {
let pixels = vec![
Rgb {
r: 0.0f32,
g: 0.5,
b: 1.0,
},
Rgb {
r: 0.25,
g: 0.75,
b: 0.1,
},
Rgb {
r: 0.0,
g: 0.0,
b: 0.0,
},
Rgb {
r: 1.0,
g: 1.0,
b: 1.0,
},
];
let img = Img::new(pixels.clone(), 2, 2);
let enc = PngEncoderConfig::new();
let output = enc.encode_rgb_f32(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let mut buf = vec![
Rgb {
r: 0.0f32,
g: 0.0,
b: 0.0
};
4
];
let mut dst = imgref::ImgVec::new(buf.clone(), 2, 2);
dec.decode_into_rgb_f32(output.data(), dst.as_mut())
.unwrap();
buf = dst.into_buf();
for (orig, decoded) in pixels.iter().zip(buf.iter()) {
let expected_r = srgb_u8_to_linear(linear_to_srgb_u8(orig.r.clamp(0.0, 1.0)));
let expected_g = srgb_u8_to_linear(linear_to_srgb_u8(orig.g.clamp(0.0, 1.0)));
let expected_b = srgb_u8_to_linear(linear_to_srgb_u8(orig.b.clamp(0.0, 1.0)));
assert!(
(decoded.r - expected_r).abs() < 1e-5,
"r mismatch: {} vs {}",
decoded.r,
expected_r
);
assert!(
(decoded.g - expected_g).abs() < 1e-5,
"g mismatch: {} vs {}",
decoded.g,
expected_g
);
assert!(
(decoded.b - expected_b).abs() < 1e-5,
"b mismatch: {} vs {}",
decoded.b,
expected_b
);
}
});
assert!(report.permutations_run >= 1);
}
#[test]
fn f32_rgba_roundtrip() {
use linear_srgb::default::{linear_to_srgb_u8, srgb_u8_to_linear};
let pixels = vec![
Rgba {
r: 0.0f32,
g: 0.5,
b: 1.0,
a: 1.0,
},
Rgba {
r: 0.25,
g: 0.75,
b: 0.1,
a: 0.5,
},
Rgba {
r: 0.0,
g: 0.0,
b: 0.0,
a: 0.0,
},
Rgba {
r: 1.0,
g: 1.0,
b: 1.0,
a: 1.0,
},
];
let img = Img::new(pixels.clone(), 2, 2);
let enc = PngEncoderConfig::new();
let output = enc.encode_rgba_f32(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let mut dst = imgref::ImgVec::new(
vec![
Rgba {
r: 0.0f32,
g: 0.0,
b: 0.0,
a: 0.0
};
4
],
2,
2,
);
dec.decode_into_rgba_f32(output.data(), dst.as_mut())
.unwrap();
for (orig, decoded) in pixels.iter().zip(dst.buf().iter()) {
let expected_r = srgb_u8_to_linear(linear_to_srgb_u8(orig.r.clamp(0.0, 1.0)));
let expected_g = srgb_u8_to_linear(linear_to_srgb_u8(orig.g.clamp(0.0, 1.0)));
let expected_b = srgb_u8_to_linear(linear_to_srgb_u8(orig.b.clamp(0.0, 1.0)));
let expected_a = (orig.a * 255.0).round() / 255.0;
assert!((decoded.r - expected_r).abs() < 1e-5, "r mismatch");
assert!((decoded.g - expected_g).abs() < 1e-5, "g mismatch");
assert!((decoded.b - expected_b).abs() < 1e-5, "b mismatch");
assert!(
(decoded.a - expected_a).abs() < 1e-2,
"a mismatch: {} vs {}",
decoded.a,
expected_a
);
}
}
#[test]
fn f32_gray_roundtrip() {
use linear_srgb::default::{linear_to_srgb_u8, srgb_u8_to_linear};
use rgb::Gray;
let pixels = vec![Gray(0.0f32), Gray(0.18), Gray(0.5), Gray(1.0)];
let img = Img::new(pixels.clone(), 2, 2);
let enc = PngEncoderConfig::new();
let output = enc.encode_gray_f32(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let mut dst = imgref::ImgVec::new(vec![Gray(0.0f32); 4], 2, 2);
dec.decode_into_gray_f32(output.data(), dst.as_mut())
.unwrap();
for (orig, decoded) in pixels.iter().zip(dst.buf().iter()) {
let expected = srgb_u8_to_linear(linear_to_srgb_u8(orig.value().clamp(0.0, 1.0)));
assert!(
(decoded.value() - expected).abs() < 1e-5,
"gray mismatch: {} vs {}",
decoded.value(),
expected
);
}
}
#[test]
fn f32_known_srgb_values() {
use linear_srgb::default::srgb_u8_to_linear;
let pixels = vec![
Rgb { r: 0u8, g: 0, b: 0 },
Rgb {
r: 128,
g: 128,
b: 128,
},
Rgb {
r: 255,
g: 255,
b: 255,
},
Rgb { r: 255, g: 0, b: 0 },
];
let img = Img::new(pixels, 2, 2);
let enc = PngEncoderConfig::new();
let output = enc.encode_rgb8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let mut dst = imgref::ImgVec::new(
vec![
Rgb {
r: 0.0f32,
g: 0.0,
b: 0.0
};
4
],
2,
2,
);
dec.decode_into_rgb_f32(output.data(), dst.as_mut())
.unwrap();
let buf = dst.buf();
assert!(buf[0].r.abs() < 1e-6);
assert!(buf[0].g.abs() < 1e-6);
assert!(buf[0].b.abs() < 1e-6);
let expected_128 = srgb_u8_to_linear(128);
assert!((buf[1].r - expected_128).abs() < 1e-5);
assert!((buf[2].r - 1.0).abs() < 1e-6);
assert!((buf[2].g - 1.0).abs() < 1e-6);
assert!((buf[2].b - 1.0).abs() < 1e-6);
assert!((buf[3].r - 1.0).abs() < 1e-6);
assert!(buf[3].g.abs() < 1e-6);
assert!(buf[3].b.abs() < 1e-6);
}
#[test]
fn format_is_correct() {
assert_eq!(
<PngEncoderConfig as EncoderConfig>::format(),
ImageFormat::Png
);
assert_eq!(
<PngDecoderConfig as DecoderConfig>::formats(),
&[ImageFormat::Png]
);
}
#[test]
fn effort_getter_setter() {
let enc = PngEncoderConfig::new();
assert_eq!(enc.generic_effort(), None);
assert_eq!(enc.is_lossless(), Some(true));
let enc = PngEncoderConfig::new().with_generic_effort(0);
assert_eq!(enc.generic_effort(), Some(0));
let enc = PngEncoderConfig::new().with_generic_effort(1);
assert_eq!(enc.generic_effort(), Some(1));
let enc = PngEncoderConfig::new().with_generic_effort(5);
assert_eq!(enc.generic_effort(), Some(5));
assert_eq!(enc.is_lossless(), Some(true));
let enc = PngEncoderConfig::new().with_generic_effort(9);
assert_eq!(enc.generic_effort(), Some(9));
let enc = PngEncoderConfig::new().with_generic_effort(10);
assert_eq!(enc.generic_effort(), Some(10));
let enc = PngEncoderConfig::new().with_generic_effort(11);
assert_eq!(enc.generic_effort(), Some(11));
let enc = PngEncoderConfig::new().with_generic_effort(12);
assert_eq!(enc.generic_effort(), Some(12));
}
#[test]
fn output_info_matches_decode() {
let pixels = vec![Rgb { r: 1u8, g: 2, b: 3 }; 6];
let img = Img::new(pixels, 3, 2);
let enc = PngEncoderConfig::new();
let output = enc.encode_rgb8(img.as_ref()).unwrap();
let dec = PngDecoderConfig::new();
let info = dec.clone().job().output_info(output.data()).unwrap();
assert_eq!(info.width, 3);
assert_eq!(info.height, 2);
let decoded = dec.decode(output.data()).unwrap();
assert_eq!(decoded.width(), info.width);
assert_eq!(decoded.height(), info.height);
}
#[test]
fn four_layer_encode_flow() {
let pixels = vec![
Rgb::<u8> { r: 255, g: 0, b: 0 },
Rgb { r: 0, g: 255, b: 0 },
Rgb { r: 0, g: 0, b: 255 },
Rgb {
r: 128,
g: 128,
b: 128,
},
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let config = PngEncoderConfig::new();
use zencodec::encode::Encoder;
let slice = PixelSlice::from(img.as_ref());
let output = config
.job()
.encoder()
.unwrap()
.encode(slice.erase())
.unwrap();
assert_eq!(output.format(), ImageFormat::Png);
assert!(!output.data().is_empty());
}
#[test]
fn four_layer_decode_flow() {
let pixels = vec![
Rgb {
r: 100u8,
g: 200,
b: 50
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let enc = PngEncoderConfig::new();
let encoded = enc.encode_rgb8(img.as_ref()).unwrap();
let config = PngDecoderConfig::new();
let decoded = config
.job()
.decoder(Cow::Borrowed(encoded.data()), &[])
.unwrap()
.decode()
.unwrap();
assert_eq!(decoded.width(), 2);
assert_eq!(decoded.height(), 2);
}
#[test]
fn encoding_clone_send_sync() {
fn assert_traits<T: Clone + Send + Sync>() {}
assert_traits::<PngEncoderConfig>();
}
#[test]
fn decoding_clone_send_sync() {
fn assert_traits<T: Clone + Send + Sync>() {}
assert_traits::<PngDecoderConfig>();
}
#[test]
fn rgb16_roundtrip() {
let pixels = vec![
Rgb::<u16> {
r: 0,
g: 32768,
b: 65535,
},
Rgb {
r: 1000,
g: 50000,
b: 12345,
},
Rgb {
r: 65535,
g: 0,
b: 0,
},
Rgb {
r: 0,
g: 65535,
b: 0,
},
];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_rgb16(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
assert_eq!(
&encoded[..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.width, 2);
assert_eq!(decoded.info.height, 2);
assert_eq!(decoded.info.bit_depth, 16);
let img = decoded
.pixels
.try_as_imgref::<Rgb<u16>>()
.expect("expected Rgb16");
for (i, (orig, dec)) in pixels.iter().zip(img.pixels()).enumerate() {
assert_eq!(
*orig, dec,
"pixel {i} mismatch: expected {orig:?}, got {dec:?}"
);
}
}
#[test]
fn rgba16_roundtrip() {
let pixels = vec![
Rgba::<u16> {
r: 0x0102,
g: 0x0304,
b: 0x0506,
a: 0xFFFF,
},
Rgba {
r: 65535,
g: 0,
b: 0,
a: 32768,
},
Rgba {
r: 0,
g: 0,
b: 0,
a: 0,
},
Rgba {
r: 65535,
g: 65535,
b: 65535,
a: 65535,
},
];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_rgba16(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.bit_depth, 16);
let img = decoded
.pixels
.try_as_imgref::<Rgba<u16>>()
.expect("expected Rgba16");
for (i, (orig, dec)) in pixels.iter().zip(img.pixels()).enumerate() {
assert_eq!(
*orig, dec,
"pixel {i} mismatch: expected {orig:?}, got {dec:?}"
);
}
}
#[test]
fn gray16_roundtrip() {
let pixels = vec![Gray::<u16>(0), Gray(1000), Gray(32768), Gray(65535)];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_gray16(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.bit_depth, 16);
let img = decoded
.pixels
.try_as_imgref::<Gray<u16>>()
.expect("expected Gray16");
for (i, (orig, dec)) in pixels.iter().zip(img.pixels()).enumerate() {
assert_eq!(
*orig, dec,
"pixel {i} mismatch: expected {orig:?}, got {dec:?}"
);
}
}
#[test]
fn rgb16_metadata_roundtrip() {
let pixels = vec![
Rgb::<u16> {
r: 100,
g: 200,
b: 300
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let fake_icc = vec![0x42u8; 200];
let exif_data = b"Exif\0\0test_exif";
let xmp_data = b"<x:xmpmeta>test</x:xmpmeta>";
let meta = Metadata::none()
.with_icc(fake_icc.as_slice())
.with_exif(exif_data.as_slice())
.with_xmp(xmp_data.as_slice());
let encoded = crate::encode::encode_rgb16(
img.as_ref(),
Some(&meta),
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(
decoded.info.icc_profile.as_deref(),
Some(fake_icc.as_slice())
);
assert_eq!(decoded.info.exif.as_deref(), Some(exif_data.as_slice()));
assert_eq!(decoded.info.xmp.as_deref(), Some(xmp_data.as_slice()));
}
#[test]
fn truecolor_zenflate_rgb8_roundtrip() {
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32,
},
Rgb { r: 0, g: 255, b: 0 },
Rgb {
r: 255,
g: 255,
b: 255,
},
Rgb { r: 0, g: 0, b: 0 },
];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.width, 2);
assert_eq!(decoded.info.height, 2);
let img = decoded
.pixels
.try_as_imgref::<Rgb<u8>>()
.expect("expected Rgb8");
for (orig, dec) in pixels.iter().zip(img.pixels()) {
assert_eq!(*orig, dec);
}
}
#[test]
fn truecolor_zenflate_rgba8_roundtrip() {
let pixels = vec![
Rgba::<u8> {
r: 100,
g: 150,
b: 200,
a: 128,
},
Rgba {
r: 0,
g: 0,
b: 0,
a: 0,
},
Rgba {
r: 255,
g: 255,
b: 255,
a: 255,
},
Rgba {
r: 1,
g: 2,
b: 3,
a: 4,
},
];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let img = decoded
.pixels
.try_as_imgref::<Rgba<u8>>()
.expect("expected Rgba8");
for (orig, dec) in pixels.iter().zip(img.pixels()) {
assert_eq!(*orig, dec);
}
}
#[test]
fn truecolor_zenflate_gray8_roundtrip() {
let pixels = vec![Gray(0u8), Gray(128), Gray(255), Gray(1)];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let encoded = crate::encode::encode_gray8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let img = decoded
.pixels
.try_as_imgref::<Gray<u8>>()
.expect("expected Gray8");
for (orig, dec) in pixels.iter().zip(img.pixels()) {
assert_eq!(*orig, dec);
}
}
#[test]
fn subbyte_gray_1bit_roundtrip() {
let mut pixels = Vec::new();
for i in 0..8 {
let v = if i % 2 == 0 { 0u8 } else { 255u8 };
pixels.push(Rgba {
r: v,
g: v,
b: v,
a: 255,
});
}
let img = imgref::ImgVec::new(pixels.clone(), 4, 2);
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let decoded_rgba = decoded.pixels.to_rgba8();
let decoded_img = decoded_rgba.as_imgref();
for (i, (orig, dec)) in pixels.iter().zip(decoded_img.pixels()).enumerate() {
assert_eq!(
(orig.r, orig.g, orig.b, orig.a),
(dec.r, dec.g, dec.b, dec.a),
"pixel {i} mismatch"
);
}
}
#[test]
fn subbyte_gray_4bit_roundtrip() {
let mut pixels = Vec::new();
for i in 0..16 {
let v = (i * 17) as u8;
pixels.push(Rgba {
r: v,
g: v,
b: v,
a: 255,
});
}
let img = imgref::ImgVec::new(pixels.clone(), 4, 4);
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let decoded_rgba = decoded.pixels.to_rgba8();
let decoded_img = decoded_rgba.as_imgref();
for (i, (orig, dec)) in pixels.iter().zip(decoded_img.pixels()).enumerate() {
assert_eq!(
(orig.r, orig.g, orig.b, orig.a),
(dec.r, dec.g, dec.b, dec.a),
"pixel {i} mismatch"
);
}
}
#[test]
fn rgba_trns_gray_roundtrip() {
let mut pixels = Vec::new();
for i in 0..100 {
if i == 0 {
pixels.push(Rgba {
r: 0,
g: 0,
b: 0,
a: 0,
}); } else {
let v = ((i % 15) * 17 + 17) as u8; pixels.push(Rgba {
r: v,
g: v,
b: v,
a: 255,
});
}
}
let img = imgref::ImgVec::new(pixels.clone(), 10, 10);
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let decoded_rgba = decoded.pixels.to_rgba8();
let decoded_img = decoded_rgba.as_imgref();
for (i, (orig, dec)) in pixels.iter().zip(decoded_img.pixels()).enumerate() {
if orig.a == 0 && dec.a == 0 {
continue; }
assert_eq!(
(orig.r, orig.g, orig.b, orig.a),
(dec.r, dec.g, dec.b, dec.a),
"pixel {i} mismatch"
);
}
}
#[test]
fn rgba_trns_rgb_roundtrip() {
let mut pixels = Vec::new();
for r in 0..20u8 {
for g in 0..21u8 {
let b = 128u8;
pixels.push(Rgba {
r: r.wrapping_mul(13),
g: g.wrapping_mul(12),
b,
a: 255,
});
}
}
pixels[0] = Rgba {
r: 3,
g: 7,
b: 11,
a: 0,
};
let w = 20;
let h = pixels.len() / w;
let img = imgref::ImgVec::new(pixels.clone(), w, h);
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&EncodeConfig::default(),
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let decoded_rgba = decoded.pixels.to_rgba8();
let decoded_img = decoded_rgba.as_imgref();
for (i, (orig, dec)) in pixels.iter().zip(decoded_img.pixels()).enumerate() {
if orig.a == 0 && dec.a == 0 {
continue;
}
assert_eq!(
(orig.r, orig.g, orig.b, orig.a),
(dec.r, dec.g, dec.b, dec.a),
"pixel {i} mismatch"
);
}
}
#[test]
fn zencodec_u16_encode_decode() {
let pixels = vec![
Rgb::<u16> {
r: 100,
g: 200,
b: 300,
};
4
];
let img = imgref::ImgVec::new(pixels.clone(), 2, 2);
let enc = PngEncoderConfig::new();
let slice = PixelSlice::from(img.as_ref());
use zencodec::encode::Encoder;
let output = enc.job().encoder().unwrap().encode(slice.erase()).unwrap();
assert_eq!(output.format(), ImageFormat::Png);
let dec = PngDecoderConfig::new();
let mut dst = imgref::ImgVec::new(vec![Rgb::<u16> { r: 0, g: 0, b: 0 }; 4], 2, 2);
dec.decode_into_rgb16(output.data(), dst.as_mut()).unwrap();
for (orig, dec) in pixels.iter().zip(dst.buf().iter()) {
assert_eq!(orig, dec);
}
}
#[test]
fn srgb_suppresses_gama_chrm() {
use crate::decode::PngChromaticities;
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let chrm = PngChromaticities {
white_x: 31270,
white_y: 32900,
red_x: 64000,
red_y: 33000,
green_x: 30000,
green_y: 60000,
blue_x: 15000,
blue_y: 6000,
};
let config = crate::encode::EncodeConfig {
source_gamma: Some(45455),
srgb_intent: Some(0), chromaticities: Some(chrm),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.srgb_intent, Some(0));
assert_eq!(decoded.info.source_gamma, None);
assert!(decoded.info.chromaticities.is_none());
}
#[test]
fn gama_chrm_roundtrip_without_srgb() {
use crate::decode::PngChromaticities;
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let chrm = PngChromaticities {
white_x: 31270,
white_y: 32900,
red_x: 64000,
red_y: 33000,
green_x: 30000,
green_y: 60000,
blue_x: 15000,
blue_y: 6000,
};
let config = crate::encode::EncodeConfig {
source_gamma: Some(45455),
chromaticities: Some(chrm),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.source_gamma, Some(45455));
assert!(decoded.info.srgb_intent.is_none());
let dc = decoded.info.chromaticities.expect("cHRM missing");
assert_eq!(dc.white_x, 31270);
assert_eq!(dc.white_y, 32900);
assert_eq!(dc.red_x, 64000);
assert_eq!(dc.red_y, 33000);
assert_eq!(dc.green_x, 30000);
assert_eq!(dc.green_y, 60000);
assert_eq!(dc.blue_x, 15000);
assert_eq!(dc.blue_y, 6000);
}
#[test]
fn cicp_suppresses_srgb_gama_chrm() {
use crate::decode::PngChromaticities;
use zencodec::{Cicp, Metadata};
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let cicp = Cicp::new(9, 16, 0, true);
let meta = Metadata::none().with_cicp(cicp);
let chrm = PngChromaticities {
white_x: 31270,
white_y: 32900,
red_x: 64000,
red_y: 33000,
green_x: 30000,
green_y: 60000,
blue_x: 15000,
blue_y: 6000,
};
let config = crate::encode::EncodeConfig {
source_gamma: Some(45455),
srgb_intent: Some(0),
chromaticities: Some(chrm),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert!(decoded.info.cicp.is_some());
assert_eq!(decoded.info.srgb_intent, None);
assert_eq!(decoded.info.source_gamma, None);
assert!(decoded.info.chromaticities.is_none());
}
#[test]
fn iccp_suppresses_srgb_gama_chrm() {
use crate::decode::PngChromaticities;
use zencodec::Metadata;
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let srgb_icc: &[u8] = &[0u8; 128];
let meta = Metadata::none().with_icc(srgb_icc);
let chrm = PngChromaticities {
white_x: 31270,
white_y: 32900,
red_x: 64000,
red_y: 33000,
green_x: 30000,
green_y: 60000,
blue_x: 15000,
blue_y: 6000,
};
let config = crate::encode::EncodeConfig {
source_gamma: Some(45455),
srgb_intent: Some(0),
chromaticities: Some(chrm),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert!(decoded.info.icc_profile.is_some());
assert_eq!(decoded.info.srgb_intent, None);
assert_eq!(decoded.info.source_gamma, None);
assert!(decoded.info.chromaticities.is_none());
}
#[test]
fn cicp_with_iccp_fallback() {
use zencodec::{Cicp, Metadata};
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let cicp = Cicp::new(1, 13, 0, true);
let srgb_icc: &[u8] = &[0u8; 128];
let meta = Metadata::none().with_cicp(cicp).with_icc(srgb_icc);
let config = crate::encode::EncodeConfig::default();
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert!(decoded.info.cicp.is_some());
assert!(decoded.info.icc_profile.is_some());
assert_eq!(decoded.info.srgb_intent, None);
assert_eq!(decoded.info.source_gamma, None);
assert!(decoded.info.chromaticities.is_none());
}
// HDR static metadata (cLLi + mDCV) must be written whenever cICP is present,
// and cICP must win over any configured legacy gAMA/sRGB settings.
#[test]
fn hdr_metadata_always_written_with_cicp() {
use zencodec::{Cicp, ContentLightLevel, MasteringDisplay, Metadata};
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
// 9/16 look like BT.2020 primaries with PQ transfer (H.273 code points).
let cicp = Cicp::new(9, 16, 0, true);
let clli = ContentLightLevel::new(1000, 400);
let mdcv = MasteringDisplay::new(
[[0.708, 0.292], [0.170, 0.797], [0.131, 0.046]],
[0.3127, 0.3290],
1000.0,
0.005,
);
let meta = Metadata::none()
.with_cicp(cicp)
.with_content_light_level(clli)
.with_mastering_display(mdcv);
// Legacy colorimetry is deliberately configured to verify it is dropped.
let config = crate::encode::EncodeConfig {
source_gamma: Some(45455),
srgb_intent: Some(0),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert!(decoded.info.cicp.is_some());
assert!(decoded.info.content_light_level.is_some());
assert!(decoded.info.mastering_display.is_some());
assert_eq!(decoded.info.srgb_intent, None);
assert_eq!(decoded.info.source_gamma, None);
}
// Negative chromaticity coordinates (e.g. wide-gamut primaries outside the
// spectral locus) must survive an encode/decode roundtrip unchanged.
#[test]
fn chrm_negative_values_roundtrip() {
use crate::decode::PngChromaticities;
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
// Values are in PNG cHRM fixed-point units (x * 100_000); blue_x is negative.
let chrm = PngChromaticities {
white_x: 32168,
white_y: 33767,
red_x: 71300,
red_y: 29300,
green_x: 16500,
green_y: 83000,
blue_x: -12800, blue_y: 4400,
};
let config = crate::encode::EncodeConfig {
source_gamma: Some(100000), chromaticities: Some(chrm),
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let dc = decoded.info.chromaticities.expect("cHRM missing");
assert_eq!(dc.blue_x, -12800);
assert_eq!(dc.blue_y, 4400);
assert_eq!(dc.white_x, 32168);
}
// All four cICP fields (primaries, transfer, matrix, full-range flag) must
// roundtrip through encode + decode exactly.
#[test]
fn cicp_roundtrip() {
use zencodec::{Cicp, Metadata};
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let cicp = Cicp::new(9, 16, 0, true); let meta = Metadata::none().with_cicp(cicp);
let config = crate::encode::EncodeConfig::default();
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let dc = decoded.info.cicp.expect("cICP missing");
assert_eq!(dc.color_primaries, 9);
assert_eq!(dc.transfer_characteristics, 16);
assert_eq!(dc.matrix_coefficients, 0);
assert!(dc.full_range);
}
// cLLi (content light level) and mDCV (mastering display) chunks must
// roundtrip with their exact numeric payloads, even without cICP present.
#[test]
fn clli_mdcv_roundtrip() {
use zencodec::{ContentLightLevel, MasteringDisplay, Metadata};
let pixels = vec![
Rgb::<u8> {
r: 128,
g: 64,
b: 32
};
4
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let clli = ContentLightLevel::new(1000, 400);
let mdcv = MasteringDisplay::new(
[[0.708, 0.292], [0.170, 0.797], [0.131, 0.046]], [0.3127, 0.3290], 1000.0, 0.005, );
let meta = Metadata::none()
.with_content_light_level(clli)
.with_mastering_display(mdcv);
let config = crate::encode::EncodeConfig::default();
let encoded = crate::encode::encode_rgb8(
img.as_ref(),
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let dc = decoded.info.content_light_level.expect("cLLi missing");
assert_eq!(dc.max_content_light_level, 1000);
assert_eq!(dc.max_frame_average_light_level, 400);
let dm = decoded.info.mastering_display.expect("mDCV missing");
// Exact float equality is intentional: values are stored losslessly enough
// to reproduce these literals (presumably via fixed-point chunk encoding).
assert_eq!(
dm.primaries_xy,
[[0.708, 0.292], [0.170, 0.797], [0.131, 0.046]]
);
assert_eq!(dm.white_point_xy, [0.3127, 0.3290]);
assert_eq!(dm.max_luminance, 1000.0);
assert_eq!(dm.min_luminance, 0.005);
}
// Corpus test (run with --ignored): a real PNG carrying gAMA + cHRM but no
// sRGB chunk must re-encode with identical colorimetry. Requires the
// CODEC_CORPUS_DIR checkout; panics if the file is absent.
#[test]
#[ignore]
fn real_file_gama_chrm_roundtrip() {
let corpus = std::env::var("CODEC_CORPUS_DIR")
.unwrap_or_else(|_| "/home/lilith/work/codec-corpus".to_string());
let path = format!("{corpus}/imageflow/test_inputs/frymire.png");
let data = std::fs::read(&path).expect("frymire.png not found");
let orig =
crate::decode::decode(&data, &PngDecodeConfig::none(), &enough::Unstoppable).unwrap();
let gamma = orig
.info
.source_gamma
.expect("frymire.png should have gAMA");
let chrm = orig
.info
.chromaticities
.expect("frymire.png should have cHRM");
assert!(
orig.info.srgb_intent.is_none(),
"frymire.png should NOT have sRGB"
);
// Known fixture value (45454, not the usual 45455 rounding of 1/2.2).
assert_eq!(gamma, 45454);
let config = crate::encode::EncodeConfig {
source_gamma: Some(gamma),
chromaticities: Some(chrm),
..Default::default()
};
let pixels = orig
.pixels
.try_as_imgref::<Rgb<u8>>()
.expect("frymire.png should decode as RGB8");
let encoded = crate::encode::encode_rgb8(
pixels,
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let rt = crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(rt.info.source_gamma, Some(gamma));
let rt_chrm = rt.info.chromaticities.expect("re-encoded should have cHRM");
assert_eq!(rt_chrm, chrm);
assert!(rt.info.srgb_intent.is_none());
}
// Corpus test (run with --ignored): a real PNG with an sRGB chunk (plus the
// redundant gAMA/cHRM fallbacks) must re-encode so that sRGB wins and the
// redundant legacy chunks are dropped.
#[test]
#[ignore]
fn real_file_srgb_roundtrip() {
let corpus = std::env::var("CODEC_CORPUS_DIR")
.unwrap_or_else(|_| "/home/lilith/work/codec-corpus".to_string());
let path = format!("{corpus}/imageflow/test_inputs/red-night.png");
let data = std::fs::read(&path).expect("red-night.png not found");
let orig =
crate::decode::decode(&data, &PngDecodeConfig::none(), &enough::Unstoppable).unwrap();
let intent = orig
.info
.srgb_intent
.expect("red-night.png should have sRGB");
assert!(orig.info.source_gamma.is_some());
assert!(orig.info.chromaticities.is_some());
// 0 = perceptual rendering intent.
assert_eq!(intent, 0);
let config = crate::encode::EncodeConfig {
source_gamma: orig.info.source_gamma,
srgb_intent: Some(intent),
chromaticities: orig.info.chromaticities,
..Default::default()
};
let pixels = orig
.pixels
.try_as_imgref::<Rgba<u8>>()
.expect("red-night.png should decode as RGBA8");
let encoded = crate::encode::encode_rgba8(
pixels,
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let rt = crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
// sRGB takes precedence; gAMA/cHRM are suppressed on re-encode.
assert_eq!(rt.info.srgb_intent, Some(0));
assert_eq!(rt.info.source_gamma, None);
assert!(rt.info.chromaticities.is_none());
}
// Corpus test (run with --ignored): an embedded iCCP profile from a real
// file must survive re-encoding byte-for-byte.
#[test]
#[ignore]
fn real_file_icc_roundtrip() {
let corpus = std::env::var("CODEC_CORPUS_DIR")
.unwrap_or_else(|_| "/home/lilith/work/codec-corpus".to_string());
let path = format!("{corpus}/imageflow/test_inputs/shirt_transparent.png");
let data = std::fs::read(&path).expect("shirt_transparent.png not found");
let orig =
crate::decode::decode(&data, &PngDecodeConfig::none(), &enough::Unstoppable).unwrap();
let icc = orig
.info
.icc_profile
.as_ref()
.expect("shirt_transparent.png should have iCCP");
assert!(!icc.is_empty());
let meta = zencodec::Metadata::none().with_icc(icc.as_slice());
let config = crate::encode::EncodeConfig::default();
let pixels = orig
.pixels
.try_as_imgref::<Rgba<u8>>()
.expect("shirt_transparent.png should decode as RGBA8");
let encoded = crate::encode::encode_rgba8(
pixels,
Some(&meta),
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let rt = crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
let rt_icc = rt
.info
.icc_profile
.as_ref()
.expect("re-encoded should have iCCP");
// Profile bytes must be identical after the roundtrip.
assert_eq!(icc, rt_icc);
}
// Re-encodes a large CLIC corpus image at every compression level and checks
// each result still decodes. NOTE: silently skips (early return) when the
// corpus file is absent, so this is a no-op outside corpus machines.
#[test]
fn test_clic_0d154_roundtrip() {
let corpus = std::env::var("CODEC_CORPUS_DIR")
.unwrap_or_else(|_| "/home/lilith/work/codec-corpus".to_string());
let path = format!(
"{corpus}/clic2025-1024/0d154749c7771f58e89ad343653ec4e20d6f037da829f47f5598e5d0a4ab61f0.png"
);
let data = match std::fs::read(&path) {
Ok(d) => d,
Err(_) => return, };
let decoded =
crate::decode::decode(&data, &PngDecodeConfig::none(), &enough::Unstoppable).unwrap();
let info = &decoded.info;
let rgb_pixels = decoded
.pixels
.try_as_imgref::<Rgb<u8>>()
.unwrap_or_else(|| panic!("expected Rgb8, got {:?}", decoded.pixels.descriptor()));
// Exercise every user-facing compression preset with the source colorimetry.
for (name, comp) in [
("Fastest", crate::Compression::Fastest),
("Fast", crate::Compression::Fast),
("Balanced", crate::Compression::Balanced),
("Thorough", crate::Compression::Thorough),
("High", crate::Compression::High),
("Aggressive", crate::Compression::Aggressive),
] {
let config = crate::EncodeConfig {
source_gamma: info.source_gamma,
srgb_intent: info.srgb_intent,
chromaticities: info.chromaticities,
compression: comp,
..Default::default()
};
let encoded = crate::encode::encode_rgb8(
rgb_pixels,
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
match crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable) {
Ok(_) => {}
Err(e) => panic!("{name}: full PNG re-decode failed: {e}"),
}
}
}
// Round-trips a 1024x1024 RGB8 gradient through encode + decode and checks
// the decoded image keeps the original dimensions.
#[test]
fn test_large_rgb8_roundtrip() {
    const W: usize = 1024;
    const H: usize = 1024;
    // Deterministic per-pixel gradient derived from (x, y).
    let mut pixels: Vec<rgb::Rgb<u8>> = Vec::with_capacity(W * H);
    for y in 0..H {
        for x in 0..W {
            pixels.push(rgb::Rgb {
                r: (x & 0xFF) as u8,
                g: (y & 0xFF) as u8,
                b: ((x + y) & 0xFF) as u8,
            });
        }
    }
    let img = imgref::ImgVec::new(pixels, W, H);
    let config = crate::EncodeConfig::default();
    let encoded = crate::encode::encode_rgb8(
        img.as_ref(),
        None,
        &config,
        &enough::Unstoppable,
        &enough::Unstoppable,
    )
    .unwrap();
    let decoded =
        crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
            .unwrap();
    let dec_img = decoded
        .pixels
        .try_as_imgref::<Rgb<u8>>()
        .unwrap_or_else(|| panic!("expected Rgb8, got {:?}", decoded.pixels.descriptor()));
    assert_eq!(dec_img.width(), W);
    assert_eq!(dec_img.height(), H);
}
// A stop token that is already cancelled must make encode() fail with
// PngError::Stopped(Cancelled) rather than producing output or panicking.
#[test]
fn immediate_cancel_encode_returns_stopped() {
use enough::{Stop, StopReason};
// Minimal Stop impl that reports cancellation on every check.
struct AlreadyCancelled;
impl Stop for AlreadyCancelled {
fn check(&self) -> Result<(), StopReason> {
Err(StopReason::Cancelled)
}
}
let config = PngEncoderConfig::new();
let stop = zencodec::StopToken::new(AlreadyCancelled);
let job = config.clone().job().with_stop(stop);
let encoder = job.encoder().unwrap();
let pixels = vec![Rgb { r: 0u8, g: 0, b: 0 }; 4];
let img = Img::new(pixels, 2, 2);
let slice = PixelSlice::from(img.as_ref());
use zencodec::encode::Encoder;
let result = encoder.encode(slice.erase());
assert!(result.is_err());
// decompose() strips the `At` location wrapper to expose the PngError.
match result.unwrap_err().decompose().0 {
PngError::Stopped(reason) => {
assert_eq!(reason, StopReason::Cancelled);
}
other => panic!("expected PngError::Stopped, got: {other}"),
}
}
// A zero-duration deadline on the *progress* channel must not abort an
// encode: deadlines are advisory for work pacing, not hard cancellation.
// NOTE(review): assumes WithTimeout(ZERO) never fires before first check —
// confirm against almost_enough's semantics.
#[test]
fn zero_deadline_encode_still_succeeds() {
let config = crate::EncodeConfig::default();
let deadline =
almost_enough::time::WithTimeout::new(enough::Unstoppable, std::time::Duration::ZERO);
let pixels = vec![
rgb::Rgba {
r: 255u8,
g: 0,
b: 0,
a: 255,
};
16
];
let img = imgref::ImgVec::new(pixels, 4, 4);
let result = crate::encode::encode_rgba8(
img.as_ref(),
None,
&config,
&enough::Unstoppable,
&deadline,
);
assert!(result.is_ok(), "zero-deadline encode should still succeed");
let encoded = result.unwrap();
let decoded =
crate::decode::decode(&encoded, &PngDecodeConfig::none(), &enough::Unstoppable)
.unwrap();
assert_eq!(decoded.info.width, 4);
assert_eq!(decoded.info.height, 4);
}
// Decode counterpart of the cancel test: a pre-cancelled stop token makes
// decode() fail with PngError::Stopped(Cancelled).
#[test]
fn immediate_cancel_decode_returns_stopped() {
use enough::{Stop, StopReason};
// Minimal Stop impl that reports cancellation on every check.
struct AlreadyCancelled;
impl Stop for AlreadyCancelled {
fn check(&self) -> Result<(), StopReason> {
Err(StopReason::Cancelled)
}
}
let pixels = vec![
rgb::Rgba {
r: 128u8,
g: 64,
b: 32,
a: 255,
};
16
];
let img = imgref::ImgVec::new(pixels, 4, 4);
let config = crate::EncodeConfig::default();
// Produce a valid PNG first so the failure can only come from the token.
let encoded = crate::encode::encode_rgba8(
img.as_ref(),
None,
&config,
&enough::Unstoppable,
&enough::Unstoppable,
)
.unwrap();
let result = crate::decode::decode(&encoded, &PngDecodeConfig::none(), &AlreadyCancelled);
assert!(result.is_err());
match result.unwrap_err().decompose().0 {
PngError::Stopped(reason) => {
assert_eq!(reason, StopReason::Cancelled);
}
other => panic!("expected PngError::Stopped, got: {other}"),
}
}
// Pins the quality/lossless interplay: quality 100 keeps the encoder
// lossless, any quality below 100 (including 0) flips it to lossy, and
// effort and quality settings are independent of each other.
#[test]
fn quality_getter_setter() {
let enc = PngEncoderConfig::new();
assert_eq!(enc.generic_quality(), None);
assert_eq!(enc.is_lossless(), Some(true));
// quality == 100 is still lossless.
let enc = PngEncoderConfig::new().with_generic_quality(100.0);
assert_eq!(enc.generic_quality(), Some(100.0));
assert_eq!(enc.is_lossless(), Some(true));
let enc = PngEncoderConfig::new().with_generic_quality(90.0);
assert_eq!(enc.generic_quality(), Some(90.0));
assert_eq!(enc.is_lossless(), Some(false));
let enc = PngEncoderConfig::new().with_generic_quality(0.0);
assert_eq!(enc.generic_quality(), Some(0.0));
assert_eq!(enc.is_lossless(), Some(false));
// Effort and quality coexist without clobbering each other.
let enc = PngEncoderConfig::new()
.with_generic_effort(5)
.with_generic_quality(75.0);
assert_eq!(enc.generic_effort(), Some(5));
assert_eq!(enc.generic_quality(), Some(75.0));
assert_eq!(enc.is_lossless(), Some(false));
}
// Characterizes the quality -> max-per-pixel-error curve: exact values at
// the anchor points, approximate values between them, strict monotonic
// decrease as quality rises, and clamping outside [0, 100].
#[test]
fn quality_to_mpe_curve() {
assert_eq!(quality_to_mpe(100.0), 0.0);
assert_eq!(quality_to_mpe(99.0), 0.003);
assert_eq!(quality_to_mpe(0.0), 0.100);
let mpe_95 = quality_to_mpe(95.0);
assert!((mpe_95 - 0.007).abs() < 0.001, "q95 mpe={mpe_95}");
let mpe_90 = quality_to_mpe(90.0);
assert!((mpe_90 - 0.011).abs() < 0.001, "q90 mpe={mpe_90}");
let mpe_75 = quality_to_mpe(75.0);
assert!((mpe_75 - 0.026).abs() < 0.001, "q75 mpe={mpe_75}");
let mpe_50 = quality_to_mpe(50.0);
assert!((mpe_50 - 0.044).abs() < 0.001, "q50 mpe={mpe_50}");
// Interpolated point must fall strictly between its neighboring anchors.
let mpe_97 = quality_to_mpe(97.0);
assert!(mpe_97 > 0.003 && mpe_97 < 0.007, "q97 mpe={mpe_97}");
assert!(quality_to_mpe(90.0) > quality_to_mpe(99.0));
assert!(quality_to_mpe(75.0) > quality_to_mpe(90.0));
assert!(quality_to_mpe(50.0) > quality_to_mpe(75.0));
assert!(quality_to_mpe(0.0) > quality_to_mpe(50.0));
// Out-of-range inputs clamp to the endpoints.
assert_eq!(quality_to_mpe(-10.0), quality_to_mpe(0.0));
assert_eq!(quality_to_mpe(200.0), quality_to_mpe(100.0));
}
// With the "quantize" feature, a lossy quality setting on a low-color image
// should trigger automatic palette quantization (a PLTE chunk), while both
// lossless and lossy outputs remain valid, decodable PNGs.
#[cfg(feature = "quantize")]
#[test]
fn quality_auto_indexed_rgba8() {
// Four distinct opaque colors — trivially palettizable.
let pixels: Vec<Rgba<u8>> = vec![
Rgba {
r: 255,
g: 0,
b: 0,
a: 255,
},
Rgba {
r: 0,
g: 255,
b: 0,
a: 255,
},
Rgba {
r: 0,
g: 0,
b: 255,
a: 255,
},
Rgba {
r: 255,
g: 255,
b: 0,
a: 255,
},
];
let img = imgref::ImgVec::new(pixels, 2, 2);
let enc_lossless = PngEncoderConfig::new();
let out_lossless = enc_lossless.encode_rgba8(img.as_ref()).unwrap();
let enc_lossy = PngEncoderConfig::new().with_generic_quality(90.0);
let out_lossy = enc_lossy.encode_rgba8(img.as_ref()).unwrap();
// Both outputs start with the 8-byte PNG signature.
assert_eq!(
&out_lossless.data()[..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
assert_eq!(
&out_lossy.data()[..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
// Naive chunk scan: presence of the ASCII "PLTE" tag implies indexing.
let has_plte = out_lossy.data().windows(4).any(|w| w == b"PLTE");
assert!(
has_plte,
"expected indexed PNG with PLTE chunk for 4-color image"
);
let dec = PngDecoderConfig::new();
let d_lossless = dec.decode(out_lossless.data()).unwrap();
let d_lossy = dec.decode(out_lossy.data()).unwrap();
assert_eq!(d_lossless.width(), 2);
assert_eq!(d_lossy.width(), 2);
}
// `with_compression` must carry the chosen level into the config and the
// resulting encoder must still produce non-empty output.
#[test]
fn with_compression_sets_config() {
    let encoder = PngEncoderConfig::new().with_compression(crate::Compression::Turbo);
    let black = vec![Rgb { r: 0, g: 0, b: 0 }; 4];
    let image = Img::new(black, 2, 2);
    let output = encoder.encode_rgb8(image.as_ref()).unwrap();
    assert!(!output.data().is_empty());
}
// `with_filter` must carry the chosen filter strategy into the config and
// the resulting encoder must still produce non-empty output.
#[test]
fn with_filter_sets_config() {
    let encoder = PngEncoderConfig::new().with_filter(crate::Filter::Auto);
    let black = vec![Rgb { r: 0, g: 0, b: 0 }; 4];
    let image = Img::new(black, 2, 2);
    let output = encoder.encode_rgb8(image.as_ref()).unwrap();
    assert!(!output.data().is_empty());
}
// `Default` for the encoder config must leave both generic effort and
// generic quality unset.
#[test]
fn default_encoder_config() {
    let enc: PngEncoderConfig = Default::default();
    assert_eq!(enc.generic_effort(), None);
    assert_eq!(enc.generic_quality(), None);
}
// A tiny 2x2 RGB16 image must encode to non-empty output that begins with
// the 8-byte PNG signature.
#[test]
fn encode_rgb16_roundtrip() {
    let encoder = PngEncoderConfig::new();
    let px = Rgb {
        r: 1000u16,
        g: 2000,
        b: 3000,
    };
    let image = Img::new(vec![px; 4], 2, 2);
    let output = encoder.encode_rgb16(image.as_ref()).unwrap();
    let bytes = output.data();
    assert!(!bytes.is_empty());
    let signature: [u8; 8] = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A];
    assert_eq!(&bytes[..8], &signature[..]);
}
// A tiny 2x2 RGBA16 image (fully opaque) must encode without error to
// non-empty output.
#[test]
fn encode_rgba16_roundtrip() {
    let encoder = PngEncoderConfig::new();
    let px = Rgba {
        r: 1000u16,
        g: 2000,
        b: 3000,
        a: 65535,
    };
    let image = Img::new(vec![px; 4], 2, 2);
    let output = encoder.encode_rgba16(image.as_ref()).unwrap();
    assert!(!output.data().is_empty());
}
// A tiny 2x2 Gray16 image must encode without error to non-empty output.
#[test]
fn encode_gray16_roundtrip() {
    let encoder = PngEncoderConfig::new();
    let image = Img::new(vec![Gray::new(30000u16); 4], 2, 2);
    let output = encoder.encode_gray16(image.as_ref()).unwrap();
    assert!(!output.data().is_empty());
}
// Exhaustively maps every generic effort level (including out-of-range
// values, which clamp) to its Compression preset. Uses matches! —
// presumably because Compression does not implement PartialEq.
#[test]
fn effort_to_compression_all_levels() {
use crate::Compression;
// Negative and zero effort both mean "no compression".
assert!(matches!(effort_to_compression(-1), Compression::None));
assert!(matches!(effort_to_compression(0), Compression::None));
assert!(matches!(effort_to_compression(1), Compression::Fastest));
assert!(matches!(effort_to_compression(2), Compression::Turbo));
assert!(matches!(effort_to_compression(3), Compression::Fast));
assert!(matches!(effort_to_compression(4), Compression::Balanced));
assert!(matches!(effort_to_compression(5), Compression::Thorough));
assert!(matches!(effort_to_compression(6), Compression::High));
assert!(matches!(effort_to_compression(7), Compression::Aggressive));
assert!(matches!(effort_to_compression(8), Compression::Intense));
assert!(matches!(effort_to_compression(9), Compression::Crush));
assert!(matches!(effort_to_compression(10), Compression::Maniac));
assert!(matches!(effort_to_compression(11), Compression::Brag));
assert!(matches!(effort_to_compression(12), Compression::Minutes));
// Anything above the top level clamps to the slowest preset.
assert!(matches!(effort_to_compression(100), Compression::Minutes));
}
// Quality clamps to [0, 100]: 100 (or above) maps to zero error, 0 (or
// below) maps to the maximum 0.1 per-pixel error.
#[test]
fn quality_to_mpe_endpoints() {
    for (quality, expected) in [(100.0, 0.0), (0.0, 0.1), (150.0, 0.0), (-10.0, 0.1)] {
        assert_eq!(quality_to_mpe(quality), expected);
    }
}
// Spot-checks interior points of the quality -> max-per-pixel-error curve
// and verifies an interpolated point sits strictly between its anchors.
#[test]
fn quality_to_mpe_interpolation() {
    let q95 = quality_to_mpe(95.0);
    assert!((q95 - 0.007).abs() < 0.0001);
    let q50 = quality_to_mpe(50.0);
    assert!((q50 - 0.044).abs() < 0.0001);
    // q97 lies between the q99 (0.003) and q95 (0.007) anchor values.
    let q97 = quality_to_mpe(97.0);
    assert!(q97 > 0.003);
    assert!(q97 < 0.007);
}
// A job configured with an (unstoppable) stop token must still build an
// encoder and complete a small encode via the internal do_encode path.
#[test]
fn encode_job_with_stop() {
let enc = PngEncoderConfig::new();
let stop = zencodec::StopToken::new(enough::Unstoppable);
let job = enc.job().with_stop(stop);
let encoder = job.encoder().unwrap();
let pixels = vec![Rgb { r: 0u8, g: 0, b: 0 }; 4];
let img = Img::new(pixels, 2, 2);
// do_encode takes raw bytes + dimensions + color type directly.
let out = encoder.do_encode(
bytemuck::cast_slice(img.buf()),
2,
2,
crate::encode::ColorType::Rgb,
);
assert!(out.is_ok());
}
// Default (unrestricted) resource limits must not interfere with a small
// encode through the job builder.
#[test]
fn encode_job_with_limits() {
let enc = PngEncoderConfig::new();
let limits = ResourceLimits::default();
let job = enc.job().with_limits(limits);
let encoder = job.encoder().unwrap();
let pixels = vec![Rgb { r: 0u8, g: 0, b: 0 }; 4];
let img = Img::new(pixels, 2, 2);
let out = encoder.do_encode(
bytemuck::cast_slice(img.buf()),
2,
2,
crate::encode::ColorType::Rgb,
);
assert!(out.is_ok());
}
// Attaching empty/default metadata to a job must not break encoding.
#[test]
fn encode_job_with_metadata() {
let enc = PngEncoderConfig::new();
let meta = Metadata::default();
let job = enc.job().with_metadata(meta);
let encoder = job.encoder().unwrap();
let pixels = vec![Rgb { r: 0u8, g: 0, b: 0 }; 4];
let img = Img::new(pixels, 2, 2);
let out = encoder.do_encode(
bytemuck::cast_slice(img.buf()),
2,
2,
crate::encode::ColorType::Rgb,
);
assert!(out.is_ok());
}
// A job with a canvas size and loop count must yield an APNG frame encoder.
#[test]
fn encode_job_animation_frame_encoder() {
    let job = PngEncoderConfig::new()
        .job()
        .with_canvas_size(8, 8)
        .with_loop_count(Some(0));
    assert!(job.animation_frame_encoder().is_ok());
}
// The advertised descriptor list must be non-empty and include the two
// most common input formats.
#[test]
fn encoder_supported_descriptors() {
    let supported = <PngEncoderConfig as EncoderConfig>::supported_descriptors();
    assert!(!supported.is_empty());
    for required in [PixelDescriptor::RGB8_SRGB, PixelDescriptor::RGBA8_SRGB] {
        assert!(supported.contains(&required));
    }
}
// A fresh config reports lossless; setting a quality below 100 flips it.
#[test]
fn encoder_is_lossless() {
    let lossless = PngEncoderConfig::new();
    assert_eq!(lossless.is_lossless(), Some(true));
    let lossy = lossless.with_generic_quality(90.0);
    assert_eq!(lossy.is_lossless(), Some(false));
}
// Generic Encoder-trait path: a 16x16 RGB8 image encodes to non-empty
// output tagged as PNG.
#[test]
fn encoder_trait_rgb8() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgb<u8>> = (0..16 * 16)
.map(|i| Rgb {
r: (i % 256) as u8,
g: ((i * 3) % 256) as u8,
b: ((i * 7) % 256) as u8,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: a 16x16 opaque RGBA8 image encodes to
// non-empty PNG output.
#[test]
fn encoder_trait_rgba8() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgba<u8>> = (0..16 * 16)
.map(|i| Rgba {
r: (i % 256) as u8,
g: ((i * 3) % 256) as u8,
b: ((i * 7) % 256) as u8,
a: 255,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: a 16x16 Gray8 gradient encodes to non-empty
// PNG output.
#[test]
fn encoder_trait_gray8() {
use zencodec::encode::Encoder;
let pixels: Vec<Gray<u8>> = (0..16 * 16).map(|i| Gray((i % 256) as u8)).collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: a 16x16 RGB16 gradient encodes to non-empty
// PNG output. (i ranges 0..256, so i * 256 stays within u16.)
#[test]
fn encoder_trait_rgb16() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgb<u16>> = (0..16 * 16)
.map(|i| Rgb {
r: (i * 256) as u16,
g: ((i * 3 * 256) % 65536) as u16,
b: 0,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: a 16x16 opaque RGBA16 gradient encodes to
// non-empty PNG output.
#[test]
fn encoder_trait_rgba16() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgba<u16>> = (0..16 * 16)
.map(|i| Rgba {
r: (i * 256) as u16,
g: ((i * 3 * 256) % 65536) as u16,
b: 0,
a: 65535,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: a 16x16 Gray16 gradient encodes to non-empty
// PNG output.
#[test]
fn encoder_trait_gray16() {
use zencodec::encode::Encoder;
let pixels: Vec<Gray<u16>> = (0..16 * 16).map(|i| Gray((i * 256) as u16)).collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: linear-light RGBF32 input (values in [0, 1])
// encodes to non-empty PNG output.
#[test]
fn encoder_trait_rgb_f32() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgb<f32>> = (0..16 * 16)
.map(|i| Rgb {
r: (i % 256) as f32 / 255.0,
g: ((i * 3) % 256) as f32 / 255.0,
b: ((i * 7) % 256) as f32 / 255.0,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: linear-light RGBAF32 input (alpha = 1.0)
// encodes to non-empty PNG output.
#[test]
fn encoder_trait_rgba_f32() {
use zencodec::encode::Encoder;
let pixels: Vec<Rgba<f32>> = (0..16 * 16)
.map(|i| Rgba {
r: (i % 256) as f32 / 255.0,
g: ((i * 3) % 256) as f32 / 255.0,
b: ((i * 7) % 256) as f32 / 255.0,
a: 1.0,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: linear-light GrayF32 input encodes to
// non-empty PNG output.
#[test]
fn encoder_trait_gray_f32() {
use zencodec::encode::Encoder;
let pixels: Vec<Gray<f32>> = (0..16 * 16)
.map(|i| Gray((i % 256) as f32 / 255.0))
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// Generic Encoder-trait path: BGRA8 channel ordering is accepted and
// encodes to non-empty PNG output (channel swizzling happens internally).
#[test]
fn encoder_trait_bgra8() {
use zencodec::encode::Encoder;
let pixels: Vec<rgb::alt::BGRA<u8>> = (0..16 * 16)
.map(|i| rgb::alt::BGRA {
b: (i % 256) as u8,
g: 128,
r: 64,
a: 255,
})
.collect();
let img = imgref::ImgVec::new(pixels, 16, 16);
let config = PngEncoderConfig::new();
let encoder = config.clone().job().encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).into())
.unwrap();
assert!(!output.is_empty());
assert_eq!(output.format(), ImageFormat::Png);
}
// A 100-byte max-output limit must reject encoding a 32x32 noisy image
// (whose compressed size necessarily exceeds it) with a limit error.
#[test]
fn encode_max_output_bytes_rejects_large_output() {
use zencodec::encode::Encoder;
// Pseudo-random gradient so the data cannot compress below the limit.
let pixels: Vec<Rgb<u8>> = (0..32 * 32)
.map(|i| Rgb {
r: (i % 256) as u8,
g: ((i * 3) % 256) as u8,
b: ((i * 7) % 256) as u8,
})
.collect();
let img = imgref::ImgVec::new(pixels, 32, 32);
let config = PngEncoderConfig::new();
let limits = ResourceLimits::none().with_max_output(100);
let encoder = config.clone().job().with_limits(limits).encoder().unwrap();
let result = encoder.encode(PixelSlice::from(img.as_ref()).erase());
let err = result.unwrap_err();
// Match on the error message text, not the variant, to stay robust to
// error-type refactors.
let msg = alloc::format!("{}", err);
assert!(
msg.contains("limit exceeded") || msg.contains("output size"),
"expected limit exceeded error, got: {msg}"
);
}
// A generous (10 KB) output-size limit must not reject a tiny 2x2 encode.
#[test]
fn encode_max_output_bytes_allows_small_output() {
    use zencodec::encode::Encoder;
    let black: Vec<Rgb<u8>> = vec![Rgb { r: 0, g: 0, b: 0 }; 4];
    let image = imgref::ImgVec::new(black, 2, 2);
    let limits = ResourceLimits::none().with_max_output(10_000);
    let encoder = PngEncoderConfig::new()
        .job()
        .with_limits(limits)
        .encoder()
        .unwrap();
    let outcome = encoder.encode(PixelSlice::from(image.as_ref()).erase());
    assert!(outcome.is_ok(), "expected success, got: {:?}", outcome.err());
}
// With max_frames = 2, the first two APNG frames are accepted and the third
// push_frame must fail with a frame-limit error.
#[test]
fn apng_push_frame_rejects_over_max_frames() {
use zencodec::encode::AnimationFrameEncoder;
let config = PngEncoderConfig::new();
let limits = ResourceLimits::none().with_max_frames(2);
let job = config
.job()
.with_canvas_size(4, 4)
.with_loop_count(Some(0))
.with_limits(limits);
let mut enc = job.animation_frame_encoder().unwrap();
// Helper producing identical 4x4 opaque black frames.
let make_frame = || {
let pixels: Vec<Rgba<u8>> = vec![
Rgba {
r: 0,
g: 0,
b: 0,
a: 255,
};
16
];
imgref::ImgVec::new(pixels, 4, 4)
};
let img1 = make_frame();
enc.push_frame(PixelSlice::from(img1.as_ref()).erase(), 100, None)
.unwrap();
let img2 = make_frame();
enc.push_frame(PixelSlice::from(img2.as_ref()).erase(), 100, None)
.unwrap();
// Third frame exceeds the limit.
let img3 = make_frame();
let result = enc.push_frame(PixelSlice::from(img3.as_ref()).erase(), 100, None);
let err = result.unwrap_err();
let msg = alloc::format!("{}", err);
assert!(
msg.contains("limit exceeded") || msg.contains("frame count"),
"expected frame limit error, got: {msg}"
);
}
// With a 100-byte max-memory budget, the first buffered APNG frame fits but
// the second push_frame must fail with a memory-limit error.
#[test]
fn apng_push_frame_rejects_over_max_memory() {
use zencodec::encode::AnimationFrameEncoder;
let config = PngEncoderConfig::new();
let limits = ResourceLimits::none().with_max_memory(100);
let job = config
.job()
.with_canvas_size(4, 4)
.with_loop_count(Some(0))
.with_limits(limits);
let mut enc = job.animation_frame_encoder().unwrap();
// Helper producing identical 4x4 opaque black frames.
let make_frame = || {
let pixels: Vec<Rgba<u8>> = vec![
Rgba {
r: 0,
g: 0,
b: 0,
a: 255,
};
16
];
imgref::ImgVec::new(pixels, 4, 4)
};
let img1 = make_frame();
enc.push_frame(PixelSlice::from(img1.as_ref()).erase(), 100, None)
.unwrap();
let img2 = make_frame();
let result = enc.push_frame(PixelSlice::from(img2.as_ref()).erase(), 100, None);
let err = result.unwrap_err();
let msg = alloc::format!("{}", err);
assert!(
msg.contains("limit exceeded") || msg.contains("memory"),
"expected memory limit error, got: {msg}"
);
}
// A 10-byte max-input limit must reject decoding a valid (larger) PNG via
// the job-based decoder path with an input-size limit error.
#[test]
fn decode_max_input_bytes_rejects_large_input() {
use zencodec::decode::{Decode, DecodeJob, DecoderConfig};
let pixels: Vec<Rgb<u8>> = vec![
Rgb {
r: 128,
g: 64,
b: 32
};
64
];
let img = imgref::ImgVec::new(pixels, 8, 8);
let enc = PngEncoderConfig::new();
let encoded = enc.encode_rgb8(img.as_ref()).unwrap();
let data = encoded.data();
let dec = PngDecoderConfig::new();
let limits = ResourceLimits::none().with_max_input_bytes(10);
let result = dec
.job()
.with_limits(limits)
.decoder(Cow::Borrowed(data), &[])
.unwrap()
.decode();
let err = result.unwrap_err();
let msg = alloc::format!("{}", err);
assert!(
msg.contains("limit exceeded") || msg.contains("input size"),
"expected input size limit error, got: {msg}"
);
}
// A generous (100 KB) input-size limit must not reject decoding a tiny PNG.
#[test]
fn decode_max_input_bytes_allows_small_input() {
    use zencodec::decode::{Decode, DecodeJob, DecoderConfig};
    let black: Vec<Rgb<u8>> = vec![Rgb { r: 0, g: 0, b: 0 }; 4];
    let image = imgref::ImgVec::new(black, 2, 2);
    let encoded = PngEncoderConfig::new().encode_rgb8(image.as_ref()).unwrap();
    let limits = ResourceLimits::none().with_max_input_bytes(100_000);
    let outcome = PngDecoderConfig::new()
        .job()
        .with_limits(limits)
        .decoder(Cow::Borrowed(encoded.data()), &[])
        .unwrap()
        .decode();
    assert!(outcome.is_ok(), "expected success, got: {:?}", outcome.err());
}
// Forcing ThreadingPolicy::SingleThread must still yield a valid PNG
// (correct signature) that the decoder accepts.
#[test]
fn encode_single_thread_produces_valid_png() {
use zencodec::ThreadingPolicy;
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
let pixels: Vec<Rgb<u8>> = (0..32 * 32)
.map(|i| Rgb {
r: (i % 256) as u8,
g: ((i * 3) % 256) as u8,
b: ((i * 7) % 256) as u8,
})
.collect();
let img = imgref::ImgVec::new(pixels, 32, 32);
let config = PngEncoderConfig::new();
let limits = ResourceLimits::none().with_threading(ThreadingPolicy::SingleThread);
let encoder = config.clone().job().with_limits(limits).encoder().unwrap();
let output = encoder
.encode(PixelSlice::from(img.as_ref()).erase())
.unwrap();
// 8-byte PNG signature.
assert_eq!(
&output.data()[0..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
use zencodec::decode::{Decode, DecodeJob, DecoderConfig};
let dec = PngDecoderConfig::new();
let result = dec
.job()
.decoder(Cow::Borrowed(output.data()), &[])
.unwrap()
.decode();
assert!(
result.is_ok(),
"roundtrip decode failed: {:?}",
result.err()
);
}
// Single-threaded and default-threaded encodes of the same uniform image
// should produce outputs of equal size.
// NOTE(review): this assumes the encoder's output is byte-deterministic
// regardless of thread count — confirm that guarantee; otherwise this test
// could flake on content that splits differently across threads.
#[test]
fn encode_single_thread_matches_default_threading() {
use zencodec::ThreadingPolicy;
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
let pixels: Vec<Rgb<u8>> = vec![
Rgb {
r: 100,
g: 150,
b: 200,
};
16 * 16
];
let img = imgref::ImgVec::new(pixels, 16, 16);
let config_st = PngEncoderConfig::new();
let limits = ResourceLimits::none().with_threading(ThreadingPolicy::SingleThread);
let st_output = config_st
.job()
.with_limits(limits)
.encoder()
.unwrap()
.encode(PixelSlice::from(img.as_ref()).erase())
.unwrap();
let config_def = PngEncoderConfig::new();
let def_output = config_def
.job()
.encoder()
.unwrap()
.encode(PixelSlice::from(img.as_ref()).erase())
.unwrap();
assert_eq!(
&st_output.data()[0..8],
&[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]
);
assert_eq!(st_output.data().len(), def_output.data().len());
}
// Streaming encode: pushing the image in 2-row strips must decode to pixel
// data identical to a one-shot encode of the same image.
#[test]
fn push_rows_rgb8_roundtrip() {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
let w = 4u32;
let h = 6u32;
let pixels: Vec<Rgb<u8>> = (0..w * h)
.map(|i| Rgb {
r: (i * 7) as u8,
g: (i * 13) as u8,
b: (i * 19) as u8,
})
.collect();
let img = Img::new(pixels.clone(), w as usize, h as usize);
let config = PngEncoderConfig::new();
let mut encoder = config
.clone()
.job()
.with_canvas_size(w, h)
.encoder()
.unwrap();
// Feed the image as three 2-row strips.
for strip_y in (0..h).step_by(2) {
let strip = img.sub_image(0, strip_y as usize, w as usize, 2);
let slice = PixelSlice::from(strip).erase();
encoder.push_rows(slice).unwrap();
}
let push_output = encoder.finish().unwrap();
let oneshot_output = config
.job()
.encoder()
.unwrap()
.encode(PixelSlice::from(img.as_ref()).erase())
.unwrap();
assert!(!push_output.data().is_empty());
let decoded_push = crate::decode(
push_output.data(),
&crate::PngDecodeConfig::strict(),
&enough::Unstoppable,
)
.unwrap();
let decoded_one = crate::decode(
oneshot_output.data(),
&crate::PngDecodeConfig::strict(),
&enough::Unstoppable,
)
.unwrap();
assert_eq!(decoded_push.info.width, w);
assert_eq!(decoded_push.info.height, h);
// Pixel-for-pixel equality between the streamed and one-shot paths.
assert_eq!(
decoded_push.pixels.copy_to_contiguous_bytes(),
decoded_one.pixels.copy_to_contiguous_bytes()
);
}
// Streaming encode at the finest granularity: pushing one row per call
// still produces a decodable PNG with the declared dimensions.
#[test]
fn push_rows_rgba8_single_row_at_a_time() {
use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
let w = 3u32;
let h = 4u32;
let pixels: Vec<Rgba<u8>> = (0..w * h)
.map(|i| Rgba {
r: (i * 5) as u8,
g: (i * 11) as u8,
b: (i * 17) as u8,
a: 255,
})
.collect();
let img = Img::new(pixels, w as usize, h as usize);
let config = PngEncoderConfig::new();
let mut encoder = config
.clone()
.job()
.with_canvas_size(w, h)
.encoder()
.unwrap();
for y in 0..h {
let strip = img.sub_image(0, y as usize, w as usize, 1);
encoder.push_rows(PixelSlice::from(strip).erase()).unwrap();
}
let output = encoder.finish().unwrap();
let decoded = crate::decode(
output.data(),
&crate::PngDecodeConfig::strict(),
&enough::Unstoppable,
)
.unwrap();
assert_eq!(decoded.info.width, w);
assert_eq!(decoded.info.height, h);
}
#[test]
fn push_rows_all_at_once() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (8u32, 8u32);
    // Flat-color image: every pixel identical.
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|_| Rgb {
            r: 128,
            g: 64,
            b: 32,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    // A single push_rows call carrying the full canvas is valid.
    enc.push_rows(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let out = enc.finish().unwrap();
    let dec = crate::decode(
        out.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
}
#[test]
fn push_rows_gray8() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (5u32, 3u32);
    // Grayscale ramp; step 17 keeps the values varied.
    let px: Vec<Gray<u8>> = (0..width * height)
        .map(|i| Gray::new((i * 17) as u8))
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    enc.push_rows(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let out = enc.finish().unwrap();
    let dec = crate::decode(
        out.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
}
#[test]
fn push_rows_finish_without_push_errors() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    // Finishing a streaming encoder before any rows were pushed must fail.
    let enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(4, 4)
        .encoder()
        .unwrap();
    assert!(enc.finish().is_err());
}
#[test]
fn push_rows_overflow_errors() {
    // Pushing more rows than the declared canvas height must be rejected.
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let w = 2u32;
    let h = 2u32;
    // Three rows of pixels for a canvas that only has room for two.
    // (Original jammed these two statements onto one line.)
    let pixels: Vec<Rgb<u8>> = vec![Rgb { r: 0, g: 0, b: 0 }; (w * 3) as usize];
    let img = Img::new(pixels, w as usize, 3);
    let config = PngEncoderConfig::new();
    let mut encoder = config
        .clone()
        .job()
        .with_canvas_size(w, h)
        .encoder()
        .unwrap();
    let result = encoder.push_rows(PixelSlice::from(img.as_ref()).erase());
    assert!(result.is_err());
}
#[test]
fn push_rows_matches_encode_output() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (16u32, 16u32);
    // Varied RGBA content so compression has real work to do.
    let px: Vec<Rgba<u8>> = (0..width * height)
        .map(|i| Rgba {
            r: (i % 256) as u8,
            g: ((i * 3) % 256) as u8,
            b: ((i * 7) % 256) as u8,
            a: 255,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let cfg = PngEncoderConfig::new().with_generic_effort(3);
    let mut enc = cfg
        .clone()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    // Stream in strips of up to four rows.
    for y in (0..height).step_by(4) {
        let strip_h = (height - y).min(4);
        let strip = image.sub_image(0, y as usize, width as usize, strip_h as usize);
        enc.push_rows(PixelSlice::from(strip).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    let oneshot = cfg
        .clone()
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let dec_streamed = crate::decode(
        streamed.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    let dec_oneshot = crate::decode(
        oneshot.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    // Streaming and one-shot encodes must decode to the same pixels.
    assert_eq!(
        dec_streamed.pixels.copy_to_contiguous_bytes(),
        dec_oneshot.pixels.copy_to_contiguous_bytes()
    );
}
#[test]
fn push_rows_caps_advertised() {
    use zencodec::encode::EncoderConfig;
    // The PNG encoder must advertise row-level (streaming) encode support.
    assert!(PngEncoderConfig::capabilities()
        .supports(zencodec::UnsupportedOperation::RowLevelEncode));
}
#[test]
fn push_rows_infer_canvas_width() {
    // Without an explicit canvas size the encoder still produces a valid
    // image from pushed rows (dimensions inferred from the pushes).
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let w = 4u32;
    let h = 2u32;
    let pixels: Vec<Rgb<u8>> = vec![
        Rgb {
            r: 100,
            g: 100,
            b: 100,
        };
        (w * h) as usize
    ];
    let img = Img::new(pixels, w as usize, h as usize);
    let config = PngEncoderConfig::new();
    // Original jammed the binding and the first call onto one line;
    // split for readability (rustfmt style).
    let mut encoder = config.clone().job().encoder().unwrap();
    encoder
        .push_rows(PixelSlice::from(img.as_ref()).erase())
        .unwrap();
    let output = encoder.finish().unwrap();
    let decoded = crate::decode(
        output.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(decoded.info.width, w);
    assert_eq!(decoded.info.height, h);
}
#[test]
fn streaming_effort0_rgb8_roundtrip() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (4u32, 6u32);
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|i| Rgb {
            r: (i * 7) as u8,
            g: (i * 13) as u8,
            b: (i * 19) as u8,
        })
        .collect();
    let image = Img::new(px.clone(), width as usize, height as usize);
    // Uncompressed output exercises the streaming store path.
    let cfg = PngEncoderConfig::new().with_compression(crate::Compression::None);
    let mut enc = cfg
        .clone()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    for y in (0..height).step_by(2) {
        let strip = image.sub_image(0, y as usize, width as usize, 2);
        enc.push_rows(PixelSlice::from(strip).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    let dec_streamed = crate::decode(
        streamed.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec_streamed.info.width, width);
    assert_eq!(dec_streamed.info.height, height);
    // Compare against a one-shot encode of the same image.
    let oneshot = cfg
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let dec_oneshot = crate::decode(
        oneshot.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(
        dec_streamed.pixels.copy_to_contiguous_bytes(),
        dec_oneshot.pixels.copy_to_contiguous_bytes()
    );
}
#[test]
fn streaming_effort0_rgba8_single_row() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (3u32, 4u32);
    // Semi-transparent gradient (alpha 200).
    let px: Vec<Rgba<u8>> = (0..width * height)
        .map(|i| Rgba {
            r: (i * 5) as u8,
            g: (i * 11) as u8,
            b: (i * 17) as u8,
            a: 200,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let mut enc = PngEncoderConfig::new()
        .with_compression(crate::Compression::None)
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    // One row per push.
    for y in 0..height {
        let row = image.sub_image(0, y as usize, width as usize, 1);
        enc.push_rows(PixelSlice::from(row).erase()).unwrap();
    }
    let out = enc.finish().unwrap();
    let dec = crate::decode(
        out.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
}
#[test]
fn streaming_effort0_gray8() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (8u32, 4u32);
    // Simple grayscale ramp.
    let px: Vec<Gray<u8>> = (0..width * height).map(|i| Gray(i as u8)).collect();
    let image = Img::new(px, width as usize, height as usize);
    let mut enc = PngEncoderConfig::new()
        .with_compression(crate::Compression::None)
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    enc.push_rows(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let out = enc.finish().unwrap();
    let dec = crate::decode(
        out.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
}
#[test]
fn streaming_effort0_matches_oneshot_effort0() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (16u32, 12u32);
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|i| Rgb {
            r: (i * 3 + 7) as u8,
            g: (i * 5 + 11) as u8,
            b: (i * 7 + 13) as u8,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let cfg = PngEncoderConfig::new().with_compression(crate::Compression::None);
    // One-shot baseline encode.
    let oneshot = cfg
        .clone()
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    // Streaming in three-row strips must produce byte-identical output.
    let mut enc = cfg
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    for y in (0..height).step_by(3) {
        let strip_h = (height - y).min(3);
        let strip = image.sub_image(0, y as usize, width as usize, strip_h as usize);
        enc.push_rows(PixelSlice::from(strip).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    assert_eq!(oneshot.data(), streamed.data());
}
#[test]
fn streaming_effort0_large_row() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    // A very wide image stresses per-row buffer handling.
    let (width, height) = (16384u32, 2u32);
    let px: Vec<Rgba<u8>> = (0..width * height)
        .map(|i| Rgba {
            r: (i % 251) as u8,
            g: (i % 241) as u8,
            b: (i % 239) as u8,
            a: 200,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let cfg = PngEncoderConfig::new().with_compression(crate::Compression::None);
    let mut enc = cfg
        .clone()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    for y in 0..height {
        let row = image.sub_image(0, y as usize, width as usize, 1);
        enc.push_rows(PixelSlice::from(row).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    let dec = crate::decode(
        streamed.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
    // The streamed bytes must match a one-shot encode exactly.
    let oneshot = cfg
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    assert_eq!(oneshot.data(), streamed.data());
}
#[test]
fn streaming_effort0_fallback_without_canvas_height() {
    // Even with Compression::None and no declared canvas size, pushing all
    // rows and finishing must yield a decodable image of the right size.
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let w = 4u32;
    let h = 3u32;
    let pixels: Vec<Rgb<u8>> = vec![
        Rgb {
            r: 50,
            g: 100,
            b: 150,
        };
        (w * h) as usize
    ];
    let img = Img::new(pixels, w as usize, h as usize);
    let config = PngEncoderConfig::new().with_compression(crate::Compression::None);
    // Original jammed the binding and the first call onto one line;
    // split for readability (rustfmt style).
    let mut encoder = config.clone().job().encoder().unwrap();
    encoder
        .push_rows(PixelSlice::from(img.as_ref()).erase())
        .unwrap();
    let output = encoder.finish().unwrap();
    let decoded = crate::decode(
        output.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(decoded.info.width, w);
    assert_eq!(decoded.info.height, h);
}
#[test]
fn streaming_effort1_rgb8_roundtrip() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (8u32, 6u32);
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|i| Rgb {
            r: (i * 7) as u8,
            g: (i * 13) as u8,
            b: (i * 19) as u8,
        })
        .collect();
    let image = Img::new(px.clone(), width as usize, height as usize);
    // Fastest compression exercises the effort-1 streaming path.
    let cfg = PngEncoderConfig::new().with_compression(crate::Compression::Fastest);
    let mut enc = cfg
        .clone()
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    for y in (0..height).step_by(2) {
        let strip = image.sub_image(0, y as usize, width as usize, 2);
        enc.push_rows(PixelSlice::from(strip).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    let dec_streamed = crate::decode(
        streamed.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec_streamed.info.width, width);
    assert_eq!(dec_streamed.info.height, height);
    let oneshot = cfg
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let dec_oneshot = crate::decode(
        oneshot.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    // Streamed and one-shot encodes must decode identically.
    assert_eq!(
        dec_streamed.pixels.copy_to_contiguous_bytes(),
        dec_oneshot.pixels.copy_to_contiguous_bytes()
    );
}
#[test]
fn streaming_effort1_matches_oneshot_bytes() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (12u32, 8u32);
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|i| Rgb {
            r: (i * 3 + 7) as u8,
            g: (i * 5 + 11) as u8,
            b: (i * 7 + 13) as u8,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let cfg = PngEncoderConfig::new().with_compression(crate::Compression::Fastest);
    // One-shot baseline.
    let oneshot = cfg
        .clone()
        .job()
        .encoder()
        .unwrap()
        .encode(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    // Streaming in three-row strips must be byte-identical to the baseline.
    let mut enc = cfg
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    for y in (0..height).step_by(3) {
        let strip_h = (height - y).min(3);
        let strip = image.sub_image(0, y as usize, width as usize, strip_h as usize);
        enc.push_rows(PixelSlice::from(strip).erase()).unwrap();
    }
    let streamed = enc.finish().unwrap();
    assert_eq!(oneshot.data(), streamed.data());
}
#[test]
fn streaming_effort1_smaller_than_effort0() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (64u32, 64u32);
    // Smooth 2-D gradient: highly compressible content.
    let px: Vec<Rgb<u8>> = (0..width * height)
        .map(|i| {
            let x = (i % width) as u8;
            let y = (i / width) as u8;
            Rgb {
                r: x,
                g: y,
                b: x.wrapping_add(y),
            }
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let cfg_store = PngEncoderConfig::new().with_compression(crate::Compression::None);
    let cfg_fast = PngEncoderConfig::new().with_compression(crate::Compression::Fastest);
    let mut enc_store = cfg_store
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    let mut enc_fast = cfg_fast
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    enc_store
        .push_rows(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    enc_fast
        .push_rows(PixelSlice::from(image.as_ref()).erase())
        .unwrap();
    let out_store = enc_store.finish().unwrap();
    let out_fast = enc_fast.finish().unwrap();
    // Actual compression must beat the stored (uncompressed) encode.
    assert!(
        out_fast.data().len() < out_store.data().len(),
        "effort 1 ({}) should be smaller than effort 0 ({})",
        out_fast.data().len(),
        out_store.data().len()
    );
}
#[test]
fn streaming_effort1_rgba8_single_row() {
    use zencodec::encode::{EncodeJob, Encoder, EncoderConfig};
    let (width, height) = (5u32, 4u32);
    // Semi-transparent gradient (alpha 200).
    let px: Vec<Rgba<u8>> = (0..width * height)
        .map(|i| Rgba {
            r: (i * 5) as u8,
            g: (i * 11) as u8,
            b: (i * 17) as u8,
            a: 200,
        })
        .collect();
    let image = Img::new(px, width as usize, height as usize);
    let mut enc = PngEncoderConfig::new()
        .with_compression(crate::Compression::Fastest)
        .job()
        .with_canvas_size(width, height)
        .encoder()
        .unwrap();
    // One row per push.
    for y in 0..height {
        let row = image.sub_image(0, y as usize, width as usize, 1);
        enc.push_rows(PixelSlice::from(row).erase()).unwrap();
    }
    let out = enc.finish().unwrap();
    let dec = crate::decode(
        out.data(),
        &crate::PngDecodeConfig::strict(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(dec.info.width, width);
    assert_eq!(dec.info.height, height);
}
#[test]
fn output_info_returns_gray8_for_grayscale_png() {
    // An 8-bit grayscale encode must advertise GRAY8_SRGB via output_info.
    let img = Img::new(vec![Gray(128u8); 16], 4, 4);
    let output = PngEncoderConfig::new().encode_gray8(img.as_ref()).unwrap();
    let info = PngDecoderConfig::new()
        .job()
        .output_info(output.data())
        .unwrap();
    assert_eq!(info.width, 4);
    assert_eq!(info.height, 4);
    assert_eq!(
        info.native_format,
        PixelDescriptor::GRAY8_SRGB,
        "grayscale PNG should report GRAY8_SRGB, not {:?}",
        info.native_format
    );
}
#[test]
fn output_info_returns_gray16_for_grayscale16_png() {
    // A value above u8 range (32769) forces a genuine 16-bit encode.
    let img = Img::new(vec![Gray(32769u16); 16], 4, 4);
    let output = PngEncoderConfig::new().encode_gray16(img.as_ref()).unwrap();
    let info = PngDecoderConfig::new()
        .job()
        .output_info(output.data())
        .unwrap();
    assert_eq!(
        info.native_format,
        PixelDescriptor::GRAY16_SRGB,
        "16-bit grayscale PNG should report GRAY16_SRGB, not {:?}",
        info.native_format
    );
}
#[test]
fn output_info_returns_rgb8_for_rgb_png() {
    // An RGB encode must advertise RGB8_SRGB via output_info.
    let px: Vec<Rgb<u8>> = (0..16)
        .map(|_| Rgb {
            r: 100,
            g: 150,
            b: 200,
        })
        .collect();
    let img = Img::new(px, 4, 4);
    let output = PngEncoderConfig::new().encode_rgb8(img.as_ref()).unwrap();
    let info = PngDecoderConfig::new()
        .job()
        .output_info(output.data())
        .unwrap();
    assert_eq!(
        info.native_format,
        PixelDescriptor::RGB8_SRGB,
        "RGB PNG should report RGB8_SRGB, not {:?}",
        info.native_format
    );
}
#[test]
fn output_info_returns_rgba8_for_rgba_png() {
    // A non-opaque alpha (128) keeps the encode in RGBA rather than RGB.
    let px: Vec<Rgba<u8>> = (0..16)
        .map(|_| Rgba {
            r: 100,
            g: 150,
            b: 200,
            a: 128,
        })
        .collect();
    let img = Img::new(px, 4, 4);
    let output = PngEncoderConfig::new().encode_rgba8(img.as_ref()).unwrap();
    let info = PngDecoderConfig::new()
        .job()
        .output_info(output.data())
        .unwrap();
    assert_eq!(
        info.native_format,
        PixelDescriptor::RGBA8_SRGB,
        "RGBA PNG should report RGBA8_SRGB, not {:?}",
        info.native_format
    );
}
#[test]
fn apng_finish_respects_stop_token() {
    use enough::{Stop, StopReason};
    use zencodec::encode::AnimationFrameEncoder;
    // A stop token that reports cancellation on every check.
    struct AlwaysCancelled;
    impl Stop for AlwaysCancelled {
        fn check(&self) -> Result<(), StopReason> {
            Err(StopReason::Cancelled)
        }
    }
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(4, 4)
        .with_loop_count(Some(0))
        .animation_frame_encoder()
        .unwrap();
    // One solid-black 4x4 frame.
    let frame: Vec<Rgba<u8>> = (0..16)
        .map(|_| Rgba {
            r: 0,
            g: 0,
            b: 0,
            a: 255,
        })
        .collect();
    let img = imgref::ImgVec::new(frame, 4, 4);
    enc.push_frame(PixelSlice::from(img.as_ref()).erase(), 100, None)
        .unwrap();
    let result = enc.finish(Some(&AlwaysCancelled));
    assert!(result.is_err(), "finish with cancelled stop should fail");
    // The error must surface the cancellation, not some other failure.
    match result.unwrap_err().decompose().0 {
        PngError::Stopped(reason) => assert_eq!(reason, StopReason::Cancelled),
        other => panic!("expected PngError::Stopped, got: {other}"),
    }
}
#[test]
fn apng_finish_succeeds_without_stop_token() {
    use zencodec::encode::AnimationFrameEncoder;
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(4, 4)
        .with_loop_count(Some(0))
        .animation_frame_encoder()
        .unwrap();
    // One solid-red 4x4 frame.
    let frame: Vec<Rgba<u8>> = (0..16)
        .map(|_| Rgba {
            r: 255,
            g: 0,
            b: 0,
            a: 255,
        })
        .collect();
    let img = imgref::ImgVec::new(frame, 4, 4);
    enc.push_frame(PixelSlice::from(img.as_ref()).erase(), 100, None)
        .unwrap();
    let result = enc.finish(None);
    assert!(
        result.is_ok(),
        "finish without stop should succeed: {:?}",
        result.err()
    );
}
#[test]
fn apng_pixels_to_rgba8_rejects_unsupported_format() {
    use zencodec::encode::AnimationFrameEncoder;
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(4, 4)
        .with_loop_count(Some(0))
        .animation_frame_encoder()
        .unwrap();
    // 16-bit RGBA frames are not accepted by push_frame.
    let frame: Vec<Rgba<u16>> = (0..16)
        .map(|_| Rgba {
            r: 1000,
            g: 2000,
            b: 3000,
            a: 65535,
        })
        .collect();
    let img = imgref::ImgVec::new(frame, 4, 4);
    let result = enc.push_frame(PixelSlice::from(img.as_ref()).erase(), 100, None);
    assert!(result.is_err(), "16-bit RGBA should be rejected");
    // The error message should both name the problem and list what IS accepted.
    let msg = alloc::format!("{}", result.unwrap_err());
    assert!(
        msg.contains("unsupported pixel format"),
        "error should mention unsupported pixel format, got: {msg}"
    );
    assert!(
        msg.contains("RGBA8") || msg.contains("supported formats"),
        "error should list supported formats, got: {msg}"
    );
}
#[test]
fn apng_pixels_to_rgba8_handles_gray8() {
    use zencodec::encode::AnimationFrameEncoder;
    let mut enc = PngEncoderConfig::new()
        .job()
        .with_canvas_size(4, 4)
        .with_loop_count(Some(0))
        .animation_frame_encoder()
        .unwrap();
    // A GRAY8 frame must be accepted (presumably converted to RGBA8
    // internally, per the test name) and encode to a decodable file.
    let img = imgref::ImgVec::new(vec![Gray(128u8); 16], 4, 4);
    enc.push_frame(PixelSlice::from(img.as_ref()).erase(), 100, None)
        .unwrap();
    let output = enc.finish(None).unwrap();
    assert!(!output.data().is_empty());
    assert_eq!(output.format(), ImageFormat::Png);
    let decoded = crate::decode::decode(
        output.data(),
        &PngDecodeConfig::none(),
        &enough::Unstoppable,
    )
    .unwrap();
    assert_eq!(decoded.info.width, 4);
    assert_eq!(decoded.info.height, 4);
}
}