use super::ac_context::BlockCtxMap;
use super::cluster::{Histogram, cluster_histograms};
use super::common::pack_signed;
use crate::bit_writer::BitWriter;
#[cfg(feature = "debug-tokens")]
use crate::debug_log;
use crate::entropy_coding::encode::{
ALPHABET_SIZE, EntropyCode, PrefixCode, convert_bit_depths_to_symbols, create_huffman_tree,
write_entropy_code, write_prefix_codes, write_token,
};
use crate::entropy_coding::token::{Token, UintCoder};
use crate::error::Result;
/// Number of distinct token contexts used when entropy-coding a context tree
/// (the first element of each pair in [`CONTEXT_TREE_TOKENS`] indexes one).
pub const NUM_TREE_CONTEXTS: usize = 6;
/// Number of entries in the fixed [`CONTEXT_TREE_TOKENS`] table.
pub const NUM_CONTEXT_TREE_TOKENS: usize = 313;
/// Pre-tokenized serialization of a fixed context tree, stored as
/// `(context, value)` pairs that are replayed through the entropy coder by
/// [`write_context_tree`].
///
/// NOTE(review): the concrete meaning of each context id (0..6) and of the
/// values is defined by the decoder's context-tree token format — presumably
/// split/leaf node records; confirm against the token / `UintCoder` spec.
pub static CONTEXT_TREE_TOKENS: [(u32, u32); NUM_CONTEXT_TREE_TOKENS] = [
    (1, 2),
    // Index 1: placeholder — overwritten at write time with
    // pack_signed(1 + num_dc_groups) in write_context_tree.
    (0, 4),
    (1, 1),
    (0, 2),
    (1, 10),
    (0, 0),
    (1, 1),
    (0, 4),
    (1, 1),
    (0, 0),
    (1, 10),
    (0, 94),
    (1, 10),
    (0, 61),
    (1, 0),
    (2, 0),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 3),
    (0, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 10),
    (0, 382),
    (1, 10),
    (0, 22),
    (1, 10),
    (0, 13),
    (1, 10),
    (0, 253),
    (1, 8),
    (0, 10),
    (1, 8),
    (0, 10),
    (1, 10),
    (0, 784),
    (1, 10),
    (0, 190),
    (1, 10),
    (0, 46),
    (1, 10),
    (0, 10),
    (1, 10),
    (0, 5),
    (1, 10),
    (0, 29),
    (1, 10),
    (0, 125),
    (1, 10),
    (0, 509),
    (1, 8),
    (0, 22),
    (1, 8),
    (0, 6),
    (1, 8),
    (0, 22),
    (1, 8),
    (0, 6),
    (1, 10),
    (0, 1000),
    (1, 10),
    (0, 510),
    (1, 10),
    (0, 254),
    (1, 10),
    (0, 126),
    (1, 10),
    (0, 62),
    (1, 10),
    (0, 30),
    (1, 10),
    (0, 14),
    (1, 10),
    (0, 6),
    (1, 10),
    (0, 1),
    (1, 10),
    (0, 7),
    (1, 10),
    (0, 21),
    (1, 10),
    (0, 45),
    (1, 10),
    (0, 93),
    (1, 10),
    (0, 189),
    (1, 10),
    (0, 381),
    (1, 10),
    (0, 783),
    // Runs of 5-token groups below; the same quintuple shapes recur.
    (1, 0),
    (2, 1),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 1),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 1),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 1),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 0),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 0),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 0),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 0),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 10),
    (0, 2),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 10),
    (0, 999),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
    (1, 0),
    (2, 5),
    (3, 0),
    (4, 0),
    (5, 0),
];
/// Fixed 39-entry block context map written by [`write_block_context_map`];
/// values 0..=3 collapse the block contexts into four clusters.
/// NOTE(review): the entry ordering is presumably dictated by the decoder's
/// block-context definition — confirm against the corresponding table/spec.
pub static COMPACT_BLOCK_CONTEXT_MAP: [u8; 39] = [
    0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3,
    3, 3, 3, 3, 3, 3, 3, 3,
];
/// Builds the clustered context map and per-cluster prefix codes used to
/// entropy-code a context tree's token stream.
///
/// Tokens are accumulated into one histogram per tree context, the histograms
/// are clustered (possibly merging contexts), and a length-limited Huffman
/// code is derived for each surviving cluster.
fn build_context_tree_entropy_code(tokens: &[Token]) -> (Vec<u8>, Vec<PrefixCode>) {
    // One histogram per tree context; cluster_histograms may merge them.
    let mut histograms: Vec<Histogram> = std::iter::repeat_with(Histogram::new)
        .take(NUM_TREE_CONTEXTS)
        .collect();
    for tok in tokens {
        let coded = UintCoder::encode(tok.value);
        histograms[tok.context() as usize].add(coded.token as usize);
    }
    let context_map = cluster_histograms(&mut histograms);

    let mut prefix_codes = Vec::with_capacity(histograms.len());
    #[allow(clippy::unused_enumerate_index)]
    for (_i, hist) in histograms.iter().enumerate() {
        // Trim trailing zero counts; Huffman construction needs a non-empty
        // alphabet, so fall back to a single symbol when everything is zero.
        let alphabet_len = hist
            .counts
            .iter()
            .rposition(|&c| c != 0)
            .map_or(1, |last| last + 1);
        let mut depths = [0u8; ALPHABET_SIZE];
        create_huffman_tree(&hist.counts, alphabet_len, 15, &mut depths);
        let mut bits = [0u16; ALPHABET_SIZE];
        convert_bit_depths_to_symbols(&depths, &mut bits);
        #[cfg(feature = "debug-tokens")]
        {
            let depth_slice: Vec<u8> = depths.iter().take(alphabet_len.min(20)).copied().collect();
            debug_log!(
                " context_tree BuildHuffmanCodes[{}]: length={}, depths={:?}{}",
                _i,
                alphabet_len,
                depth_slice,
                if alphabet_len > 20 { ", ..." } else { "" }
            );
        }
        prefix_codes.push(PrefixCode { depths, bits });
    }
    #[cfg(feature = "debug-tokens")]
    {
        debug_log!(
            " context_tree_entropy: {} histograms -> {} prefix codes, context_map len={}",
            NUM_TREE_CONTEXTS,
            prefix_codes.len(),
            context_map.len()
        );
    }
    (context_map, prefix_codes)
}
/// Writes the fixed context tree for `num_dc_groups` DC groups.
///
/// Replays [`CONTEXT_TREE_TOKENS`], patching the second token so its value
/// reflects the actual DC group count, then emits the entropy-code header,
/// the code itself, and the token stream.
pub fn write_context_tree(num_dc_groups: usize, writer: &mut BitWriter) -> Result<()> {
    let mut tokens = Vec::with_capacity(CONTEXT_TREE_TOKENS.len());
    for &(ctx, val) in CONTEXT_TREE_TOKENS.iter() {
        tokens.push(Token::new(ctx, val));
    }
    // The token at index 1 carries a signed value derived from the DC group
    // count; everything else in the table is fixed.
    tokens[1].value = pack_signed(1 + num_dc_groups as i32);
    let (context_map, prefix_codes) = build_context_tree_entropy_code(&tokens);
    #[cfg(feature = "debug-tokens")]
    {
        debug_log!(
            "context_tree: {} contexts, {} prefix codes, context_map={:?}",
            context_map.len(),
            prefix_codes.len(),
            context_map
        );
    }
    // NOTE(review): two 1-bit header fields precede the entropy code —
    // presumably "coded tree" + a disabled extension; confirm against the
    // decoder's bitstream layout.
    writer.write(1, 1)?;
    writer.write(1, 0)?;
    let code = EntropyCode::new(&context_map, &prefix_codes);
    write_entropy_code(&code, writer)?;
    for tok in &tokens {
        write_token(tok, &code, None, writer)?;
    }
    Ok(())
}
/// Writes a context tree learned from the image, given as `(context, value)`
/// token pairs.
///
/// When `tree_tokens` is empty, a minimal 5-token tree (the same quintuple
/// shape that recurs in [`CONTEXT_TREE_TOKENS`]) is written instead.
/// `_num_dc_groups` is accepted for signature parity with
/// [`write_context_tree`] but is not used here.
pub fn write_learned_context_tree(
    tree_tokens: &[(u32, u32)],
    _num_dc_groups: usize,
    writer: &mut BitWriter,
) -> Result<()> {
    if tree_tokens.is_empty() {
        // Degenerate fallback tree.
        let fallback: Vec<Token> = [(1, 0), (2, 5), (3, 0), (4, 0), (5, 0)]
            .iter()
            .map(|&(ctx, val)| Token::new(ctx, val))
            .collect();
        return write_learned_context_tree_inner(&fallback, writer);
    }
    let tokens: Vec<Token> = tree_tokens
        .iter()
        .map(|&(ctx, val)| Token::new(ctx, val))
        .collect();
    write_learned_context_tree_inner(&tokens, writer)
}
/// Emits the entropy-code header, the code itself, and the token stream for a
/// context tree already expressed as tokens.
fn write_learned_context_tree_inner(tokens: &[Token], writer: &mut BitWriter) -> Result<()> {
    let (context_map, prefix_codes) = build_context_tree_entropy_code(tokens);
    #[cfg(feature = "debug-tokens")]
    {
        debug_log!(
            "learned_context_tree: {} tokens, {} contexts, {} prefix codes",
            tokens.len(),
            context_map.len(),
            prefix_codes.len()
        );
    }
    // Same two 1-bit header fields as write_context_tree.
    writer.write(1, 1)?;
    writer.write(1, 0)?;
    let code = EntropyCode::new(&context_map, &prefix_codes);
    write_entropy_code(&code, writer)?;
    tokens
        .iter()
        .try_for_each(|tok| write_token(tok, &code, None, writer))
}
pub fn write_block_context_map(writer: &mut BitWriter) -> Result<()> {
#[cfg(feature = "debug-tokens")]
let start_bits = writer.bits_written();
let max_val = *COMPACT_BLOCK_CONTEXT_MAP.iter().max().unwrap_or(&0);
if max_val == 0 {
writer.write(3, 1)?; return Ok(());
}
writer.write(3, 0)?;
let tokens: Vec<Token> = COMPACT_BLOCK_CONTEXT_MAP
.iter()
.map(|&v| Token::new(0, v as u32))
.collect();
let mut histogram = [0u32; ALPHABET_SIZE];
for t in &tokens {
let encoded = UintCoder::encode(t.value);
histogram[encoded.token as usize] += 1;
}
let mut ctxmap_depths = [0u8; ALPHABET_SIZE];
let mut length = ALPHABET_SIZE;
while length > 0 && histogram[length - 1] == 0 {
length -= 1;
}
create_huffman_tree(&histogram, length.max(1), 15, &mut ctxmap_depths);
#[cfg(feature = "debug-tokens")]
{
let depth_slice: Vec<u8> = ctxmap_depths.iter().take(length).copied().collect();
debug_log!(
" write_block_context_map: {} entries, length={}, depths={:?}",
COMPACT_BLOCK_CONTEXT_MAP.len(),
length,
depth_slice
);
}
let mut ctxmap_bits = [0u16; ALPHABET_SIZE];
convert_bit_depths_to_symbols(&ctxmap_depths, &mut ctxmap_bits);
let ctxmap_code = PrefixCode {
depths: ctxmap_depths,
bits: ctxmap_bits,
};
#[cfg(feature = "debug-tokens")]
let before_prefix = writer.bits_written();
write_prefix_codes(&[ctxmap_code], writer)?;
#[cfg(feature = "debug-tokens")]
let after_prefix = writer.bits_written();
for t in &tokens {
let encoded = UintCoder::encode(t.value);
let tok = encoded.token as usize;
let depth = ctxmap_code.depths[tok] as usize;
let bits = ctxmap_code.bits[tok] as u64;
let data = bits | ((encoded.bits as u64) << depth);
let total_bits = depth + encoded.nbits as usize;
writer.write(total_bits, data)?;
}
#[cfg(feature = "debug-tokens")]
{
let total = writer.bits_written() - start_bits;
let prefix_bits = after_prefix - before_prefix;
let token_bits = writer.bits_written() - after_prefix;
debug_log!(
" write_block_context_map bits: header=3, prefix_code={}, tokens={}, total={}",
prefix_bits,
token_bits,
total
);
}
Ok(())
}
/// Writes one quantization-field threshold as a 2-bit selector followed by a
/// variable-width offset:
/// selector 0 => 2 bits (base 1), 1 => 3 bits (base 5),
/// selector 2 => 5 bits (base 13), 3 => 8 bits (base 45), in terms of `value`.
///
/// NOTE(review): `value - 1` underflows for value == 0 — callers are expected
/// to pass thresholds >= 1; confirm at the call sites.
fn write_qf_threshold(value: u32, writer: &mut BitWriter) -> Result<()> {
    let v = value - 1;
    let (selector, base, nbits) = if v < 4 {
        (0u64, 0u32, 2)
    } else if v < 12 {
        (1, 4, 3)
    } else if v < 44 {
        (2, 12, 5)
    } else {
        (3, 44, 8)
    };
    writer.write(2, selector)?;
    writer.write(nbits, (v - base) as u64)?;
    Ok(())
}
/// Writes an adaptive (learned) block context map: a header carrying the
/// quantization-field thresholds, followed by the clustered context map.
pub fn write_block_ctx_map_adaptive(ctx_map: &BlockCtxMap, writer: &mut BitWriter) -> Result<()> {
    #[cfg(feature = "debug-tokens")]
    let start_bits = writer.bits_written();
    // NOTE(review): 1-bit flag then three zeroed 4-bit fields — presumably
    // "non-default map" plus unused counts; confirm against the decoder.
    writer.write(1, 0)?;
    for _ in 0..3 {
        writer.write(4, 0)?;
    }
    writer.write(4, ctx_map.qf_thresholds.len() as u64)?;
    for &threshold in ctx_map.qf_thresholds.iter() {
        write_qf_threshold(threshold, writer)?;
    }
    #[cfg(feature = "debug-tokens")]
    {
        debug_log!(
            " write_block_ctx_map_adaptive: {} qf_thresholds={:?}, {} ctxs, map_len={}",
            ctx_map.qf_thresholds.len(),
            ctx_map.qf_thresholds,
            ctx_map.num_ctxs,
            ctx_map.ctx_map.len()
        );
    }
    write_context_map_from_slice(&ctx_map.ctx_map, writer)?;
    #[cfg(feature = "debug-tokens")]
    {
        debug_log!(
            " write_block_ctx_map_adaptive total: {} bits",
            writer.bits_written() - start_bits
        );
    }
    Ok(())
}
fn write_context_map_from_slice(map: &[u8], writer: &mut BitWriter) -> Result<()> {
let max_val = *map.iter().max().unwrap_or(&0);
if max_val == 0 {
writer.write(3, 1)?; return Ok(());
}
writer.write(3, 0)?;
let tokens: Vec<Token> = map.iter().map(|&v| Token::new(0, v as u32)).collect();
let mut histogram = [0u32; ALPHABET_SIZE];
for t in &tokens {
let encoded = UintCoder::encode(t.value);
histogram[encoded.token as usize] += 1;
}
let mut ctxmap_depths = [0u8; ALPHABET_SIZE];
let mut length = ALPHABET_SIZE;
while length > 0 && histogram[length - 1] == 0 {
length -= 1;
}
create_huffman_tree(&histogram, length.max(1), 15, &mut ctxmap_depths);
let mut ctxmap_bits = [0u16; ALPHABET_SIZE];
convert_bit_depths_to_symbols(&ctxmap_depths, &mut ctxmap_bits);
let ctxmap_code = PrefixCode {
depths: ctxmap_depths,
bits: ctxmap_bits,
};
write_prefix_codes(&[ctxmap_code], writer)?;
for t in &tokens {
let encoded = UintCoder::encode(t.value);
let tok = encoded.token as usize;
let depth = ctxmap_code.depths[tok] as usize;
let bits = ctxmap_code.bits[tok] as u64;
let data = bits | ((encoded.bits as u64) << depth);
let total_bits = depth + encoded.nbits as usize;
writer.write(total_bits, data)?;
}
Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    // The declared length constant must match the table it describes.
    #[test]
    fn test_context_tree_tokens_count() {
        assert_eq!(NUM_CONTEXT_TREE_TOKENS, CONTEXT_TREE_TOKENS.len());
    }

    // Every token's context must index a valid tree-context histogram.
    #[test]
    fn test_context_tree_tokens_contexts_in_range() {
        for (i, &(ctx, _)) in CONTEXT_TREE_TOKENS.iter().enumerate() {
            assert!(
                (ctx as usize) < NUM_TREE_CONTEXTS,
                "Token {} has context {} >= {}",
                i,
                ctx,
                NUM_TREE_CONTEXTS
            );
        }
    }

    #[test]
    fn test_compact_block_context_map_size() {
        assert_eq!(39, COMPACT_BLOCK_CONTEXT_MAP.len());
    }

    // Smoke test: the fixed tree serializes without error and emits bits.
    #[test]
    fn test_write_context_tree() {
        let mut bw = BitWriter::new();
        assert!(write_context_tree(1, &mut bw).is_ok());
        assert!(bw.bits_written() > 0);
    }

    // Smoke test: the default block context map serializes and emits bits.
    #[test]
    fn test_write_block_context_map() {
        let mut bw = BitWriter::new();
        assert!(write_block_context_map(&mut bw).is_ok());
        assert!(bw.bits_written() > 0);
    }

    // Smoke test: the adaptive path works with a default (trivial) map.
    #[test]
    fn test_write_block_ctx_map_adaptive_default() {
        let mut bw = BitWriter::new();
        assert!(write_block_ctx_map_adaptive(&BlockCtxMap::default(), &mut bw).is_ok());
        assert!(bw.bits_written() > 0);
    }

    // Smoke test: adaptive path with one qf threshold and a non-trivial map.
    #[test]
    fn test_write_block_ctx_map_adaptive_with_qf() {
        use super::super::coeff_order::NUM_ORDER_BUCKETS;
        let section = NUM_ORDER_BUCKETS * 2;
        // First section cycles 0,1,2; remaining sections alternate 3,4.
        let ctx_map: Vec<u8> = (0..section * 3)
            .map(|i| {
                if i < section {
                    (i % 3) as u8
                } else {
                    3 + (i % 2) as u8
                }
            })
            .collect();
        let map = BlockCtxMap {
            qf_thresholds: vec![10],
            ctx_map,
            num_ctxs: 5,
        };
        let mut bw = BitWriter::new();
        assert!(write_block_ctx_map_adaptive(&map, &mut bw).is_ok());
        assert!(bw.bits_written() > 0);
    }
}