use core::{borrow::Borrow, marker::PhantomData};
use alloc::{boxed::Box, vec::Vec};
use num_traits::{float::FloatCore, AsPrimitive};
use crate::{generic_static_asserts, wrapping_pow2, BitArray};
use super::{
super::{DecoderModel, EntropyModel, IterableEntropyModel},
accumulate_nonzero_probabilities,
contiguous::ContiguousCategoricalEntropyModel,
fast_quantized_cdf, iter_extended_cdf, perfectly_quantized_probabilities,
};
/// Convenience type alias for a [`ContiguousLookupDecoderModel`] with presets suitable
/// for typical use: 16-bit `Probability` representation and 12 bits of fixed-point
/// precision (so the lookup table has `1 << 12` entries).
pub type SmallContiguousLookupDecoderModel<Cdf = Vec<u16>, LookupTable = Box<[u16]>> =
    ContiguousLookupDecoderModel<u16, Cdf, LookupTable, 12>;
/// A decoder-only entropy model that trades memory for speed: in addition to a CDF it
/// stores a table with one entry per quantile (`1 << PRECISION` entries in total), so
/// that mapping a quantile back to a symbol is a constant-time array lookup (see the
/// `DecoderModel` impl below).
///
/// Type parameters:
/// - `Probability`: unsigned integer type used for fixed-point probabilities;
/// - `Cdf` / `LookupTable`: backing storage, owned (`Vec`/`Box<[_]>`) or borrowed
///   (`&[_]`, as returned by `as_view`);
/// - `PRECISION`: number of fixed-point fraction bits.
#[derive(Debug, Clone, Copy)]
pub struct ContiguousLookupDecoderModel<
    Probability = u16,
    Cdf = Vec<Probability>,
    LookupTable = Box<[Probability]>,
    const PRECISION: usize = 12,
> where
    Probability: BitArray,
{
    // Invariant (upheld by all constructors in this file, not by the type system):
    // `lookup_table` has exactly `1 << PRECISION` entries, and entry `q` is the index
    // of the symbol whose quantile range contains `q`.
    lookup_table: LookupTable,
    // Extended CDF: one left-sided cumulative per symbol, plus a trailing sentinel at
    // `2^PRECISION` (wrapped to the `Probability` type, i.e. zero when
    // `PRECISION == Probability::BITS`).
    cdf: Cdf,
    // Zero-sized marker tying the otherwise-unused `Probability` parameter to the
    // struct.
    phantom: PhantomData<Probability>,
}
impl<Probability, const PRECISION: usize>
    ContiguousLookupDecoderModel<Probability, Vec<Probability>, Box<[Probability]>, PRECISION>
where
    Probability: BitArray + Into<usize>,
    usize: AsPrimitive<Probability>,
{
    /// Constructs a lookup decoder model from floating-point probabilities using the
    /// slower but more accurate quantization of [`perfectly_quantized_probabilities`],
    /// then delegates to [`Self::from_nonzero_fixed_point_probabilities`] with
    /// `infer_last_probability = false`.
    ///
    /// # Errors
    ///
    /// Returns `Err(())` if quantization fails (propagated from
    /// `perfectly_quantized_probabilities`; the precise failure conditions are defined
    /// there).
    #[allow(clippy::result_unit_err)]
    pub fn from_floating_point_probabilities_perfect<F>(probabilities: &[F]) -> Result<Self, ()>
    where
        F: FloatCore + core::iter::Sum<F> + Into<f64>,
        Probability: Into<f64> + AsPrimitive<usize>,
        f64: AsPrimitive<Probability>,
        usize: AsPrimitive<Probability>,
    {
        let slots = perfectly_quantized_probabilities::<_, _, PRECISION>(probabilities)?;
        Self::from_nonzero_fixed_point_probabilities(
            slots.into_iter().map(|slot| slot.weight),
            false,
        )
    }

    /// Constructs a lookup decoder model from floating-point probabilities using the
    /// faster quantization of [`fast_quantized_cdf`], building the extended CDF and
    /// the quantile-to-symbol lookup table in a single pass.
    ///
    /// `normalization` is forwarded to `fast_quantized_cdf`; presumably it is a
    /// precomputed normalization constant for `probabilities` -- see that function's
    /// contract for the exact semantics.
    ///
    /// # Errors
    ///
    /// Returns `Err(())` if `fast_quantized_cdf` fails.
    #[allow(clippy::result_unit_err)]
    pub fn from_floating_point_probabilities_fast<F>(
        probabilities: &[F],
        normalization: Option<F>,
    ) -> Result<Self, ()>
    where
        F: FloatCore + core::iter::Sum<F> + AsPrimitive<Probability>,
        Probability: AsPrimitive<usize>,
        usize: AsPrimitive<F>,
    {
        // Compile-time sanity checks relating `PRECISION` to the bit widths of
        // `Probability` and `usize` (the `<` on usize ensures `1 << PRECISION` below
        // cannot overflow a `usize`).
        generic_static_asserts!(
            (Probability: BitArray; const PRECISION: usize);
            PROBABILITY_MUST_SUPPORT_PRECISION: PRECISION <= Probability::BITS;
            PRECISION_MUST_BE_NONZERO: PRECISION > 0;
            USIZE_MUST_STRICTLY_SUPPORT_PRECISION: PRECISION < <usize as BitArray>::BITS;
        );
        let mut cdf =
            fast_quantized_cdf::<Probability, F, PRECISION>(probabilities, normalization)?;

        // Extended CDF: one entry per symbol plus a trailing sentinel at
        // `2^PRECISION`.
        let mut extended_cdf = Vec::with_capacity(probabilities.len() + 1);
        extended_cdf.push(cdf.next().expect("cdf is not empty"));
        let mut lookup_table = Vec::with_capacity(1 << PRECISION);
        for (index, right_cumulative) in cdf.enumerate() {
            extended_cdf.push(right_cumulative);
            // Fill every quantile slot below `right_cumulative` that isn't already
            // filled with symbol `index` (resize only appends here because the CDF
            // is nondecreasing).
            lookup_table.resize(right_cumulative.as_(), index.as_());
        }
        // Sentinel; `wrapping_pow2` wraps to zero when
        // `PRECISION == Probability::BITS`.
        extended_cdf.push(wrapping_pow2(PRECISION));
        // All remaining quantile slots belong to the last symbol.
        lookup_table.resize(1 << PRECISION, (probabilities.len() - 1).as_());
        Ok(Self {
            lookup_table: lookup_table.into_boxed_slice(),
            cdf: extended_cdf,
            phantom: PhantomData,
        })
    }

    /// Deprecated alias for [`Self::from_floating_point_probabilities_perfect`];
    /// kept for backward compatibility.
    #[deprecated(
        since = "0.4.0",
        note = "Please use `from_floating_point_probabilities_perfect` or \
            `from_floating_point_probabilities_fast` instead. See documentation for \
            detailed upgrade instructions."
    )]
    #[allow(clippy::result_unit_err)]
    pub fn from_floating_point_probabilities<F>(probabilities: &[F]) -> Result<Self, ()>
    where
        F: FloatCore + core::iter::Sum<F> + Into<f64>,
        Probability: Into<f64> + AsPrimitive<usize>,
        f64: AsPrimitive<Probability>,
        usize: AsPrimitive<Probability>,
    {
        Self::from_floating_point_probabilities_perfect(probabilities)
    }

    /// Constructs a lookup decoder model directly from fixed-point (integer)
    /// probabilities. Validation (nonzero entries, correct total of
    /// `1 << PRECISION`, and the role of `infer_last_probability`) is delegated to
    /// [`accumulate_nonzero_probabilities`] -- see its definition for the exact
    /// contract.
    ///
    /// # Errors
    ///
    /// Returns `Err(())` if the probabilities are rejected by
    /// `accumulate_nonzero_probabilities`.
    #[allow(clippy::result_unit_err)]
    pub fn from_nonzero_fixed_point_probabilities<I>(
        probabilities: I,
        infer_last_probability: bool,
    ) -> Result<Self, ()>
    where
        I: IntoIterator,
        I::Item: Borrow<Probability>,
    {
        generic_static_asserts!(
            (Probability: BitArray; const PRECISION: usize);
            PROBABILITY_MUST_SUPPORT_PRECISION: PRECISION <= Probability::BITS;
            PRECISION_MUST_BE_NONZERO: PRECISION > 0;
            USIZE_MUST_STRICTLY_SUPPORT_PRECISION: PRECISION < <usize as BitArray>::BITS;
        );
        let mut lookup_table = Vec::with_capacity(1 << PRECISION);
        let probabilities = probabilities.into_iter();
        // `+ 1` for the trailing sentinel; `+ infer_last_probability as usize`
        // because an inferred last symbol doesn't appear in the input iterator.
        let mut cdf =
            Vec::with_capacity(probabilities.size_hint().0 + 1 + infer_last_probability as usize);
        accumulate_nonzero_probabilities::<_, _, _, _, _, PRECISION>(
            core::iter::repeat(()),
            probabilities,
            |(), _, probability| {
                // `cdf.len()` is the index of the symbol currently being added.
                let index = cdf.len().as_();
                // Left-sided cumulative == number of quantile slots filled so far.
                cdf.push(lookup_table.len().as_());
                // Claim `probability` many quantile slots for this symbol.
                lookup_table.resize(lookup_table.len() + probability.into(), index);
                Ok(())
            },
            infer_last_probability,
        )?;
        // Trailing sentinel at `2^PRECISION` (wraps to zero when
        // `PRECISION == Probability::BITS`).
        cdf.push(wrapping_pow2(PRECISION));
        Ok(Self {
            lookup_table: lookup_table.into_boxed_slice(),
            cdf,
            phantom: PhantomData,
        })
    }
}
impl<Probability, Cdf, LookupTable, const PRECISION: usize>
    ContiguousLookupDecoderModel<Probability, Cdf, LookupTable, PRECISION>
where
    Probability: BitArray + Into<usize>,
    usize: AsPrimitive<Probability>,
    Cdf: AsRef<[Probability]>,
    LookupTable: AsRef<[Probability]>,
{
    /// Returns a lightweight view of this model that borrows both the lookup table
    /// and the CDF instead of owning them.
    pub fn as_view(
        &self,
    ) -> ContiguousLookupDecoderModel<Probability, &[Probability], &[Probability], PRECISION> {
        let lookup_table = self.lookup_table.as_ref();
        let cdf = self.cdf.as_ref();
        ContiguousLookupDecoderModel {
            cdf,
            lookup_table,
            phantom: PhantomData,
        }
    }

    /// Reinterprets this model as a categorical entropy model that borrows the CDF
    /// (the lookup table is only needed for decoding and is simply not referenced).
    pub fn as_contiguous_categorical(
        &self,
    ) -> ContiguousCategoricalEntropyModel<Probability, &[Probability], PRECISION> {
        let cdf = self.cdf.as_ref();
        ContiguousCategoricalEntropyModel {
            cdf,
            phantom: PhantomData,
        }
    }

    /// Consumes this model and converts it into a categorical entropy model,
    /// dropping the lookup table.
    pub fn into_contiguous_categorical(
        self,
    ) -> ContiguousCategoricalEntropyModel<Probability, Cdf, PRECISION> {
        let Self { cdf, .. } = self;
        ContiguousCategoricalEntropyModel {
            cdf,
            phantom: PhantomData,
        }
    }
}
// Marker impl: the model decodes `usize` symbol indices with fixed-point
// probabilities of type `Probability`.
impl<Probability, Cdf, LookupTable, const PRECISION: usize> EntropyModel<PRECISION>
    for ContiguousLookupDecoderModel<Probability, Cdf, LookupTable, PRECISION>
where
    Probability: BitArray + Into<usize>,
{
    type Symbol = usize;
    type Probability = Probability;
}
impl<Probability, Cdf, LookupTable, const PRECISION: usize> DecoderModel<PRECISION>
    for ContiguousLookupDecoderModel<Probability, Cdf, LookupTable, PRECISION>
where
    Probability: BitArray + Into<usize>,
    Cdf: AsRef<[Probability]>,
    LookupTable: AsRef<[Probability]>,
{
    /// Maps `quantile` to `(symbol, left_sided_cumulative, probability)` in O(1):
    /// one read from the lookup table to find the symbol index, then two adjacent
    /// reads from the extended CDF.
    #[inline(always)]
    fn quantile_function(
        &self,
        quantile: Probability,
    ) -> (Self::Symbol, Probability, Probability::NonZero) {
        generic_static_asserts!(
            (Probability: BitArray; const PRECISION: usize);
            PROBABILITY_MUST_SUPPORT_PRECISION: PRECISION <= Probability::BITS;
            PRECISION_MUST_BE_NONZERO: PRECISION > 0;
        );
        // When `PRECISION == Probability::BITS`, every possible `quantile` value is
        // in range by construction (and the shift below would overflow), so the
        // bounds check is only needed otherwise.
        if Probability::BITS != PRECISION {
            assert!(quantile < Probability::one() << PRECISION);
        }
        let (left_sided_cumulative, symbol, next_cumulative) = unsafe {
            // SAFETY: `quantile < 1 << PRECISION` (asserted above, or implied by the
            // type's range when `PRECISION == Probability::BITS`), and the
            // constructors in this file fill `lookup_table` to exactly
            // `1 << PRECISION` entries, so the access is in bounds.
            // NOTE(review): this relies on `self` having been built by one of those
            // constructors; the fields carry no type-level guarantee.
            let index = *self.lookup_table.as_ref().get_unchecked(quantile.into());
            let index = index.into();
            let cdf = self.cdf.as_ref();
            // SAFETY: every value stored in `lookup_table` is a symbol index, and
            // the constructors push one CDF entry per symbol plus a trailing
            // sentinel, so both `index` and `index + 1` are in bounds of `cdf`.
            (
                *cdf.get_unchecked(index),
                index,
                *cdf.get_unchecked(index + 1),
            )
        };
        let probability = unsafe {
            // SAFETY: adjacent CDF entries differ by the corresponding symbol's
            // probability, which the constructors require to be nonzero; the
            // wrapping subtraction handles the sentinel aliasing zero when
            // `PRECISION == Probability::BITS`.
            next_cumulative
                .wrapping_sub(&left_sided_cumulative)
                .into_nonzero_unchecked()
        };
        (symbol, left_sided_cumulative, probability)
    }
}
impl<'m, Probability, Cdf, const PRECISION: usize>
    From<&'m ContiguousCategoricalEntropyModel<Probability, Cdf, PRECISION>>
    for ContiguousLookupDecoderModel<Probability, Vec<Probability>, Box<[Probability]>, PRECISION>
where
    Probability: BitArray + Into<usize>,
    usize: AsPrimitive<Probability>,
    Cdf: AsRef<[Probability]>,
{
    /// Builds a lookup decoder model from a categorical model by copying its CDF and
    /// materializing the `1 << PRECISION`-entry quantile-to-symbol lookup table.
    fn from(model: &'m ContiguousCategoricalEntropyModel<Probability, Cdf, PRECISION>) -> Self {
        let cdf = model.cdf.as_ref().to_vec();
        let mut lookup_table = Vec::with_capacity(1 << PRECISION);
        // Iterate over the interior CDF entries (skipping the leading entry and the
        // trailing sentinel); each visited entry is the right-sided cumulative of
        // `symbol`.
        for (symbol, &cumulative) in model.cdf.as_ref()[1..model.cdf.as_ref().len() - 1]
            .iter()
            .enumerate()
        {
            // Fill all not-yet-filled quantile slots below `cumulative` with this
            // symbol's index (resize only ever grows here since the CDF is
            // nondecreasing).
            lookup_table.resize(cumulative.into(), symbol.as_());
        }
        // Remaining slots belong to the last symbol, whose index is `len - 2`
        // because the CDF has one more entry than there are symbols.
        lookup_table.resize(1 << PRECISION, (model.cdf.as_ref().len() - 2).as_());
        Self {
            lookup_table: lookup_table.into_boxed_slice(),
            cdf,
            phantom: PhantomData,
        }
    }
}
impl<'m, Probability, Cdf, LookupTable, const PRECISION: usize> IterableEntropyModel<'m, PRECISION>
    for ContiguousLookupDecoderModel<Probability, Cdf, LookupTable, PRECISION>
where
    Probability: BitArray + Into<usize>,
    usize: AsPrimitive<Probability>,
    Cdf: AsRef<[Probability]>,
    LookupTable: AsRef<[Probability]>,
{
    /// Iterates over `(symbol, left_sided_cumulative, probability)` triples in
    /// symbol order, derived from consecutive entries of the extended CDF via
    /// [`iter_extended_cdf`].
    #[inline(always)]
    fn symbol_table(
        &'m self,
    ) -> impl Iterator<
        Item = (
            Self::Symbol,
            Self::Probability,
            <Self::Probability as BitArray>::NonZero,
        ),
    > {
        iter_extended_cdf(
            self.cdf
                .as_ref()
                .iter()
                .enumerate()
                // `iter_extended_cdf` expects `(cumulative, symbol)` pairs.
                .map(|(symbol, &cumulative)| (cumulative, symbol)),
        )
    }
}
#[cfg(test)]
mod tests {
    use alloc::vec;
    use crate::stream::{model::EncoderModel, stack::DefaultAnsCoder, Decode};
    use super::*;

    /// Checks that a lookup decoder model derived from a categorical model agrees
    /// with the categorical model on every quantile, and that both round-trip
    /// through an ANS coder.
    #[test]
    fn lookup_contiguous() {
        // Fixed-point probabilities that sum to 64 == 1 << 6 (the PRECISION below).
        let probabilities = vec![3u8, 18, 1, 42];
        let model =
            ContiguousCategoricalEntropyModel::<_, _, 6>::from_nonzero_fixed_point_probabilities(
                probabilities,
                false,
            )
            .unwrap();
        let lookup_decoder_model = model.to_lookup_decoder_model();

        // Symbol-major check: for every quantile inside a symbol's range, both
        // models must report the same (symbol, left_cumulative, probability).
        for symbol in 0..4 {
            let (left_cumulative, probability) =
                model.left_cumulative_and_probability(symbol).unwrap();
            for quantile in left_cumulative..left_cumulative + probability.get() {
                assert_eq!(
                    model.quantile_function(quantile),
                    (symbol, left_cumulative, probability)
                );
                assert_eq!(
                    lookup_decoder_model.quantile_function(quantile),
                    (symbol, left_cumulative, probability)
                );
            }
        }

        // Quantile-major check: sweep the whole quantile range and verify that the
        // quantile function and the encoder-side lookup are mutually consistent.
        for quantile in 0..1 << 6 {
            let (symbol, left_cumulative, probability) = model.quantile_function(quantile);
            assert_eq!(
                lookup_decoder_model.quantile_function(quantile),
                (symbol, left_cumulative, probability)
            );
            assert_eq!(
                model.left_cumulative_and_probability(symbol).unwrap(),
                (left_cumulative, probability)
            );
        }

        // Round-trip: encode with the categorical model, then decode once with the
        // categorical model and once with the lookup model; both must reproduce the
        // input and fully drain the coder.
        let symbols = vec![0, 3, 2, 3, 1, 3, 2, 0, 3];
        let mut ans = DefaultAnsCoder::new();
        // ANS is a stack, so encoding is done in reverse symbol order.
        ans.encode_iid_symbols_reverse(&symbols, &model).unwrap();
        assert!(!ans.is_empty());
        let mut ans2 = ans.clone();
        let decoded = ans
            .decode_iid_symbols(9, &model)
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        assert_eq!(decoded, symbols);
        assert!(ans.is_empty());
        let decoded = ans2
            .decode_iid_symbols(9, &lookup_decoder_model)
            .collect::<Result<Vec<_>, _>>()
            .unwrap();
        assert_eq!(decoded, symbols);
        assert!(ans2.is_empty());
    }
}