# logp 0.2.2
#
# Information theory primitives: entropy, KL divergence, mutual information
# (KSG estimator), and information-monotone divergences.
# Documentation: https://docs.rs/logp
[package]
name = "logp"
version = "0.2.2"
# Remaining keys follow the Cargo style guide: alphabetical order,
# with `description` last.
authors = ["Arc <attobop@gmail.com>"]
categories = ["algorithms", "science", "mathematics"]
documentation = "https://docs.rs/logp"
edition = "2021"
homepage = "https://github.com/arclabs561/logp"
# crates.io accepts at most five keywords, each at most 20 characters.
keywords = ["information-theory", "entropy", "divergence", "mutual-information", "ksg-estimator"]
license = "MIT OR Apache-2.0"
readme = "README.md"
repository = "https://github.com/arclabs561/logp"
# Minimum supported Rust version (MSRV); raising it affects downstream users.
rust-version = "1.75"
description = "Information theory primitives: entropy, KL divergence, mutual information (KSG estimator), and information-monotone divergences"

# Empty workspace table: makes this crate its own workspace root so it is
# not absorbed by a `[workspace]` in any parent directory.
[workspace]

[features]
# No features enabled by default; everything below is opt-in.
default = []
# Optional ndarray adapter for users who keep joint distributions as matrices.
# The `dep:` prefix enables the optional dependency without also exposing an
# implicit `ndarray` feature tied to the dependency name.
ndarray = ["dep:ndarray"]

[dependencies]
# Sorted alphabetically per Cargo convention.
# `ndarray` is optional and only pulled in by the `ndarray` feature.
ndarray = { version = "0.16", optional = true }
thiserror = "2.0"

[dev-dependencies]
# Sorted alphabetically per Cargo convention.
# `ndarray` is unconditional here so tests covering the optional adapter
# always compile, regardless of enabled features.
ndarray = "0.16"
proptest = "1.5"
rkhs = "0.2"
wass = "0.1"