use std::{error::Error, fmt};
/// Crate-wide result alias with [`LttbError`] as the error type.
pub type Result<T> = std::result::Result<T, LttbError>;

/// Errors reported by the LTTB downsampling routines.
#[derive(Debug, PartialEq)]
pub enum LttbError {
    /// The requested output size is outside the valid range `3..n_in`.
    InvalidThreshold { n_in: usize, n_out: usize },
    /// The extrema-selection ratio is below the minimum of 2.
    InvalidRatio { ratio: usize },
    /// A partitioning routine was handed an empty slice.
    EmptyBucketPartitioning,
    /// A bucket `[start, end)` could not be evaluated (e.g. it was empty).
    InvalidBucketLimits { start: usize, end: usize },
}

impl fmt::Display for LttbError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure by value: every payload field is `Copy`.
        match *self {
            Self::InvalidThreshold { n_in, n_out } => {
                write!(
                    f,
                    "threshold n_out={n_out} invalid; must be 2 < n_out < n_in={n_in}"
                )
            }
            Self::InvalidRatio { ratio } => {
                write!(f, "ratio is invalid; must be >= 2 (got {ratio})")
            }
            Self::EmptyBucketPartitioning => write!(f, "cannot partition an empty bucket"),
            Self::InvalidBucketLimits { start, end } => {
                write!(f, "evaluated invalid bucket with limits at [{start},{end})")
            }
        }
    }
}

// `Debug` + `Display` provide everything the `Error` trait needs by default.
impl Error for LttbError {}
/// A two-dimensional sample with `f64` coordinates.
#[derive(Debug, Copy, Clone, PartialEq, Default)]
pub struct Point {
    pub(crate) x: f64,
    pub(crate) y: f64,
}

impl Point {
    /// Builds a point from its `x` and `y` coordinates.
    pub fn new(x: f64, y: f64) -> Self {
        Point { x, y }
    }

    /// The x coordinate.
    pub fn x(&self) -> f64 {
        self.x
    }

    /// The y coordinate.
    pub fn y(&self) -> f64 {
        self.y
    }
}
/// Selects which downsampling variant [`Lttb::downsample`] runs.
#[derive(Debug, Copy, Clone, PartialEq, Default)]
pub enum LttbMethod {
    /// Plain LTTB with buckets holding equal numbers of points.
    Classic,
    /// LTTB with bucket edges placed at equal x-range intervals.
    Standard,
    /// Extrema preselection followed by classic LTTB (the default).
    #[default]
    MinMax,
}
/// How [`lttb`] partitions the input into buckets.
#[derive(Debug, Copy, Clone, PartialEq, Default)]
pub enum Binning {
    /// Buckets hold (approximately) equal numbers of points (the default).
    #[default]
    ByCount,
    /// Bucket edges fall at equal intervals of the x-range.
    ByRange,
}
/// Fluent builder for [`Lttb`].
#[derive(Default, Debug, Clone)]
pub struct LttbBuilder {
    // The configuration being assembled; handed out by `build()`.
    lttb: Lttb,
}
impl LttbBuilder {
    /// Starts a builder holding the default [`Lttb`] configuration.
    pub fn new() -> Self {
        Self::default()
    }

    /// Chooses the downsampling variant to run.
    pub fn method(mut self, method: LttbMethod) -> Self {
        self.lttb.method = method;
        self
    }

    /// Sets the number of points the downsampled output should contain.
    pub fn threshold(mut self, threshold: usize) -> Self {
        self.lttb.threshold = threshold;
        self
    }

    /// Sets the extrema-selection ratio (used by the MinMax method).
    pub fn ratio(mut self, ratio: usize) -> Self {
        self.lttb.ratio = ratio;
        self
    }

    /// Consumes the builder and yields the configured [`Lttb`].
    pub fn build(self) -> Lttb {
        self.lttb
    }
}
/// A configured LTTB downsampler; construct via [`LttbBuilder`].
#[derive(Debug, Clone)]
pub struct Lttb {
    // Requested output size (n_out). Defaults to 0, which makes
    // `downsample` return `InvalidThreshold` until a value is set.
    threshold: usize,
    // Which downsampling variant to run.
    method: LttbMethod,
    // Extrema-selection ratio; only consulted by the MinMax method.
    ratio: usize,
}
impl Default for Lttb {
fn default() -> Self {
Self {
threshold: 0,
method: LttbMethod::MinMax,
ratio: Self::DEFAULT_RATIO,
}
}
}
impl Lttb {
    /// Default extrema-selection ratio for the MinMax method.
    const DEFAULT_RATIO: usize = 2;

    /// Downsamples `points` to `threshold` points using the configured method.
    ///
    /// # Errors
    /// Propagates any [`LttbError`] from the underlying algorithm, e.g. when
    /// the configured threshold is not in `3..points.len()`.
    pub fn downsample(&self, points: &[Point]) -> Result<Vec<Point>> {
        match self.method {
            LttbMethod::Classic => lttb(points, self.threshold, Binning::ByCount),
            LttbMethod::Standard => lttb(points, self.threshold, Binning::ByRange),
            LttbMethod::MinMax => minmaxlttb(points, self.threshold, self.ratio),
        }
    }
}
/// MinMax LTTB: optionally thins `points` to local extrema before running
/// classic LTTB, which keeps peaks/valleys that plain LTTB may smooth away.
///
/// `points` must be sorted by strictly increasing x (checked in debug builds).
///
/// # Errors
/// - [`LttbError::InvalidThreshold`] unless `3 <= n_out < points.len()`.
/// - [`LttbError::InvalidRatio`] when `ratio < 2`.
pub fn minmaxlttb(points: &[Point], n_out: usize, ratio: usize) -> Result<Vec<Point>> {
    debug_assert!(
        points.windows(2).all(|w| w[0].x() < w[1].x()),
        "points must be sorted by x"
    );
    if n_out < 3 || n_out >= points.len() {
        return Err(LttbError::InvalidThreshold {
            n_in: points.len(),
            n_out,
        });
    }
    if ratio < 2 {
        return Err(LttbError::InvalidRatio { ratio });
    }
    // Preselect extrema only when a bucket would hold more than `ratio`
    // points; otherwise plain LTTB on the raw series is sufficient.
    if points.len() / n_out > ratio {
        let preselected = extrema_selection(points, n_out, ratio)?;
        lttb(&preselected, n_out, Binning::ByCount)
    } else {
        lttb(points, n_out, Binning::ByCount)
    }
}
/// Largest-Triangle-Three-Buckets downsampling to exactly `n_out` points.
///
/// The first and last input points are always kept. Interior points are split
/// into buckets (by count or by x-range, per `binning_method`); from each
/// bucket the point forming the largest triangle with the previously selected
/// point and the mean of the next bucket is chosen.
///
/// `points` must be sorted by strictly increasing x (checked in debug builds).
///
/// # Errors
/// - [`LttbError::InvalidThreshold`] unless `3 <= n_out < points.len()`.
/// - [`LttbError::InvalidBucketLimits`] if an evaluated bucket is empty.
pub fn lttb(points: &[Point], n_out: usize, binning_method: Binning) -> Result<Vec<Point>> {
    debug_assert!(
        points.windows(2).all(|w| w[0].x() < w[1].x()),
        "points must be sorted by x"
    );
    if n_out >= points.len() || n_out < 3 {
        return Err(LttbError::InvalidThreshold {
            n_in: points.len(),
            n_out,
        });
    }
    // `bucket_bounds` has n_out + 1 edges; bounds[i]..bounds[i+1] is bucket i.
    let bucket_bounds = match binning_method {
        Binning::ByCount => bucket_limits_by_count(points, n_out)?,
        Binning::ByRange => bucket_limits_by_range(points, n_out)?,
    };
    let mut downsampled = Vec::with_capacity(n_out);
    // The first input point is always part of the output.
    downsampled.push(points[0]);
    for i in 1..n_out - 1 {
        let (start, end) = (bucket_bounds[i], bucket_bounds[i + 1]);
        let (next_s, next_e) = (bucket_bounds[i + 1], bucket_bounds[i + 2]);
        // Left triangle vertex: the point already selected for the previous bucket.
        let first_vertex = downsampled[i - 1];
        // Right triangle vertex: the mean of the next bucket's points.
        let third_vertex =
            mean_point_bucket(&points[next_s..next_e]).ok_or(LttbError::InvalidBucketLimits {
                start: next_s,
                end: next_e,
            })?;
        // Keep the bucket point maximizing the triangle area with both vertices.
        let best_vertex = vertex_by_max_area(&points[start..end], first_vertex, third_vertex)
            .ok_or(LttbError::InvalidBucketLimits { start, end })?;
        downsampled.push(best_vertex);
    }
    // The last input point is always part of the output.
    downsampled.push(points[points.len() - 1]);
    Ok(downsampled)
}
/// Preselects candidate extrema for MinMax LTTB: the two endpoints plus the
/// y-minimum and y-maximum of each x-range partition of the interior.
///
/// # Errors
/// - [`LttbError::InvalidThreshold`] unless `3 <= n_out < points.len()`.
/// - [`LttbError::InvalidRatio`] when `ratio < 2`.
/// - Propagates [`LttbError::EmptyBucketPartitioning`] from partitioning.
pub fn extrema_selection(points: &[Point], n_out: usize, ratio: usize) -> Result<Vec<Point>> {
    if n_out >= points.len() || n_out < 3 {
        return Err(LttbError::InvalidThreshold {
            n_in: points.len(),
            n_out,
        });
    }
    if ratio < 2 {
        return Err(LttbError::InvalidRatio { ratio });
    }
    // Each partition contributes at most one (min, max) pair.
    const NUM_PTS_PER_PARTITION: usize = 2;
    // NOTE(review): integer division means an odd `ratio` behaves like
    // `ratio - 1` here (n_out * (ratio / 2) partitions) — presumably intended,
    // since every partition yields two points; confirm with the algorithm spec.
    let num_partitions = n_out.saturating_mul(ratio / NUM_PTS_PER_PARTITION);
    let n_in = points.len();
    // saturating_mul for consistency with `num_partitions` above: avoids a
    // debug-build overflow panic on pathological n_out * ratio. This is only
    // a capacity hint, so saturation is harmless.
    let mut selected: Vec<Point> = Vec::with_capacity(n_out.saturating_mul(ratio));
    selected.push(points[0]);
    // Partition the interior by x-range; the endpoints are always kept.
    let bounds = partition_bounds_by_range(&points[1..(n_in - 1)], 1, num_partitions)?;
    // Adjacent bound pairs delimit each partition [start, end).
    for pair in bounds.windows(2) {
        selected.extend(find_minmax(&points[pair[0]..pair[1]]));
    }
    selected.push(points[n_in - 1]);
    Ok(selected)
}
/// Picks the point in `points` forming the largest triangle with the two
/// fixed vertices; `None` when `points` is empty.
///
/// `>=` means later ties win; a NaN area never replaces the running best.
fn vertex_by_max_area(points: &[Point], first_vertex: Point, next_vertex: Point) -> Option<Point> {
    let mut best: Option<Point> = None;
    let mut best_area = f64::MIN;
    for candidate in points.iter().copied() {
        let area = triangle_area(&first_vertex, &candidate, &next_vertex);
        if area >= best_area {
            best_area = area;
            best = Some(candidate);
        }
    }
    best
}
/// Arithmetic mean of a bucket's points; `None` for an empty bucket.
pub fn mean_point_bucket(points: &[Point]) -> Option<Point> {
    if points.is_empty() {
        return None;
    }
    let count = points.len() as f64;
    // Left fold keeps the same left-to-right summation order as a plain loop.
    let (sum_x, sum_y) = points
        .iter()
        .fold((0.0, 0.0), |(sx, sy), p| (sx + p.x, sy + p.y));
    Some(Point {
        x: sum_x / count,
        y: sum_y / count,
    })
}
/// Returns a partition's y-extremes, ordered by x; partitions of fewer than
/// three points are returned whole (every point is trivially an extremum).
pub fn find_minmax(points: &[Point]) -> Vec<Point> {
    if points.len() < 3 {
        return points.to_vec();
    }
    let (mut min_p, mut max_p) = (points[0], points[0]);
    for &p in points {
        // `<` keeps the first minimum; `>=` keeps the last maximum.
        if p.y < min_p.y {
            min_p = p;
        }
        if p.y >= max_p.y {
            max_p = p;
        }
    }
    // Emit in x order so the result stays sorted.
    if min_p.x < max_p.x {
        vec![min_p, max_p]
    } else {
        vec![max_p, min_p]
    }
}
/// Computes `n_out + 1` bucket edges with (approximately) equal point counts.
///
/// The first and last buckets hold exactly the first and last point; the
/// interior `n_in - 2` points are split evenly across the remaining buckets.
///
/// # Errors
/// [`LttbError::InvalidThreshold`] unless `3 <= n_out < points.len()`.
pub fn bucket_limits_by_count(points: &[Point], n_out: usize) -> Result<Vec<usize>> {
    let n_in = points.len();
    if n_out >= n_in || n_out < 3 {
        return Err(LttbError::InvalidThreshold { n_in, n_out });
    }
    // Interior points per interior bucket (fractional; edges truncate).
    let bucket_size = (n_in - 2) as f64 / (n_out - 2) as f64;
    let mut bounds = Vec::with_capacity(n_out + 1);
    bounds.push(0);
    bounds.extend((0..n_out - 1).map(|i| (1.0 + i as f64 * bucket_size) as usize));
    bounds.push(n_in);
    Ok(bounds)
}
/// Splits `[start, end)` into `n` count-balanced partitions, returning the
/// `n + 1` edge indices. With `n == 0` the whole range is one partition.
///
/// # Errors
/// [`LttbError::InvalidBucketLimits`] when `start >= end`.
pub fn partition_limits_by_count(start: usize, end: usize, n: usize) -> Result<Vec<usize>> {
    if start >= end {
        return Err(LttbError::InvalidBucketLimits { start, end });
    }
    if n == 0 {
        return Ok(vec![start, end]);
    }
    let size = (end - start) as f64 / n as f64;
    let mut bounds: Vec<usize> = (0..n)
        .map(|i| start + (i as f64 * size) as usize)
        .collect();
    bounds.push(end);
    Ok(bounds)
}
/// Computes `n_out + 1` bucket edges by splitting the interior x-range into
/// equal spans.
///
/// The first and last buckets hold exactly the first and last point; interior
/// edges fall where the x values cross each uniform step. Edges are nudged
/// forward (capped at `n_in - 2`) so earlier interior buckets stay non-empty.
///
/// # Errors
/// [`LttbError::InvalidThreshold`] unless `3 <= n_out < points.len()`.
pub fn bucket_limits_by_range(points: &[Point], n_out: usize) -> Result<Vec<usize>> {
    let n_in = points.len();
    if n_out >= n_in || n_out < 3 {
        return Err(LttbError::InvalidThreshold { n_in, n_out });
    }
    // The interior spans indices 1..=n_in-2; the endpoints get their own buckets.
    let first_point: usize = 1;
    let last_point: usize = n_in - 2;
    let n_out_exclusive = (n_out - 2) as f64;
    let start_x = points[first_point].x();
    let end_x = points[last_point].x();
    let step_size = ((end_x - start_x) / n_out_exclusive).abs();
    let mut bounds = Vec::with_capacity(n_out + 1);
    bounds.push(0);
    bounds.push(1);
    let mut idx = 1;
    let mut prev = 1;
    for i in 1..n_out - 2 {
        // x coordinate of the i-th interior edge.
        let edge_x = start_x + step_size * i as f64;
        // Advance to the first point at or beyond the edge.
        while idx < n_in - 1 && points[idx].x() < edge_x {
            idx += 1;
        }
        // Force strictly increasing edges so the preceding bucket is non-empty.
        if idx <= prev {
            idx = (prev + 1).min(n_in - 2);
        }
        bounds.push(idx);
        prev = idx;
    }
    bounds.push(n_in - 1);
    bounds.push(n_in);
    Ok(bounds)
}
/// Computes `n + 1` partition edges (absolute indices offset by `start`) by
/// splitting the slice's x-range into `n` equal spans.
///
/// With `n == 0` the whole slice is one partition. Edges are nudged forward to
/// stay increasing where possible, capped at the last index — so trailing
/// edges may repeat (yielding empty partitions) when `n` exceeds the number
/// of points.
///
/// # Errors
/// [`LttbError::EmptyBucketPartitioning`] when `points` is empty and `n > 0`.
pub fn partition_bounds_by_range(points: &[Point], start: usize, n: usize) -> Result<Vec<usize>> {
    if n == 0 {
        return Ok(vec![start, start + points.len()]);
    }
    if points.is_empty() {
        return Err(LttbError::EmptyBucketPartitioning);
    }
    let start_x = points[0].x();
    let end_x = points[points.len() - 1].x();
    let step_size = ((end_x - start_x) / n as f64).abs();
    let mut bounds = Vec::with_capacity(n + 1);
    bounds.push(start);
    // `idx` is relative to `points`; `prev_abs` is the last emitted absolute edge.
    let mut idx = 0;
    let mut prev_abs = start;
    for i in 1..n {
        // x coordinate of the i-th edge.
        let edge_x = start_x + step_size * i as f64;
        // Advance to the first point at or beyond the edge.
        while idx < points.len() && points[idx].x() < edge_x {
            idx += 1;
        }
        let mut abs = start + idx;
        // Keep edges increasing where possible, capped at the last index.
        if abs <= prev_abs {
            abs = (prev_abs + 1).min(start + points.len() - 1);
            idx = abs - start;
        }
        bounds.push(abs);
        prev_abs = abs;
    }
    bounds.push(start + points.len());
    Ok(bounds)
}
/// Area of the triangle `(p1, p2, p3)` via the shoelace formula:
/// `|x1(y2-y3) + x2(y3-y1) + x3(y1-y2)| / 2`.
#[inline(always)]
fn triangle_area(p1: &Point, p2: &Point, p3: &Point) -> f64 {
    // Left-to-right addition keeps the same float evaluation order.
    let twice_signed = p1.x * (p2.y - p3.y) + p2.x * (p3.y - p1.y) + p3.x * (p1.y - p2.y);
    twice_signed.abs() / 2.0
}
#[cfg(test)]
mod tests {
    use super::*;

    // Helper: the [start, end) index pair of `bucket_index` under count binning.
    #[inline(always)]
    fn bucket_edges_by_count(data: &[Point], n_out: usize, bucket_index: usize) -> (usize, usize) {
        let bucket_bounds = bucket_limits_by_count(data, n_out).unwrap();
        (bucket_bounds[bucket_index], bucket_bounds[bucket_index + 1])
    }

    // lttb rejects n_out >= n_in and n_out < 3.
    #[test]
    fn threshold_conditions() {
        let data = vec![
            Point::new(0.0, 0.0),
            Point::new(1.0, 1.0),
            Point::new(2.0, 2.0),
            Point::new(3.0, 3.0),
        ];
        let n_out = 5;
        let result = lttb(&data, n_out, Binning::ByCount);
        assert_eq!(
            result,
            Err(LttbError::InvalidThreshold { n_in: 4, n_out: 5 })
        );
        let n_out = 2;
        let result = lttb(&data, n_out, Binning::ByCount);
        assert_eq!(
            result,
            Err(LttbError::InvalidThreshold { n_in: 4, n_out: 2 })
        );
    }

    // mean_point_bucket: None for empty input, arithmetic mean otherwise.
    #[test]
    fn bucket_mean_point() {
        assert!(mean_point_bucket(&[]).is_none());
        let data = vec![
            Point::new(0.0, 4.0),
            Point::new(1.0, 5.0),
            Point::new(2.0, 6.0),
            Point::new(3.0, 7.0),
        ];
        assert!(mean_point_bucket(&data[1..1]).is_none());
        assert_eq!(
            mean_point_bucket(&data).unwrap(),
            Point::new(6.0 / 4.0, 22.0 / 4.0)
        )
    }

    // find_minmax: short slices returned whole; extremes emitted in x order.
    #[test]
    fn minmax_partition_check() {
        let data = vec![Point::new(0.0, 4.0)];
        assert_eq!(find_minmax(&data), vec![Point::new(0.0, 4.0)]);
        let data = vec![
            Point::new(0.0, 4.0),
            Point::new(1.0, 5.0),
            Point::new(2.0, 7.0),
            Point::new(3.0, 6.0),
        ];
        assert_eq!(find_minmax(&[]), vec![]);
        assert_eq!(find_minmax(&data[0..0]), vec![]);
        assert_eq!(find_minmax(&data[0..1]), vec![Point::new(0.0, 4.0)]);
        // Decreasing series: max comes first by x.
        let data = vec![
            Point::new(0.0, 6.0),
            Point::new(1.0, 5.0),
            Point::new(2.0, 4.0),
            Point::new(3.0, 3.0),
        ];
        assert_eq!(
            find_minmax(&data),
            vec![Point::new(0.0, 6.0), Point::new(3.0, 3.0)]
        );
        // Constant series: first min, last max (tie-breaking rules).
        let data = vec![
            Point::new(0.0, 4.0),
            Point::new(1.0, 4.0),
            Point::new(2.0, 4.0),
            Point::new(3.0, 4.0),
        ];
        assert_eq!(
            find_minmax(&data),
            vec![Point::new(0.0, 4.0), Point::new(3.0, 4.0)]
        );
    }

    // The LTTB right vertex is the mean of the NEXT bucket.
    #[test]
    fn right_vertex_for_first_bucket() {
        struct TestCase {
            name: &'static str,
            bucket_index: usize,
            expected_vertex: Option<Point>,
        }
        let cases = [
            TestCase {
                name: "Right vertex for 1st bucket",
                bucket_index: 0,
                expected_vertex: Some(Point::new(1.5, 2.5)),
            },
            TestCase {
                name: "Right vertex for 2nd bucket",
                bucket_index: 1,
                expected_vertex: Some(Point::new(3.0, 4.0)),
            },
        ];
        let data = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
        ];
        let n_out = 3;
        for c in cases {
            let (next_start, next_end) = bucket_edges_by_count(&data, n_out, c.bucket_index + 1);
            let result = mean_point_bucket(&data[next_start..next_end]);
            assert_eq!(result, c.expected_vertex, "test case: {}", c.name,);
        }
    }

    // Right vertex for an interior bucket of a longer series.
    #[test]
    fn right_vertex_for_middle_bucket() {
        let data = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
            Point::new(4.0, 5.0),
            Point::new(5.0, 6.0),
        ];
        let n_out = 4;
        let bucket_index = 1;
        let (next_start, next_end) = bucket_edges_by_count(&data, n_out, bucket_index + 1);
        let result = mean_point_bucket(&data[next_start..next_end]);
        assert_eq!(result, Some(Point::new(3.5, 4.5)));
    }

    // The penultimate bucket's right vertex is the final (single-point) bucket.
    #[test]
    fn right_vertex_for_penultimate_bucket() {
        let data = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
        ];
        let n_out = 3;
        let bucket_index = n_out - 2;
        let (next_start, next_end) = bucket_edges_by_count(&data, n_out, bucket_index + 1);
        let result = mean_point_bucket(&data[next_start..next_end]);
        assert_eq!(result, Some(Point::new(3.0, 4.0)));
    }

    // vertex_by_max_area picks the point with the largest triangle area.
    #[test]
    fn best_candidate_bucket() {
        let data = vec![
            Point::new(0.0, 0.0),
            Point::new(1.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 0.0),
        ];
        let n_out = 3;
        let bucket_index = 1;
        let first = data[0];
        let third = data[3];
        let (start, end) = bucket_edges_by_count(&data, n_out, bucket_index);
        let result = vertex_by_max_area(&data[start..end], first, third);
        assert_eq!(result, Some(Point::new(1.0, 2.0)));
    }

    // partition_limits_by_count: error cases, n == 0 passthrough, edge values.
    #[test]
    fn partition_bounds_by_count_check() {
        assert_eq!(
            partition_limits_by_count(0, 0, 3),
            Err(LttbError::InvalidBucketLimits { start: 0, end: 0 })
        );
        assert_eq!(
            partition_limits_by_count(4, 0, 3),
            Err(LttbError::InvalidBucketLimits { start: 4, end: 0 })
        );
        assert_eq!(partition_limits_by_count(4, 10, 0), Ok(vec![4, 10]));
        assert_eq!(partition_limits_by_count(0, 10, 3), Ok(vec![0, 3, 6, 10]));
        assert_eq!(partition_limits_by_count(0, 5, 2), Ok(vec![0, 2, 5]));
        assert_eq!(
            partition_limits_by_count(0, 7, 7),
            Ok(vec![0, 1, 2, 3, 4, 5, 6, 7])
        );
    }

    // With enough partitions, preselection keeps every point (all are extrema).
    #[test]
    fn minmax_preselect_preserves_extrema() {
        let data = vec![
            Point::new(0.0, 0.0),
            Point::new(0.5, 2.0),
            Point::new(1.0, 10.0),
            Point::new(1.5, 5.0),
            Point::new(2.0, -5.0),
            Point::new(2.5, 0.0),
            Point::new(3.0, 8.0),
            Point::new(3.5, 4.0),
            Point::new(4.0, 0.0),
        ];
        let selected = extrema_selection(&data, 5, 2).unwrap();
        let expected = vec![
            Point::new(0.0, 0.0),
            Point::new(0.5, 2.0),
            Point::new(1.0, 10.0),
            Point::new(1.5, 5.0),
            Point::new(2.0, -5.0),
            Point::new(2.5, 0.0),
            Point::new(3.0, 8.0),
            Point::new(3.5, 4.0),
            Point::new(4.0, 0.0),
        ];
        assert_eq!(selected, expected);
    }

    // Constant y values: endpoints preserved, everything selected stays flat.
    #[test]
    fn minmax_preselect_handles_duplicates() {
        let data = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 1.0),
            Point::new(2.0, 1.0),
            Point::new(3.0, 1.0),
        ];
        let selected = extrema_selection(&data, 3, 2).unwrap();
        assert_eq!(selected[0], data[0]);
        assert_eq!(selected[selected.len() - 1], data[3]);
        assert!(selected.iter().all(|p| p.y == 1.0));
    }

    // Too few input points for the requested n_out.
    #[test]
    fn minmax_preselect_small_buckets() {
        let data = vec![Point::new(0.0, 1.0), Point::new(1.0, 2.0)];
        let selected = extrema_selection(&data, 5, 2);
        assert_eq!(
            selected,
            Err(LttbError::InvalidThreshold { n_in: 2, n_out: 5 })
        );
    }

    // minmaxlttb and extrema_selection share the same validation errors.
    #[test]
    fn minmaxlttb_invalid_inputs() {
        let points = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
        ];
        assert_eq!(
            minmaxlttb(&points, 2, 2),
            Err(LttbError::InvalidThreshold {
                n_in: points.len(),
                n_out: 2
            })
        );
        assert_eq!(
            extrema_selection(&points, 2, 2),
            Err(LttbError::InvalidThreshold {
                n_in: points.len(),
                n_out: 2
            })
        );
        assert_eq!(
            minmaxlttb(&points, 4, 2),
            Err(LttbError::InvalidThreshold {
                n_in: points.len(),
                n_out: 4
            })
        );
        assert_eq!(
            extrema_selection(&points, 4, 2),
            Err(LttbError::InvalidThreshold {
                n_in: points.len(),
                n_out: 4
            })
        );
        assert_eq!(
            minmaxlttb(&points, 3, 1),
            Err(LttbError::InvalidRatio { ratio: 1 })
        );
        assert_eq!(
            extrema_selection(&points, 3, 1),
            Err(LttbError::InvalidRatio { ratio: 1 })
        );
    }

    // Point accessors round-trip the constructor arguments.
    #[test]
    fn point_new() {
        let p = Point::new(1.0, 2.0);
        assert_eq!(p.x(), 1.0);
        assert_eq!(p.y(), 2.0);
    }

    // Classic LTTB returns exactly n_out points.
    #[test]
    fn downsample_classic_lttb_check() {
        let points = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
            Point::new(4.0, 5.0),
        ];
        let result = lttb(&points, 3, Binning::ByCount).unwrap();
        assert_eq!(result.len(), 3);
    }

    // Builder wires threshold/method/ratio through to downsample for each variant.
    #[test]
    fn builder_pattern() {
        let points = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
            Point::new(4.0, 5.0),
        ];
        let result_classic = LttbBuilder::new()
            .threshold(3)
            .method(LttbMethod::Classic)
            .build();
        assert_eq!(result_classic.downsample(&points).unwrap().len(), 3);
        let result_minmax = LttbBuilder::new()
            .threshold(3)
            .method(LttbMethod::MinMax)
            .ratio(4)
            .build();
        assert_eq!(result_minmax.downsample(&points).unwrap().len(), 3);
        let result_minmax_default = LttbBuilder::new()
            .threshold(3)
            .method(LttbMethod::MinMax)
            .build();
        assert_eq!(result_minmax_default.downsample(&points).unwrap().len(), 3);
        let result_standard = LttbBuilder::new()
            .threshold(3)
            .method(LttbMethod::Standard)
            .build()
            .downsample(&points)
            .unwrap();
        assert_eq!(result_standard.len(), 3);
    }

    // Count-based bucket edges for several n_in/n_out combinations.
    #[test]
    fn bucket_limits_by_count_check() {
        let bounds = bucket_limits_by_count(&[Point::default(); 6], 2);
        let expected = Err(LttbError::InvalidThreshold { n_in: 6, n_out: 2 });
        assert_eq!(bounds, expected);
        let bounds = bucket_limits_by_count(&[Point::default(); 6], 6);
        let expected = Err(LttbError::InvalidThreshold { n_in: 6, n_out: 6 });
        assert_eq!(bounds, expected);
        let bounds = bucket_limits_by_count(&[Point::default(); 6], 4);
        let expected = vec![0, 1, 3, 5, 6];
        assert_eq!(bounds.unwrap(), expected);
        let bounds = bucket_limits_by_count(&[Point::default(); 6], 5);
        let expected = vec![0, 1, 2, 3, 5, 6];
        assert_eq!(bounds.unwrap(), expected);
        let bounds = bucket_limits_by_count(&[Point::default(); 10], 5);
        let expected = vec![0, 1, 3, 6, 9, 10];
        assert_eq!(bounds.unwrap(), expected);
        let bounds = bucket_limits_by_count(&[Point::default(); 15], 10);
        let expected = vec![0, 1, 2, 4, 5, 7, 9, 10, 12, 14, 15];
        assert_eq!(bounds.unwrap(), expected);
    }

    // Range-based binning shares the same threshold validation as count-based.
    #[test]
    fn bucket_limits_by_range_early_return_conditions() {
        let data = vec![
            Point::new(0.0, 0.0),
            Point::new(1.0, 0.0),
            Point::new(2.0, 0.0),
            Point::new(3.0, 0.0),
        ];
        assert_eq!(
            bucket_limits_by_range(&data, 4),
            Err(LttbError::InvalidThreshold { n_in: 4, n_out: 4 })
        );
        assert_eq!(
            bucket_limits_by_range(&data, 2),
            Err(LttbError::InvalidThreshold { n_in: 4, n_out: 2 })
        );
    }

    // Clustered x values: edges follow the x-range, not the point count.
    #[test]
    fn bucket_limits_by_range_non_uniform_spacing() {
        let data = vec![
            Point::new(0.0, 0.0),
            Point::new(0.1, 0.0),
            Point::new(0.2, 0.0),
            Point::new(5.0, 0.0),
            Point::new(5.1, 0.0),
            Point::new(10.0, 0.0),
        ];
        let bounds = bucket_limits_by_range(&data, 4).unwrap();
        let expected = vec![0, 1, 3, 5, 6];
        assert_eq!(bounds, expected);
    }

    // Negative x values and offset ranges are handled the same way.
    #[test]
    fn bucket_limits_by_range_negative_x_and_offset() {
        let data = vec![
            Point::new(-5.0, 0.0),
            Point::new(-4.5, 0.0),
            Point::new(-4.0, 0.0),
            Point::new(-1.0, 0.0),
            Point::new(3.0, 0.0),
            Point::new(10.0, 0.0),
        ];
        let bounds = bucket_limits_by_range(&data, 4).unwrap();
        let expected = vec![0, 1, 4, 5, 6];
        assert_eq!(bounds, expected);
    }

    // On uniformly-spaced x the two binning strategies agree exactly.
    #[test]
    fn bucket_limits_by_range_matches_count_when_uniform() {
        let data: Vec<Point> = (0..=10).map(|i| Point::new(i as f64, 0.0)).collect();
        let n_out = 6;
        let by_range = bucket_limits_by_range(&data, n_out).unwrap();
        let by_count = bucket_limits_by_count(&data, n_out).unwrap();
        assert_eq!(by_range, by_count);
    }

    // n == 0 passthrough and empty-slice error for range partitioning.
    #[test]
    fn partition_bounds_by_range_edges() {
        let points = vec![
            Point::new(0.0, 0.0),
            Point::new(1.0, 0.0),
            Point::new(2.0, 0.0),
        ];
        assert_eq!(
            partition_bounds_by_range(&points, 5, 0).unwrap(),
            vec![5, 8]
        );
        let empty: Vec<Point> = vec![];
        assert_eq!(
            partition_bounds_by_range(&empty, 0, 3),
            Err(LttbError::EmptyBucketPartitioning)
        );
    }

    // MinMax variant also returns exactly n_out points.
    #[test]
    fn downsample_minmax_check() {
        let points = vec![
            Point::new(0.0, 1.0),
            Point::new(1.0, 2.0),
            Point::new(2.0, 3.0),
            Point::new(3.0, 4.0),
            Point::new(4.0, 5.0),
        ];
        let result = minmaxlttb(&points, 3, 2).unwrap();
        assert_eq!(result.len(), 3);
    }

    // 100 points / n_out 10 makes bucket_size (10) > ratio (2), forcing the
    // extrema-preselection branch of minmaxlttb.
    #[test]
    fn force_extrema_selection_branch() {
        let points: Vec<Point> = (0..100)
            .map(|i| Point::new(i as f64, (i % 7) as f64))
            .collect();
        let n_out = 10;
        let ratio = 2;
        let result = minmaxlttb(&points, n_out, ratio).unwrap();
        assert_eq!(result.len(), n_out);
    }

    // Display output for every error variant.
    #[test]
    fn lttberror_format() {
        let e1 = LttbError::InvalidThreshold { n_in: 4, n_out: 5 };
        assert_eq!(
            format!("{}", e1),
            "threshold n_out=5 invalid; must be 2 < n_out < n_in=4"
        );
        let e2 = LttbError::InvalidRatio { ratio: 1 };
        assert_eq!(format!("{}", e2), "ratio is invalid; must be >= 2 (got 1)");
        let e3 = LttbError::EmptyBucketPartitioning;
        assert_eq!(format!("{}", e3), "cannot partition an empty bucket");
        let e4 = LttbError::InvalidBucketLimits { start: 2, end: 1 };
        assert_eq!(
            format!("{}", e4),
            "evaluated invalid bucket with limits at [2,1)"
        );
    }
}