embedded_charts/data/
aggregation.rs

1//! Data aggregation and downsampling for efficient chart rendering.
2//!
3//! This module provides efficient algorithms for reducing the number of data points
4//! while preserving important characteristics of the data. This is crucial for:
5//! - Large datasets that exceed display resolution
6//! - Real-time streaming data that needs performance optimization
7//! - Memory-constrained embedded environments
8//! - Maintaining visual fidelity while reducing computational load
9//!
10//! # Aggregation Strategies
11//!
12//! Different strategies for combining multiple data points into a single representative point:
13//!
14//! ## Min-Max Aggregation
15//! Preserves extremes in the data, essential for identifying peaks and troughs:
16//! ```rust
17//! use embedded_charts::prelude::*;
18//! use embedded_charts::data::aggregation::*;
19//!
20//! let data = data_points![(0.0, 10.0), (1.0, 25.0), (2.0, 5.0), (3.0, 20.0)];
21//! let config = AggregationConfig {
22//!     strategy: AggregationStrategy::MinMax,
23//!     target_points: 2,
24//!     ..Default::default()
25//! };
26//! let aggregated: StaticDataSeries<_, 8> = data.aggregate(&config)?;
//! // Each group is represented by its extreme-Y point (e.g. the 25.0 peak)
28//! # Ok::<(), embedded_charts::error::DataError>(())
29//! ```
30//!
31//! ## Statistical Aggregation
32//! Uses statistical measures to represent groups of data points:
33//! ```rust
34//! use embedded_charts::prelude::*;
35//! use embedded_charts::data::aggregation::*;
36//!
37//! let data = data_points![(0.0, 10.0), (1.0, 20.0), (2.0, 30.0), (3.0, 40.0)];
38//! let config = AggregationConfig {
39//!     strategy: AggregationStrategy::Mean,
40//!     target_points: 2,
41//!     ..Default::default()
42//! };
43//! let mean_aggregated: StaticDataSeries<_, 8> = data.aggregate(&config)?;
44//! # Ok::<(), embedded_charts::error::DataError>(())
45//! ```
46//!
47//! # Downsampling Algorithms
48//!
49//! ## Largest Triangle Three Buckets (LTTB)
50//! Advanced algorithm that preserves visual characteristics:
51//! ```rust
52//! use embedded_charts::prelude::*;
53//! use embedded_charts::data::aggregation::*;
54//!
55//! let data = data_points![(0.0, 10.0), (1.0, 25.0), (2.0, 5.0), (3.0, 20.0)];
56//! let config = DownsamplingConfig {
57//!     max_points: 50,
58//!     ..Default::default()
59//! };
60//! let downsampled: StaticDataSeries<_, 8> = data.downsample_lttb(&config)?;
61//! # Ok::<(), embedded_charts::error::DataError>(())
62//! ```
63//!
64//! ## Uniform Downsampling
65//! Simple algorithm that takes every Nth point:
66//! ```rust
67//! use embedded_charts::prelude::*;
68//! use embedded_charts::data::aggregation::*;
69//!
70//! let data = data_points![(0.0, 10.0), (1.0, 25.0), (2.0, 5.0), (3.0, 20.0)];
71//! let config = DownsamplingConfig {
72//!     max_points: 2,
73//!     ..Default::default()
74//! };
75//! let downsampled: StaticDataSeries<_, 8> = data.downsample_uniform(&config)?;
76//! # Ok::<(), embedded_charts::error::DataError>(())
77//! ```
78//!
79//! # Memory Efficiency
80//!
81//! All aggregation operates with bounded memory usage:
82//! - Static allocation for intermediate calculations
83//! - Configurable output capacity
84//! - No heap allocation in no_std environments
85
86use crate::data::{DataPoint, DataSeries, StaticDataSeries};
87use crate::error::{DataError, DataResult};
88
89#[cfg(not(feature = "std"))]
90use micromath::F32Ext;
91
/// Strategy for aggregating multiple data points into a single representative point.
///
/// Used by [`AggregationConfig`] to select how each group of source points is
/// collapsed during [`DataAggregation::aggregate`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AggregationStrategy {
    /// Take the mean (average) of X and Y coordinates
    Mean,
    /// Take the median of X and Y coordinates (computed independently per axis)
    Median,
    /// Represent the group by its extreme Y value.
    /// NOTE(review): since each group collapses to a single point, the current
    /// implementation keeps only the point with the *maximum* Y — it cannot
    /// preserve both extremes despite the name.
    MinMax,
    /// Take the first point in each group
    First,
    /// Take the last point in each group
    Last,
    /// Take the point with maximum Y value
    Max,
    /// Take the point with minimum Y value
    Min,
}
110
/// Configuration for data aggregation operations.
///
/// Passed to [`DataAggregation::aggregate`].
#[derive(Debug, Clone)]
pub struct AggregationConfig {
    /// Strategy to use for combining data points
    pub strategy: AggregationStrategy,
    /// Target number of points after aggregation. If the series already has
    /// this many points or fewer, the data is copied through unchanged.
    pub target_points: usize,
    /// Whether to preserve first and last points exactly (emitted verbatim
    /// instead of being merged into a group)
    pub preserve_endpoints: bool,
    /// Minimum number of source points per group; groups are never smaller
    /// than this, even if that yields fewer than `target_points` outputs
    pub min_group_size: usize,
}
123
124impl Default for AggregationConfig {
125    fn default() -> Self {
126        Self {
127            strategy: AggregationStrategy::Mean,
128            target_points: 100,
129            preserve_endpoints: true,
130            min_group_size: 1,
131        }
132    }
133}
134
/// Configuration for downsampling operations.
///
/// Passed to [`DataAggregation::downsample_lttb`] and
/// [`DataAggregation::downsample_uniform`].
#[derive(Debug, Clone)]
pub struct DownsamplingConfig {
    /// Maximum number of points in the output
    pub max_points: usize,
    /// Whether to preserve first and last points exactly
    pub preserve_endpoints: bool,
    /// Minimum ratio `source_len / max_points` required before any
    /// downsampling is performed; below it the data is copied unchanged
    pub min_reduction_ratio: f32,
}
145
146impl Default for DownsamplingConfig {
147    fn default() -> Self {
148        Self {
149            max_points: 1000,
150            preserve_endpoints: true,
151            min_reduction_ratio: 1.5, // Only downsample if reducing by at least 50%
152        }
153    }
154}
155
/// Statistics calculated for a group of data points during aggregation.
///
/// Produced by [`DataAggregation::calculate_group_stats`]. Means are
/// accumulated in `f32` and converted back into the coordinate types.
#[derive(Debug, Clone)]
pub struct GroupStats<T: DataPoint> {
    /// Number of points in the group
    pub count: usize,
    /// Minimum X value
    pub min_x: T::X,
    /// Maximum X value
    pub max_x: T::X,
    /// Minimum Y value
    pub min_y: T::Y,
    /// Maximum Y value
    pub max_y: T::Y,
    /// Mean X value (computed via f32 accumulation)
    pub mean_x: T::X,
    /// Mean Y value (computed via f32 accumulation)
    pub mean_y: T::Y,
    /// First point in the group
    pub first: T,
    /// Last point in the group
    pub last: T,
}
178
/// Trait providing aggregation and downsampling capabilities for data series.
///
/// Implemented below for [`StaticDataSeries`]; every operation produces a new
/// [`StaticDataSeries`] with caller-chosen capacity `N` and bounded memory use.
pub trait DataAggregation: DataSeries {
    /// Aggregate data points using the specified strategy
    ///
    /// # Arguments
    /// * `config` - Configuration for the aggregation operation
    ///
    /// # Returns
    /// A new data series with aggregated points
    ///
    /// # Errors
    /// Propagates any error raised while pushing into the output series
    /// (e.g. if capacity `N` is too small for the result).
    fn aggregate<const N: usize>(
        &self,
        config: &AggregationConfig,
    ) -> DataResult<StaticDataSeries<Self::Item, N>>;

    /// Downsample data using Largest Triangle Three Buckets algorithm
    ///
    /// This algorithm preserves the visual characteristics of the data better than
    /// simple uniform sampling by considering the area of triangles formed by
    /// adjacent points.
    ///
    /// # Arguments
    /// * `config` - Configuration for the downsampling operation
    ///
    /// # Returns
    /// A new data series with downsampled points
    ///
    /// # Errors
    /// Propagates any error raised while pushing into the output series.
    fn downsample_lttb<const N: usize>(
        &self,
        config: &DownsamplingConfig,
    ) -> DataResult<StaticDataSeries<Self::Item, N>>;

    /// Downsample data using uniform sampling (every Nth point)
    ///
    /// # Arguments
    /// * `config` - Configuration for the downsampling operation
    ///
    /// # Returns
    /// A new data series with uniformly sampled points
    ///
    /// # Errors
    /// Propagates any error raised while pushing into the output series.
    fn downsample_uniform<const N: usize>(
        &self,
        config: &DownsamplingConfig,
    ) -> DataResult<StaticDataSeries<Self::Item, N>>;

    /// Calculate statistics for a group of data points
    ///
    /// # Arguments
    /// * `points` - Slice of data points to analyze
    ///
    /// # Returns
    /// Statistics for the group of points
    ///
    /// # Errors
    /// Returns an error if `points` is empty.
    fn calculate_group_stats(&self, points: &[Self::Item]) -> DataResult<GroupStats<Self::Item>>
    where
        Self::Item: Clone;
}
232
233/// Implementation of aggregation for StaticDataSeries
234impl<T, const M: usize> DataAggregation for StaticDataSeries<T, M>
235where
236    T: DataPoint + Clone + Copy,
237    T::X: PartialOrd
238        + Copy
239        + core::ops::Add<Output = T::X>
240        + core::ops::Div<f32, Output = T::X>
241        + Into<f32>
242        + From<f32>,
243    T::Y: PartialOrd
244        + Copy
245        + core::ops::Add<Output = T::Y>
246        + core::ops::Div<f32, Output = T::Y>
247        + Into<f32>
248        + From<f32>,
249{
250    fn aggregate<const N: usize>(
251        &self,
252        config: &AggregationConfig,
253    ) -> DataResult<StaticDataSeries<T, N>> {
254        if self.is_empty() {
255            return Ok(StaticDataSeries::new());
256        }
257
258        if self.len() <= config.target_points {
259            // No aggregation needed
260            let mut result = StaticDataSeries::new();
261            for point in self.iter() {
262                result.push(point)?;
263            }
264            return Ok(result);
265        }
266
267        let mut result = StaticDataSeries::new();
268        let points = self.as_slice();
269
270        // Calculate group size
271        #[allow(clippy::manual_div_ceil)] // div_ceil requires Rust 1.73+
272        let group_size = (self.len() + config.target_points - 1) / config.target_points;
273        let group_size = group_size.max(config.min_group_size);
274
275        let mut i = 0;
276
277        // Handle first point specially if preserving endpoints
278        if config.preserve_endpoints && !points.is_empty() {
279            result.push(points[0])?;
280            i = 1;
281        }
282
283        // Process groups
284        while i < points.len() {
285            let mut end = (i + group_size).min(points.len());
286
287            // Skip last group if preserving endpoints and this is the final point
288            if config.preserve_endpoints && end == points.len() && i + 1 < points.len() {
289                end = points.len() - 1;
290            }
291
292            if i < end {
293                let group = &points[i..end];
294                if !group.is_empty() {
295                    let aggregated_point = self.aggregate_group(group, config.strategy)?;
296                    result.push(aggregated_point)?;
297                }
298            }
299
300            i = end;
301        }
302
303        // Handle last point specially if preserving endpoints
304        if config.preserve_endpoints && points.len() > 1 {
305            let last_point = points[points.len() - 1];
306            // Only add if it's different from the last added point
307            if result.is_empty() || result.as_slice()[result.len() - 1].x() != last_point.x() {
308                result.push(last_point)?;
309            }
310        }
311
312        Ok(result)
313    }
314
315    fn downsample_lttb<const N: usize>(
316        &self,
317        config: &DownsamplingConfig,
318    ) -> DataResult<StaticDataSeries<T, N>> {
319        if self.is_empty() {
320            return Ok(StaticDataSeries::new());
321        }
322
323        let data_len = self.len();
324
325        // Check if downsampling is needed
326        if data_len <= config.max_points {
327            let mut result = StaticDataSeries::new();
328            for point in self.iter() {
329                result.push(point)?;
330            }
331            return Ok(result);
332        }
333
334        // Check reduction ratio
335        let reduction_ratio = data_len as f32 / config.max_points as f32;
336        if reduction_ratio < config.min_reduction_ratio {
337            let mut result = StaticDataSeries::new();
338            for point in self.iter() {
339                result.push(point)?;
340            }
341            return Ok(result);
342        }
343
344        let mut result = StaticDataSeries::new();
345        let points = self.as_slice();
346
347        // Always include first point
348        result.push(points[0])?;
349
350        if config.max_points <= 2 {
351            // Include last point if we have room
352            if config.max_points == 2 && points.len() > 1 {
353                result.push(points[points.len() - 1])?;
354            }
355            return Ok(result);
356        }
357
358        // Calculate bucket size for intermediate points
359        let bucket_size = (data_len - 2) as f32 / (config.max_points - 2) as f32;
360        let mut bucket_start = 1.0;
361
362        // Process each bucket
363        for _i in 1..(config.max_points - 1) {
364            let bucket_end = bucket_start + bucket_size;
365            #[cfg(feature = "std")]
366            let start_idx = bucket_start.floor() as usize;
367            #[cfg(not(feature = "std"))]
368            let start_idx = bucket_start.floor() as usize;
369            #[cfg(feature = "std")]
370            let end_idx = (bucket_end.ceil() as usize).min(data_len - 1);
371            #[cfg(not(feature = "std"))]
372            let end_idx = (bucket_end.ceil() as usize).min(data_len - 1);
373
374            if start_idx >= end_idx {
375                continue;
376            }
377
378            // Calculate average point of next bucket for triangle area calculation
379            let next_bucket_start = bucket_end;
380            let next_bucket_end = next_bucket_start + bucket_size;
381            #[cfg(feature = "std")]
382            let next_start_idx = next_bucket_start.floor() as usize;
383            #[cfg(not(feature = "std"))]
384            let next_start_idx = next_bucket_start.floor() as usize;
385            #[cfg(feature = "std")]
386            let next_end_idx = (next_bucket_end.ceil() as usize).min(data_len);
387            #[cfg(not(feature = "std"))]
388            let next_end_idx = (next_bucket_end.ceil() as usize).min(data_len);
389
390            let avg_next = if next_start_idx < next_end_idx && next_end_idx <= data_len {
391                self.calculate_average_point(&points[next_start_idx..next_end_idx])?
392            } else {
393                points[data_len - 1] // Use last point if no next bucket
394            };
395
396            // Find point in current bucket that forms largest triangle
397            let mut max_area = -1.0;
398            let mut selected_idx = start_idx;
399
400            for (j_offset, j) in (start_idx..end_idx).enumerate() {
401                let area = self.calculate_triangle_area(
402                    &result.as_slice()[result.len() - 1], // Previous selected point
403                    &points[j],                           // Current candidate
404                    &avg_next,                            // Average of next bucket
405                );
406
407                if area > max_area {
408                    max_area = area;
409                    selected_idx = start_idx + j_offset;
410                }
411            }
412
413            result.push(points[selected_idx])?;
414            bucket_start = bucket_end;
415        }
416
417        // Always include last point if preserving endpoints
418        if config.preserve_endpoints && points.len() > 1 {
419            result.push(points[points.len() - 1])?;
420        }
421
422        Ok(result)
423    }
424
425    fn downsample_uniform<const N: usize>(
426        &self,
427        config: &DownsamplingConfig,
428    ) -> DataResult<StaticDataSeries<T, N>> {
429        if self.is_empty() {
430            return Ok(StaticDataSeries::new());
431        }
432
433        let data_len = self.len();
434
435        if data_len <= config.max_points {
436            let mut result = StaticDataSeries::new();
437            for point in self.iter() {
438                result.push(point)?;
439            }
440            return Ok(result);
441        }
442
443        let mut result = StaticDataSeries::new();
444        let points = self.as_slice();
445
446        // Calculate step size
447        let step = data_len as f32 / config.max_points as f32;
448        let mut current: f32 = 0.0;
449
450        for _ in 0..config.max_points {
451            #[cfg(feature = "std")]
452            let idx = (current.round() as usize).min(data_len - 1);
453            #[cfg(not(feature = "std"))]
454            let idx = (current.round() as usize).min(data_len - 1);
455            result.push(points[idx])?;
456            current += step;
457        }
458
459        Ok(result)
460    }
461
462    fn calculate_group_stats(&self, points: &[T]) -> DataResult<GroupStats<T>> {
463        if points.is_empty() {
464            return Err(DataError::insufficient_data("calculate_group_stats", 1, 0));
465        }
466
467        let first = points[0];
468        let last = points[points.len() - 1];
469
470        let mut min_x = first.x();
471        let mut max_x = first.x();
472        let mut min_y = first.y();
473        let mut max_y = first.y();
474
475        let mut sum_x: f32 = first.x().into();
476        let mut sum_y: f32 = first.y().into();
477
478        for point in points.iter().skip(1) {
479            let x = point.x();
480            let y = point.y();
481
482            if x < min_x {
483                min_x = x;
484            }
485            if x > max_x {
486                max_x = x;
487            }
488            if y < min_y {
489                min_y = y;
490            }
491            if y > max_y {
492                max_y = y;
493            }
494
495            sum_x += x.into();
496            sum_y += y.into();
497        }
498
499        let count_f = points.len() as f32;
500        let mean_x = T::X::from(sum_x / count_f);
501        let mean_y = T::Y::from(sum_y / count_f);
502
503        Ok(GroupStats {
504            count: points.len(),
505            min_x,
506            max_x,
507            min_y,
508            max_y,
509            mean_x,
510            mean_y,
511            first,
512            last,
513        })
514    }
515}
516
517impl<T, const M: usize> StaticDataSeries<T, M>
518where
519    T: DataPoint + Clone + Copy,
520    T::X: PartialOrd
521        + Copy
522        + core::ops::Add<Output = T::X>
523        + core::ops::Div<f32, Output = T::X>
524        + Into<f32>
525        + From<f32>,
526    T::Y: PartialOrd
527        + Copy
528        + core::ops::Add<Output = T::Y>
529        + core::ops::Div<f32, Output = T::Y>
530        + Into<f32>
531        + From<f32>,
532{
533    /// Aggregate a group of points using the specified strategy
534    fn aggregate_group(&self, points: &[T], strategy: AggregationStrategy) -> DataResult<T> {
535        if points.is_empty() {
536            return Err(DataError::insufficient_data("aggregate_group", 1, 0));
537        }
538
539        match strategy {
540            AggregationStrategy::Mean => {
541                let stats = self.calculate_group_stats(points)?;
542                Ok(T::new(stats.mean_x, stats.mean_y))
543            }
544            AggregationStrategy::Median => {
545                // For median, we need to sort the coordinates
546                let mut x_coords: heapless::Vec<T::X, 32> = heapless::Vec::new();
547                let mut y_coords: heapless::Vec<T::Y, 32> = heapless::Vec::new();
548
549                for point in points {
550                    let _ = x_coords.push(point.x());
551                    let _ = y_coords.push(point.y());
552                }
553
554                // Simple sorting for small arrays
555                x_coords.sort_by(|a, b| a.partial_cmp(b).unwrap_or(core::cmp::Ordering::Equal));
556                y_coords.sort_by(|a, b| a.partial_cmp(b).unwrap_or(core::cmp::Ordering::Equal));
557
558                let median_x = if x_coords.len() % 2 == 0 {
559                    let mid = x_coords.len() / 2;
560                    let sum: f32 = x_coords[mid - 1].into() + x_coords[mid].into();
561                    T::X::from(sum / 2.0)
562                } else {
563                    x_coords[x_coords.len() / 2]
564                };
565
566                let median_y = if y_coords.len() % 2 == 0 {
567                    let mid = y_coords.len() / 2;
568                    let sum: f32 = y_coords[mid - 1].into() + y_coords[mid].into();
569                    T::Y::from(sum / 2.0)
570                } else {
571                    y_coords[y_coords.len() / 2]
572                };
573
574                Ok(T::new(median_x, median_y))
575            }
576            AggregationStrategy::MinMax => {
577                // Use the point with extreme Y value for MinMax strategy
578                let point_with_max = points
579                    .iter()
580                    .max_by(|a, b| {
581                        a.y()
582                            .partial_cmp(&b.y())
583                            .unwrap_or(core::cmp::Ordering::Equal)
584                    })
585                    .unwrap();
586                Ok(*point_with_max)
587            }
588            AggregationStrategy::First => Ok(points[0]),
589            AggregationStrategy::Last => Ok(points[points.len() - 1]),
590            AggregationStrategy::Max => {
591                let max_point = points
592                    .iter()
593                    .max_by(|a, b| {
594                        a.y()
595                            .partial_cmp(&b.y())
596                            .unwrap_or(core::cmp::Ordering::Equal)
597                    })
598                    .unwrap();
599                Ok(*max_point)
600            }
601            AggregationStrategy::Min => {
602                let min_point = points
603                    .iter()
604                    .min_by(|a, b| {
605                        a.y()
606                            .partial_cmp(&b.y())
607                            .unwrap_or(core::cmp::Ordering::Equal)
608                    })
609                    .unwrap();
610                Ok(*min_point)
611            }
612        }
613    }
614
615    /// Calculate the average point of a group for LTTB algorithm
616    fn calculate_average_point(&self, points: &[T]) -> DataResult<T> {
617        if points.is_empty() {
618            return Err(DataError::insufficient_data(
619                "calculate_average_point",
620                1,
621                0,
622            ));
623        }
624
625        let mut sum_x: f32 = points[0].x().into();
626        let mut sum_y: f32 = points[0].y().into();
627
628        for point in points.iter().skip(1) {
629            sum_x += point.x().into();
630            sum_y += point.y().into();
631        }
632
633        let count = points.len() as f32;
634        let avg_x = T::X::from(sum_x / count);
635        let avg_y = T::Y::from(sum_y / count);
636
637        Ok(T::new(avg_x, avg_y))
638    }
639
640    /// Calculate triangle area for LTTB algorithm
641    fn calculate_triangle_area(&self, a: &T, b: &T, c: &T) -> f32 {
642        let ax: f32 = a.x().into();
643        let ay: f32 = a.y().into();
644        let bx: f32 = b.x().into();
645        let by: f32 = b.y().into();
646        let cx: f32 = c.x().into();
647        let cy: f32 = c.y().into();
648
649        // Calculate area using cross product formula: 0.5 * |det([[ax, ay, 1], [bx, by, 1], [cx, cy, 1]])|
650        let det = ax * (by - cy) + bx * (cy - ay) - cx * (ay - by);
651
652        det.abs() * 0.5
653    }
654}
655
#[cfg(test)]
mod tests {
    use super::*;
    use crate::data::{Point2D, StaticDataSeries};

    /// Defaults of `AggregationConfig` match their documented values.
    #[test]
    fn test_aggregation_config_default() {
        let cfg = AggregationConfig::default();
        assert_eq!(cfg.strategy, AggregationStrategy::Mean);
        assert_eq!(cfg.target_points, 100);
        assert!(cfg.preserve_endpoints);
        assert_eq!(cfg.min_group_size, 1);
    }

    /// Defaults of `DownsamplingConfig` match their documented values.
    #[test]
    fn test_downsampling_config_default() {
        let cfg = DownsamplingConfig::default();
        assert_eq!(cfg.max_points, 1000);
        assert!(cfg.preserve_endpoints);
        assert_eq!(cfg.min_reduction_ratio, 1.5);
    }

    /// Group statistics report correct extremes and endpoints.
    #[test]
    fn test_group_stats_calculation() {
        let mut input: StaticDataSeries<Point2D, 256> = StaticDataSeries::new();
        for &(x, y) in &[(0.0, 10.0), (1.0, 20.0), (2.0, 5.0)] {
            input.push(Point2D::new(x, y)).unwrap();
        }

        let stats = input.calculate_group_stats(input.as_slice()).unwrap();

        assert_eq!(stats.count, 3);
        assert_eq!(stats.min_x, 0.0);
        assert_eq!(stats.max_x, 2.0);
        assert_eq!(stats.min_y, 5.0);
        assert_eq!(stats.max_y, 20.0);
        assert_eq!(stats.first.x(), 0.0);
        assert_eq!(stats.last.x(), 2.0);
    }

    /// Mean aggregation of four points into two groups averages each pair.
    #[test]
    fn test_mean_aggregation() {
        let mut input: StaticDataSeries<Point2D, 256> = StaticDataSeries::new();
        for &(x, y) in &[(0.0, 10.0), (1.0, 20.0), (2.0, 30.0), (3.0, 40.0)] {
            input.push(Point2D::new(x, y)).unwrap();
        }

        let cfg = AggregationConfig {
            strategy: AggregationStrategy::Mean,
            target_points: 2,
            preserve_endpoints: false,
            min_group_size: 1,
        };

        let output: StaticDataSeries<Point2D, 256> = input.aggregate(&cfg).unwrap();
        assert_eq!(output.len(), 2);

        // Group one: (0,10) and (1,20) average to (0.5, 15).
        let g0 = output.get(0).unwrap();
        assert_eq!(g0.x(), 0.5);
        assert_eq!(g0.y(), 15.0);

        // Group two: (2,30) and (3,40) average to (2.5, 35).
        let g1 = output.get(1).unwrap();
        assert_eq!(g1.x(), 2.5);
        assert_eq!(g1.y(), 35.0);
    }

    /// Uniform downsampling of ten points to a five-point budget.
    #[test]
    fn test_uniform_downsampling() {
        let mut input: StaticDataSeries<Point2D, 256> = StaticDataSeries::new();
        for idx in 0..10 {
            input
                .push(Point2D::new(idx as f32, (idx * 10) as f32))
                .unwrap();
        }

        let cfg = DownsamplingConfig {
            max_points: 5,
            preserve_endpoints: true,
            min_reduction_ratio: 1.0,
        };

        let output: StaticDataSeries<Point2D, 256> = input.downsample_uniform(&cfg).unwrap();
        assert_eq!(output.len(), 5);
    }

    /// Series already at or under the target is passed through untouched.
    #[test]
    fn test_no_aggregation_when_not_needed() {
        let mut input: StaticDataSeries<Point2D, 256> = StaticDataSeries::new();
        for &(x, y) in &[(0.0, 10.0), (1.0, 20.0)] {
            input.push(Point2D::new(x, y)).unwrap();
        }

        let cfg = AggregationConfig {
            target_points: 5, // more than the series holds
            ..Default::default()
        };

        let output: StaticDataSeries<Point2D, 256> = input.aggregate(&cfg).unwrap();
        assert_eq!(output.len(), 2); // unchanged
        assert_eq!(output.get(0).unwrap().x(), 0.0);
        assert_eq!(output.get(1).unwrap().x(), 1.0);
    }
}