sparklines/
lib.rs

1//! The sparklines crate provides a simple way to generate sparklines.
2
3pub use crate::indexer::algorithmic::BuildAlgorithmicIndexer;
4pub use crate::indexer::rangemap::BuildRangemapIndexer;
5use crate::indexer::{BuildIndexer, Indexer};
6
7mod indexer;
8
/// Default ticks for creating a string sparkline, ordered from lowest to
/// highest value.
/// ```
/// assert_eq!(sparklines::TICKS, ['▁', '▂', '▃', '▄', '▅', '▆', '▇', '█']);
/// ```
pub const TICKS: [char; 8] = ['▁', '▂', '▃', '▄', '▅', '▆', '▇', '█'];
14
/// Errors that can occur when constructing a [`StringSpark`].
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Error {
    /// The provided slice of ticks was empty.
    EmptyTicks,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::EmptyTicks => write!(f, "ticks slice must not be empty"),
        }
    }
}

// Implementing the standard error trait lets callers use `?`, `Box<dyn Error>`
// and error-reporting crates with this type.
impl std::error::Error for Error {}
21
/// Sparkline backed by [`BuildAlgorithmicIndexer`] (the default indexer).
pub type AlgorithmicSpark<'a> = StringSpark<'a, BuildAlgorithmicIndexer>;

/// Sparkline backed by [`BuildRangemapIndexer`].
pub type RangemapSpark<'a> = StringSpark<'a, BuildRangemapIndexer>;
27
28/// `StringSparkline` is a struct that can be used to create a string sparkline.
29pub struct StringSpark<'a, I = BuildAlgorithmicIndexer>
30where
31    I: BuildIndexer<f64, usize>,
32{
33    min: Option<f64>,
34    max: Option<f64>,
35    ticks: &'a [char],
36    middle_idx: usize,
37    build_indexer: I,
38}
39
40impl<'a, I> StringSpark<'a, I>
41where
42    I: BuildIndexer<f64, usize> + Default,
43{
44    /// Create a new `SparkLines` instance.
45    ///
46    /// # Examples
47    /// ```
48    /// # use sparklines::TICKS;
49    ///
50    /// let spark: sparklines::StringSpark = sparklines::StringSpark::new(&TICKS).unwrap();
51    /// assert_eq!(spark.spark(&[1.0,2.0,3.0]), "▁▅█");
52    ///
53    /// let spark: sparklines::StringSpark = sparklines::StringSpark::new(&['a','b','c']).unwrap();
54    /// assert_eq!(spark.spark(&[1.0,2.0,3.0]), "abc");
55    /// ```
56    pub fn new(ticks: &'a [char]) -> Result<Self, Error> {
57        if ticks.is_empty() {
58            return Err(Error::EmptyTicks);
59        }
60        Ok(Self {
61            min: None,
62            max: None,
63            ticks,
64            middle_idx: ticks.len() / 2,
65            build_indexer: Default::default(),
66        })
67    }
68
69    /// Create a new `SparkLines` instance.
70    ///
71    /// # Examples
72    /// ```
73    /// # use sparklines::TICKS;
74    ///
75    /// let spark: sparklines::StringSpark = sparklines::StringSpark::new_with_min_max(&TICKS, 2.0, 3.0).unwrap();
76    /// assert_eq!(spark.spark(&[1.0,2.0,3.0,4.0]), "▁▁██");
77    ///
78    /// let spark: sparklines::StringSpark = sparklines::StringSpark::new_with_min_max(&TICKS, 1.0, 3.0).unwrap();
79    /// assert_eq!(spark.spark(&[0.0,2.0,300.0]), "▁▅█");
80    /// ```
81    pub fn new_with_min_max(ticks: &'a [char], min: f64, max: f64) -> Result<Self, Error> {
82        if ticks.is_empty() {
83            return Err(Error::EmptyTicks);
84        }
85        Ok(Self {
86            min: Some(min),
87            max: Some(max),
88            ticks,
89            middle_idx: ticks.len() / 2,
90            build_indexer: Default::default(),
91        })
92    }
93
94    /// Convert a slice of `f64` values into a String representing a sparkline.
95    ///
96    /// # Example
97    /// ```
98    /// let spark: sparklines::StringSpark = sparklines::StringSpark::new(&sparklines::TICKS).unwrap();
99    /// assert_eq!(spark.spark(&[1.0,2.0,3.0]), "▁▅█");
100    /// ```
101    pub fn spark(&self, data: &[f64]) -> String {
102        let mut result = String::with_capacity(data.len() * 4);
103        let mut min: Option<&f64> = self.min.as_ref();
104        let mut max: Option<&f64> = self.max.as_ref();
105        if min.is_none() || max.is_none() {
106            for v in data {
107                if v.is_nan() {
108                    continue;
109                }
110                if let Some(m) = min {
111                    if v < m {
112                        min = Some(v);
113                    }
114                } else {
115                    min = Some(v);
116                }
117                if let Some(m) = max {
118                    if v > m {
119                        max = Some(v);
120                    }
121                } else {
122                    max = Some(v);
123                }
124            }
125        }
126        if let (Some(min), Some(max)) = (min, max) {
127            if min.eq(max) {
128                data.iter().for_each(|_| {
129                    result.push(self.ticks[self.middle_idx]);
130                })
131            } else {
132                let indexer = self.build_indexer.build_indexer(*min, *max, self.ticks);
133                data.iter().for_each(|v| {
134                    if !v.is_nan() {
135                        result.push(self.ticks[indexer.index(*v)]);
136                    }
137                });
138            }
139        }
140        result
141    }
142}
143
impl<'a> StringSpark<'a> {
    /// Construct a `StringSpark` using the built-in ticks and the default
    /// algorithmic indexer.
    ///
    /// NOTE(review): this inherent `default` coexists with the generic
    /// [`Default`] impl below. It appears to exist so `StringSpark::default()`
    /// resolves without callers annotating `I` (type-parameter defaults are
    /// not used by expression inference) — confirm before removing either one.
    pub fn default() -> Self {
        // TICKS is a non-empty const, so `new` cannot fail here.
        Self::new(&TICKS).expect("default ticks are not empty")
    }
}
151
impl<'a, I> Default for StringSpark<'a, I>
where
    I: BuildIndexer<f64, usize> + Default,
{
    /// Build a sparkline renderer over [`TICKS`] with `I`'s default indexer.
    fn default() -> Self {
        // TICKS is a non-empty const, so `new` cannot fail here.
        Self::new(&TICKS).expect("default ticks are not empty")
    }
}
160
161/// Converts a slice of `f64` to a `String` representing a sparkline .
162///
163/// # Example
164/// ```
165/// # use sparklines::spark;
166/// assert_eq!(spark(&[1.0,2.0,3.0]), "▁▅█");
167/// ```
168pub fn spark(data: &[f64]) -> String {
169    StringSpark::default().spark(data)
170}
171
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;

    // Table-driven rendering checks for the algorithmic indexer: even spreads,
    // a flat series, a single point, and an empty slice.
    #[test_case(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0] => "▁▁▂▂▃▃▄▄▅▅▆▆▇▇██")]
    #[test_case(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0] => "▁▂▃▄▅▆▇█")]
    #[test_case(&[1.0, 1.0, 1.0, 1.0] => "▅▅▅▅")]
    #[test_case(&[1.0 ] => "▅")]
    #[test_case(&[] => "")]
    fn test_spark(data: &[f64]) -> String {
        let spark = AlgorithmicSpark::new(&TICKS).unwrap();
        spark.spark(data)
    }

    #[test]
    fn test_default() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "▁▅█");
    }

    // The rangemap indexer buckets differently from the algorithmic one:
    // the midpoint renders as '▄' rather than '▅'.
    #[test]
    fn test_rangemap_indexer() {
        let spark = RangemapSpark::default();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "▁▄█");
    }

    #[test]
    fn test_stringspark() {
        let spark = StringSpark::default();
        assert_eq!(spark.spark(&[f64::NAN, 1.0, 2.0, f64::NAN, 3.0]), "▁▅█");
    }

    #[test]
    fn test_non_default() {
        let spark = AlgorithmicSpark::new(&['a', 'b', 'c']).unwrap();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "abc");
    }

    // NaN entries are dropped from the output rather than rendered.
    #[test]
    fn test_nan() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(spark.spark(&[f64::NAN, 1.0, 2.0, f64::NAN, 3.0]), "▁▅█");
    }

    // NOTE(review): ignored — infinite inputs are apparently not handled yet;
    // confirm the intended behavior before enabling this test.
    #[ignore]
    #[test]
    fn test_infinite() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(
            spark.spark(&[f64::NEG_INFINITY, 0.0, f64::INFINITY,]),
            "▁▅█"
        );
    }

    #[test]
    fn test_spark_fn() {
        assert_eq!(spark(&[1.0, 2.0, 3.0]), "▁▅█");
    }

    // Empty tick slices must be rejected by both constructors.
    #[test]
    fn test_empty_ticks() {
        assert!(AlgorithmicSpark::new(&[]).is_err());
        assert!(AlgorithmicSpark::new_with_min_max(&[], 0.0, 1.0).is_err());
    }
}
237}