1pub use crate::indexer::algorithmic::BuildAlgorithmicIndexer;
4pub use crate::indexer::rangemap::BuildRangemapIndexer;
5use crate::indexer::{BuildIndexer, Indexer};
6
7mod indexer;
8
/// Default tick characters: the eight Unicode block elements, ordered from
/// lowest (▁) to highest (█).
pub const TICKS: [char; 8] = ['▁', '▂', '▃', '▄', '▅', '▆', '▇', '█'];
14
/// Errors returned by the spark constructors.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Error {
    /// The provided tick slice was empty; at least one tick character is
    /// required to render anything.
    EmptyTicks,
}

// Public error types should be printable and usable with `?`/`Box<dyn Error>`;
// implement `Display` and `std::error::Error` (fully qualified, no new `use`).
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::EmptyTicks => write!(f, "ticks must not be empty"),
        }
    }
}

impl std::error::Error for Error {}
21
/// [`StringSpark`] specialized to [`BuildAlgorithmicIndexer`].
pub type AlgorithmicSpark<'a> = StringSpark<'a, BuildAlgorithmicIndexer>;

/// [`StringSpark`] specialized to [`BuildRangemapIndexer`].
pub type RangemapSpark<'a> = StringSpark<'a, BuildRangemapIndexer>;
27
/// Renders slices of `f64` samples as a sparkline string.
///
/// `I` selects the strategy used to map a value onto a tick index and
/// defaults to [`BuildAlgorithmicIndexer`].
pub struct StringSpark<'a, I = BuildAlgorithmicIndexer>
where
    I: BuildIndexer<f64, usize>,
{
    // Fixed lower bound; `None` means derive it from the data on each call.
    min: Option<f64>,
    // Fixed upper bound; `None` means derive it from the data on each call.
    max: Option<f64>,
    // Output characters, ordered from lowest to highest level.
    ticks: &'a [char],
    // Tick index used when the range is degenerate (min == max).
    middle_idx: usize,
    // Factory building the value→tick-index mapper for a concrete range.
    build_indexer: I,
}
39
40impl<'a, I> StringSpark<'a, I>
41where
42 I: BuildIndexer<f64, usize> + Default,
43{
44 pub fn new(ticks: &'a [char]) -> Result<Self, Error> {
57 if ticks.is_empty() {
58 return Err(Error::EmptyTicks);
59 }
60 Ok(Self {
61 min: None,
62 max: None,
63 ticks,
64 middle_idx: ticks.len() / 2,
65 build_indexer: Default::default(),
66 })
67 }
68
69 pub fn new_with_min_max(ticks: &'a [char], min: f64, max: f64) -> Result<Self, Error> {
82 if ticks.is_empty() {
83 return Err(Error::EmptyTicks);
84 }
85 Ok(Self {
86 min: Some(min),
87 max: Some(max),
88 ticks,
89 middle_idx: ticks.len() / 2,
90 build_indexer: Default::default(),
91 })
92 }
93
94 pub fn spark(&self, data: &[f64]) -> String {
102 let mut result = String::with_capacity(data.len() * 4);
103 let mut min: Option<&f64> = self.min.as_ref();
104 let mut max: Option<&f64> = self.max.as_ref();
105 if min.is_none() || max.is_none() {
106 for v in data {
107 if v.is_nan() {
108 continue;
109 }
110 if let Some(m) = min {
111 if v < m {
112 min = Some(v);
113 }
114 } else {
115 min = Some(v);
116 }
117 if let Some(m) = max {
118 if v > m {
119 max = Some(v);
120 }
121 } else {
122 max = Some(v);
123 }
124 }
125 }
126 if let (Some(min), Some(max)) = (min, max) {
127 if min.eq(max) {
128 data.iter().for_each(|_| {
129 result.push(self.ticks[self.middle_idx]);
130 })
131 } else {
132 let indexer = self.build_indexer.build_indexer(*min, *max, self.ticks);
133 data.iter().for_each(|v| {
134 if !v.is_nan() {
135 result.push(self.ticks[indexer.index(*v)]);
136 }
137 });
138 }
139 }
140 result
141 }
142}
143
144impl<'a> StringSpark<'a> {
145 pub fn default() -> Self {
148 Self::new(&TICKS).expect("default ticks are not empty")
149 }
150}
151
impl<'a, I> Default for StringSpark<'a, I>
where
    I: BuildIndexer<f64, usize> + Default,
{
    /// Builds a spark renderer over the standard [`TICKS`] characters.
    fn default() -> Self {
        // TICKS is a non-empty const, so `new` cannot return EmptyTicks.
        Self::new(&TICKS).expect("default ticks are not empty")
    }
}
160
161pub fn spark(data: &[f64]) -> String {
169 StringSpark::default().spark(data)
170}
171
#[cfg(test)]
mod tests {
    use super::*;
    use test_case::test_case;

    // Ramps of 16 and 8 samples, a flat series, a single sample, and empty
    // input, rendered through the algorithmic indexer.
    #[test_case(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0] => "▁▁▂▂▃▃▄▄▅▅▆▆▇▇██")]
    #[test_case(&[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0] => "▁▂▃▄▅▆▇█")]
    #[test_case(&[1.0, 1.0, 1.0, 1.0] => "▅▅▅▅")]
    #[test_case(&[1.0 ] => "▅")]
    #[test_case(&[] => "")]
    fn test_spark(data: &[f64]) -> String {
        let spark = AlgorithmicSpark::new(&TICKS).unwrap();
        spark.spark(data)
    }

    // Default construction uses the standard TICKS set.
    #[test]
    fn test_default() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "▁▅█");
    }

    // The rangemap indexer maps the midpoint differently (▄ vs ▅).
    #[test]
    fn test_rangemap_indexer() {
        let spark = RangemapSpark::default();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "▁▄█");
    }

    // Bare StringSpark (default type parameter) skips NaN samples.
    #[test]
    fn test_stringspark() {
        let spark = StringSpark::default();
        assert_eq!(spark.spark(&[f64::NAN, 1.0, 2.0, f64::NAN, 3.0]), "▁▅█");
    }

    // Custom, non-Unicode tick characters work as well.
    #[test]
    fn test_non_default() {
        let spark = AlgorithmicSpark::new(&['a', 'b', 'c']).unwrap();
        assert_eq!(spark.spark(&[1.0, 2.0, 3.0]), "abc");
    }

    // NaN samples produce no output character.
    #[test]
    fn test_nan() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(spark.spark(&[f64::NAN, 1.0, 2.0, f64::NAN, 3.0]), "▁▅█");
    }

    // NOTE(review): ignored — infinite bounds are presumably not handled by
    // the indexers yet; the expected output here is aspirational. Confirm
    // before un-ignoring.
    #[ignore]
    #[test]
    fn test_infinite() {
        let spark = AlgorithmicSpark::default();
        assert_eq!(
            spark.spark(&[f64::NEG_INFINITY, 0.0, f64::INFINITY,]),
            "▁▅█"
        );
    }

    // The free `spark` function mirrors StringSpark::default().spark(..).
    #[test]
    fn test_spark_fn() {
        assert_eq!(spark(&[1.0, 2.0, 3.0]), "▁▅█");
    }

    // Both constructors reject an empty tick slice.
    #[test]
    fn test_empty_ticks() {
        assert!(AlgorithmicSpark::new(&[]).is_err());
        assert!(AlgorithmicSpark::new_with_min_max(&[], 0.0, 1.0).is_err());
    }
}