use crate::{
    AtomicCount, AtomicHistogram, AtomicHistogram32, CumulativeROHistogram,
    CumulativeROHistogram32, Error, Histogram, Histogram32, SparseHistogram, SparseHistogram32,
};
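
/// Widens a 32-bit histogram into a standard 64-bit histogram.
///
/// This conversion is lossless: the configuration is preserved and every
/// bucket count is widened from `u32` to `u64`.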
impl From<&Histogram32> for Histogram {
    fn from(h: &Histogram32) -> Self {
        let buckets: Vec<u64> = h.as_slice().iter().map(|&c| c as u64).collect();
        Histogram::from_buckets(
            h.config().grouping_power(),
            h.config().max_value_power(),
            buckets,
        )
        .expect("widening preserves bucket count")
    }
}
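
/// Widens an atomic 32-bit histogram into an atomic 64-bit histogram.
///
/// The source is snapshotted with `load()`, widened, and the widened counts
/// are stored into a fresh atomic histogram; increments that land after the
/// snapshot is taken are not reflected in the result.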
impl From<&AtomicHistogram32> for AtomicHistogram {
    fn from(h: &AtomicHistogram32) -> Self {
        let snapshot = h.load();
        let widened: Histogram = (&snapshot).into();
        let out = AtomicHistogram::with_config(&widened.config());
        for (i, &c) in widened.as_slice().iter().enumerate() {
            out.buckets[i].store_relaxed(c);
        }
        out
    }
}
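
/// Widens a sparse 32-bit histogram into a sparse 64-bit histogram,
/// preserving the configuration and bucket indices.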
impl From<&SparseHistogram32> for SparseHistogram {
    fn from(h: &SparseHistogram32) -> Self {
        let widened: Vec<u64> = h.count().iter().map(|&c| c as u64).collect();
        SparseHistogram::from_parts(h.config(), h.index().to_vec(), widened)
            .expect("widening preserves invariants")
    }
}
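
/// Widens a cumulative read-only 32-bit histogram into its 64-bit
/// counterpart, preserving the configuration and bucket indices.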
impl From<&CumulativeROHistogram32> for CumulativeROHistogram {
    fn from(h: &CumulativeROHistogram32) -> Self {
        let widened: Vec<u64> = h.count().iter().map(|&c| c as u64).collect();
        CumulativeROHistogram::from_parts(h.config(), h.index().to_vec(), widened)
            .expect("widening preserves invariants")
    }
}
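
/// Narrows a standard 64-bit histogram into a 32-bit histogram.
///
/// The configuration is preserved. Returns `Error::Overflow` if any bucket
/// count does not fit in a `u32`.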
impl TryFrom<&Histogram> for Histogram32 {
    type Error = Error;
    fn try_from(h: &Histogram) -> Result<Self, Error> {
        let mut narrowed: Vec<u32> = Vec::with_capacity(h.as_slice().len());
        for &c in h.as_slice() {
            narrowed.push(u32::try_from(c).map_err(|_| Error::Overflow)?);
        }
        Histogram32::from_buckets(
            h.config().grouping_power(),
            h.config().max_value_power(),
            narrowed,
        )
    }
}
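
/// Narrows a sparse 64-bit histogram into a sparse 32-bit histogram.
///
/// Returns `Error::Overflow` if any bucket count does not fit in a `u32`.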
impl TryFrom<&SparseHistogram> for SparseHistogram32 {
    type Error = Error;
    fn try_from(h: &SparseHistogram) -> Result<Self, Error> {
        let mut narrowed: Vec<u32> = Vec::with_capacity(h.count().len());
        for &c in h.count() {
            narrowed.push(u32::try_from(c).map_err(|_| Error::Overflow)?);
        }
        SparseHistogram32::from_parts(h.config(), h.index().to_vec(), narrowed)
    }
}
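
/// Narrows a cumulative read-only 64-bit histogram into its 32-bit
/// counterpart, returning `Error::Overflow` if the total count does not
/// fit in a `u32`.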
impl TryFrom<&CumulativeROHistogram> for CumulativeROHistogram32 {
    type Error = Error;
    fn try_from(h: &CumulativeROHistogram) -> Result<Self, Error> {
        if let Some(&last) = h.count().last() {
            // Cumulative counts are non-decreasing, so checking the final
            // count is enough to rule out overflow in every bucket.
            if u32::try_from(last).is_err() {
                return Err(Error::Overflow);
            }
        }
        let narrowed: Vec<u32> = h.count().iter().map(|&c| c as u32).collect();
        CumulativeROHistogram32::from_parts(h.config(), h.index().to_vec(), narrowed)
    }
}
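
/// Converts a standard 64-bit histogram directly into a cumulative
/// read-only 32-bit histogram.
///
/// This is equivalent to converting to a `CumulativeROHistogram` and then
/// narrowing, but it runs in a single pass: empty buckets are skipped and a
/// running total is accumulated over the occupied ones. Returns
/// `Error::Overflow` if the running total exceeds `u32::MAX`.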
impl TryFrom<&Histogram> for CumulativeROHistogram32 {
    type Error = Error;
    fn try_from(h: &Histogram) -> Result<Self, Error> {
        let mut index: Vec<u32> = Vec::new();
        let mut count: Vec<u32> = Vec::new();
        let mut running: u64 = 0;
        for (i, &n) in h.as_slice().iter().enumerate() {
            if n > 0 {
                running = running.checked_add(n).ok_or(Error::Overflow)?;
                if running > u32::MAX as u64 {
                    return Err(Error::Overflow);
                }
                index.push(i as u32);
                count.push(running as u32);
            }
        }
        CumulativeROHistogram32::from_parts(h.config(), index, count)
    }
}
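
/// Converts a standard 64-bit histogram directly into a sparse 32-bit
/// histogram, keeping only the occupied buckets.
///
/// Returns `Error::Overflow` if any bucket count does not fit in a `u32`.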
impl TryFrom<&Histogram> for SparseHistogram32 {
    type Error = Error;
    fn try_from(h: &Histogram) -> Result<Self, Error> {
        let mut index: Vec<u32> = Vec::new();
        let mut count: Vec<u32> = Vec::new();
        for (i, &n) in h.as_slice().iter().enumerate() {
            if n > 0 {
                count.push(u32::try_from(n).map_err(|_| Error::Overflow)?);
                index.push(i as u32);
            }
        }
        SparseHistogram32::from_parts(h.config(), index, count)
    }
}
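
/// Converts a sparse 64-bit histogram directly into a cumulative read-only
/// 32-bit histogram by accumulating a running total over the occupied
/// buckets. Returns `Error::Overflow` if the running total exceeds
/// `u32::MAX`.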
impl TryFrom<&SparseHistogram> for CumulativeROHistogram32 {
    type Error = Error;
    fn try_from(h: &SparseHistogram) -> Result<Self, Error> {
        let mut running: u64 = 0;
        let mut count: Vec<u32> = Vec::with_capacity(h.count().len());
        for &n in h.count() {
            running = running.checked_add(n).ok_or(Error::Overflow)?;
            if running > u32::MAX as u64 {
                return Err(Error::Overflow);
            }
            count.push(running as u32);
        }
        CumulativeROHistogram32::from_parts(h.config(), h.index().to_vec(), count)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Config;
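
    // Widening conversions (u32 -> u64) are lossless and infallible.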
    #[test]
    fn widen_histogram() {
        let mut h32 = Histogram32::new(7, 32).unwrap();
        h32.add(1, 1234u32).unwrap();
        h32.add(1000, 5678u32).unwrap();
        let h64: Histogram = (&h32).into();
        assert_eq!(h64.config(), h32.config());
        for (a, b) in h64.as_slice().iter().zip(h32.as_slice().iter()) {
            assert_eq!(*a, *b as u64);
        }
    }

    #[test]
    fn widen_sparse() {
        let config = Config::new(7, 32).unwrap();
        let s32 = SparseHistogram32::from_parts(config, vec![1, 3], vec![10u32, 20]).unwrap();
        let s64: SparseHistogram = (&s32).into();
        assert_eq!(s64.count(), &[10u64, 20]);
        assert_eq!(s64.index(), &[1u32, 3]);
    }

    #[test]
    fn widen_cumulative() {
        let config = Config::new(7, 32).unwrap();
        let c32 = CumulativeROHistogram32::from_parts(config, vec![1, 3], vec![10u32, 30]).unwrap();
        let c64: CumulativeROHistogram = (&c32).into();
        assert_eq!(c64.count(), &[10u64, 30]);
    }

    #[cfg(target_has_atomic = "32")]
    #[cfg(target_has_atomic = "64")]
    #[test]
    fn widen_atomic_histogram() {
        let h32 = AtomicHistogram32::new(7, 32).unwrap();
        h32.add(5, 100u32).unwrap();
        h32.add(50, 200u32).unwrap();
        let h64: AtomicHistogram = (&h32).into();
        let snap = h64.load();
        let total: u64 = snap.as_slice().iter().sum();
        assert_eq!(total, 300);
    }
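
    // Narrowing conversions (u64 -> u32) fail with Error::Overflow when a
    // count does not fit in a u32.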
    #[test]
    fn narrow_histogram_success() {
        let mut h64 = Histogram::new(7, 32).unwrap();
        h64.add(1, 100u64).unwrap();
        h64.add(1000, 200u64).unwrap();
        let h32: Histogram32 = (&h64).try_into().unwrap();
        assert_eq!(h32.as_slice()[1], 100u32);
    }

    #[test]
    fn narrow_histogram_overflow() {
        let mut h64 = Histogram::new(2, 4).unwrap();
        h64.add(1, (u32::MAX as u64) + 1).unwrap();
        let r: Result<Histogram32, _> = (&h64).try_into();
        assert_eq!(r, Err(Error::Overflow));
    }

    #[test]
    fn narrow_sparse_overflow() {
        let config = Config::new(7, 32).unwrap();
        let s64 =
            SparseHistogram::from_parts(config, vec![1], vec![(u32::MAX as u64) + 1]).unwrap();
        let r: Result<SparseHistogram32, _> = (&s64).try_into();
        assert_eq!(r, Err(Error::Overflow));
    }

    #[test]
    fn narrow_cumulative_checks_total_only() {
        let config = Config::new(7, 32).unwrap();
        let c64 = CumulativeROHistogram::from_parts(
            config,
            vec![1, 3],
            vec![100u64, (u32::MAX as u64) + 1],
        )
        .unwrap();
        let r: Result<CumulativeROHistogram32, _> = (&c64).try_into();
        assert_eq!(r, Err(Error::Overflow));

        let c64_ok =
            CumulativeROHistogram::from_parts(config, vec![1, 3], vec![100u64, 200]).unwrap();
        let c32: CumulativeROHistogram32 = (&c64_ok).try_into().unwrap();
        assert_eq!(c32.total_count(), 200);
    }

    #[test]
    fn round_trip_widen_then_narrow() {
        let mut h32 = Histogram32::new(7, 32).unwrap();
        h32.add(5, 1234u32).unwrap();
        h32.add(50, 5678u32).unwrap();
        let h64: Histogram = (&h32).into();
        let h32_back: Histogram32 = (&h64).try_into().unwrap();
        assert_eq!(h32.as_slice(), h32_back.as_slice());
    }
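
    // Direct conversions from a standard histogram into the sparse and
    // cumulative 32-bit representations.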
    #[test]
    fn histogram_to_cumulative32() {
        let mut h = Histogram::new(7, 32).unwrap();
        h.add(1, 100u64).unwrap();
        h.add(50, 200u64).unwrap();
        h.add(1000, 300u64).unwrap();
        let croh: CumulativeROHistogram32 = (&h).try_into().unwrap();
        assert_eq!(croh.total_count(), 600);
        assert_eq!(croh.count().len(), 3);
    }

    #[test]
    fn histogram_to_cumulative32_overflow() {
        let mut h = Histogram::new(2, 4).unwrap();
        h.add(0, 3_000_000_000u64).unwrap();
        h.add(1, 2_000_000_000u64).unwrap();
        let r: Result<CumulativeROHistogram32, _> = (&h).try_into();
        assert_eq!(r, Err(Error::Overflow));
    }

    #[test]
    fn histogram_to_sparse32() {
        let mut h = Histogram::new(7, 32).unwrap();
        h.add(1, 100u64).unwrap();
        h.add(1000, 200u64).unwrap();
        let s: SparseHistogram32 = (&h).try_into().unwrap();
        assert_eq!(s.count().iter().map(|&c| c as u64).sum::<u64>(), 300);
    }

    #[test]
    fn sparse_to_cumulative32() {
        let config = Config::new(7, 32).unwrap();
        let s = SparseHistogram::from_parts(config, vec![1, 3], vec![100u64, 200]).unwrap();
        let c: CumulativeROHistogram32 = (&s).try_into().unwrap();
        assert_eq!(c.count(), &[100u32, 300]);
    }

    #[test]
    fn direct_path_matches_two_step() {
        let mut h = Histogram::new(4, 10).unwrap();
        for v in 1..1024u64 {
            h.increment(v).unwrap();
        }
        let direct: CumulativeROHistogram32 = (&h).try_into().unwrap();
        let mid: CumulativeROHistogram = (&h).into();
        let two_step: CumulativeROHistogram32 = (&mid).try_into().unwrap();
        assert_eq!(direct.count(), two_step.count());
        assert_eq!(direct.index(), two_step.index());
    }

    #[test]
    fn snapshot_pipeline_end_to_end() {
        let recorder = AtomicHistogram::new(7, 64).unwrap();
        for v in 1..=50u64 {
            recorder.increment(v).unwrap();
        }
        let snap_t0 = recorder.load();
        for v in 1..=50u64 {
            recorder.increment(v).unwrap();
        }
        let snap_t1 = recorder.load();
        let delta = snap_t1.checked_sub(&snap_t0).unwrap();
        let analytic: CumulativeROHistogram32 = (&delta).try_into().unwrap();
        assert_eq!(analytic.total_count(), 50);
    }
}