1use std::sync::Arc;
2use std::sync::atomic::{AtomicI32, AtomicU32, AtomicU64, Ordering};
3
4use parking_lot::Mutex;
5
6use crate::error::{ADError, ADResult};
7use crate::ndarray::{NDArray, NDDataBuffer, NDDataType, NDDimension};
8use crate::ndarray_handle::{NDArrayHandle, pooled_array};
9use crate::timestamp::EpicsTimestamp;
10
/// A free-list buffer is only reused when its capacity does not exceed the
/// requested size by more than this ratio; an oversized best-fit is dropped
/// instead so the pool does not pin large buffers for small requests.
const THRESHOLD_SIZE_RATIO: f64 = 1.5;
14
/// Pool of reusable `NDArray` buffers with an optional memory budget.
pub struct NDArrayPool {
    /// Memory budget in bytes; `0` disables the limit.
    max_memory: usize,
    /// Bytes currently accounted to pool-owned buffers (in use + free list).
    allocated_bytes: AtomicU64,
    /// Monotonically increasing id assigned to each allocated array.
    next_unique_id: AtomicI32,
    /// Released arrays awaiting reuse.
    free_list: Mutex<Vec<NDArray>>,
    /// Count of live buffers allocated through the pool (in use + free list).
    num_alloc_buffers: AtomicU32,
    /// Count of buffers currently sitting in `free_list`.
    num_free_buffers: AtomicU32,
}
29
30impl NDArrayPool {
31 pub fn new(max_memory: usize) -> Self {
32 Self {
33 max_memory,
34 allocated_bytes: AtomicU64::new(0),
35 next_unique_id: AtomicI32::new(1),
36 free_list: Mutex::new(Vec::new()),
37 num_alloc_buffers: AtomicU32::new(0),
38 num_free_buffers: AtomicU32::new(0),
39 }
40 }
41
    /// Allocate an array with the given dimensions and element type.
    ///
    /// Reuse strategy: scan the free list for the smallest buffer whose
    /// capacity can hold the request. If that best fit is oversized by more
    /// than `THRESHOLD_SIZE_RATIO`, drop it (returning its bytes to the
    /// budget) and fall through to a fresh allocation; otherwise reuse it in
    /// place. Fresh allocations reserve bytes against `max_memory` with a CAS
    /// loop, evicting free-list buffers (largest first) when over budget.
    ///
    /// # Errors
    /// Returns `ADError::PoolExhausted` when `max_memory > 0` and the request
    /// cannot fit even after evicting free-list buffers.
    pub fn alloc(&self, dims: Vec<NDDimension>, data_type: NDDataType) -> ADResult<NDArray> {
        let num_elements: usize = dims.iter().map(|d| d.size).product();
        let needed_bytes = num_elements * data_type.element_size();

        // Best-fit search: smallest free buffer with capacity >= needed_bytes.
        let reused = {
            let mut free = self.free_list.lock();
            let mut best_idx = None;
            let mut best_cap = usize::MAX;
            for (i, arr) in free.iter().enumerate() {
                let cap = arr.data.capacity_bytes();
                if cap >= needed_bytes && cap < best_cap {
                    best_cap = cap;
                    best_idx = Some(i);
                }
            }
            if let Some(idx) = best_idx {
                if best_cap as f64 > needed_bytes as f64 * THRESHOLD_SIZE_RATIO {
                    // Best fit is too oversized: drop it entirely (and its
                    // accounting) so a right-sized buffer is allocated below.
                    let dropped = free.swap_remove(idx);
                    let dropped_cap = dropped.data.capacity_bytes();
                    self.num_free_buffers.fetch_sub(1, Ordering::Relaxed);
                    self.allocated_bytes
                        .fetch_sub(dropped_cap as u64, Ordering::Relaxed);
                    self.num_alloc_buffers.fetch_sub(1, Ordering::Relaxed);
                    None
                } else {
                    let arr = free.swap_remove(idx);
                    self.num_free_buffers.fetch_sub(1, Ordering::Relaxed);
                    Some(arr)
                }
            } else {
                None
            }
        };

        let mut arr = if let Some(mut reused) = reused {
            if reused.data.data_type() != data_type {
                // Element type changed: replace the buffer and adjust the
                // byte accounting by the capacity delta.
                let old_cap = reused.data.capacity_bytes();
                reused.data = NDDataBuffer::zeros(data_type, num_elements);
                let new_cap = reused.data.capacity_bytes();
                if new_cap > old_cap {
                    let diff = new_cap - old_cap;
                    let current = self.allocated_bytes.load(Ordering::Relaxed);
                    if self.max_memory > 0 && current + diff as u64 > self.max_memory as u64 {
                        // NOTE(review): on this error path the reused array is
                        // dropped, but `allocated_bytes` still includes its old
                        // capacity and `num_alloc_buffers` still counts it, so
                        // the accounting leaks — confirm and rebalance here.
                        return Err(ADError::PoolExhausted(needed_bytes, self.max_memory));
                    }
                    self.allocated_bytes
                        .fetch_add(diff as u64, Ordering::Relaxed);
                } else {
                    let diff = old_cap - new_cap;
                    self.allocated_bytes
                        .fetch_sub(diff as u64, Ordering::Relaxed);
                }
            } else {
                // Same element type: resize in place within the capacity.
                reused.data.resize(num_elements);
            }
            reused.dims = dims;
            reused.attributes.clear();
            reused.codec = None;
            reused
        } else {
            if self.max_memory > 0 {
                // Reserve needed_bytes via compare-exchange; on budget
                // overflow, evict free-list buffers and retry, else fail.
                loop {
                    let current = self.allocated_bytes.load(Ordering::Relaxed);
                    if current + needed_bytes as u64 > self.max_memory as u64 {
                        let mut freed_enough = false;
                        {
                            let mut free = self.free_list.lock();
                            // Evict largest-capacity buffers first.
                            free.sort_by(|a, b| {
                                b.data.capacity_bytes().cmp(&a.data.capacity_bytes())
                            });
                            let mut reclaimed = 0u64;
                            let over = (current + needed_bytes as u64)
                                .saturating_sub(self.max_memory as u64);
                            while !free.is_empty() && reclaimed < over {
                                let dropped = free.remove(0);
                                let dropped_cap = dropped.data.capacity_bytes();
                                self.allocated_bytes
                                    .fetch_sub(dropped_cap as u64, Ordering::Relaxed);
                                self.num_free_buffers.fetch_sub(1, Ordering::Relaxed);
                                self.num_alloc_buffers.fetch_sub(1, Ordering::Relaxed);
                                reclaimed += dropped_cap as u64;
                            }
                            if reclaimed >= over {
                                freed_enough = true;
                            }
                        }
                        if !freed_enough {
                            return Err(ADError::PoolExhausted(needed_bytes, self.max_memory));
                        }
                        // Eviction freed space: re-read and retry reservation.
                        continue;
                    }
                    if self
                        .allocated_bytes
                        .compare_exchange_weak(
                            current,
                            current + needed_bytes as u64,
                            Ordering::Relaxed,
                            Ordering::Relaxed,
                        )
                        .is_ok()
                    {
                        break;
                    }
                }
            } else {
                // Unlimited pool: plain accounting, no reservation race.
                self.allocated_bytes
                    .fetch_add(needed_bytes as u64, Ordering::Relaxed);
            }
            self.num_alloc_buffers.fetch_add(1, Ordering::Relaxed);
            let new_arr = NDArray::new(dims, data_type);
            let actual_cap = new_arr.data.capacity_bytes();
            if actual_cap > needed_bytes {
                // The buffer may round its capacity up; account for the slack.
                self.allocated_bytes
                    .fetch_add((actual_cap - needed_bytes) as u64, Ordering::Relaxed);
            }
            new_arr
        };

        arr.unique_id = self.next_unique_id.fetch_add(1, Ordering::Relaxed);
        arr.timestamp = EpicsTimestamp::now();
        Ok(arr)
    }
171
172 pub fn alloc_copy(&self, source: &NDArray) -> ADResult<NDArray> {
175 let dims = source.dims.clone();
176 let data_type = source.data.data_type();
177 let mut copy = self.alloc(dims, data_type)?;
178 copy.data = source.data.clone();
179 copy.time_stamp = source.time_stamp;
180 copy.attributes = source.attributes.clone();
181 copy.codec = source.codec.clone();
182 Ok(copy)
183 }
184
185 pub fn release(&self, array: NDArray) {
187 let cap = array.data.capacity_bytes();
188 let mut free = self.free_list.lock();
189 free.push(array);
190 self.num_free_buffers.fetch_add(1, Ordering::Relaxed);
191
192 let total = self.allocated_bytes.load(Ordering::Relaxed) as usize;
195 if self.max_memory > 0 && total > self.max_memory && !free.is_empty() {
196 free.sort_by(|a, b| b.data.capacity_bytes().cmp(&a.data.capacity_bytes()));
198 let mut excess = total.saturating_sub(self.max_memory);
199 while excess > 0 && !free.is_empty() {
200 let dropped = free.remove(0);
201 let dropped_cap = dropped.data.capacity_bytes();
202 self.allocated_bytes
203 .fetch_sub(dropped_cap.min(total) as u64, Ordering::Relaxed);
204 self.num_free_buffers.fetch_sub(1, Ordering::Relaxed);
205 self.num_alloc_buffers.fetch_sub(1, Ordering::Relaxed);
206 if dropped_cap >= excess {
207 break;
208 }
209 excess -= dropped_cap;
210 }
211 }
212 let _ = cap;
213 }
214
215 pub fn empty_free_list(&self) {
217 let mut free = self.free_list.lock();
218 let count = free.len() as u32;
219 for arr in free.drain(..) {
220 let cap = arr.data.capacity_bytes();
221 self.allocated_bytes
222 .fetch_sub(cap as u64, Ordering::Relaxed);
223 self.num_alloc_buffers.fetch_sub(1, Ordering::Relaxed);
224 }
225 self.num_free_buffers.fetch_sub(count, Ordering::Relaxed);
226 }
227
    /// Total bytes currently accounted to pool-owned buffers
    /// (in-use plus free-list).
    pub fn allocated_bytes(&self) -> u64 {
        self.allocated_bytes.load(Ordering::Relaxed)
    }

    /// Number of buffers allocated through the pool and not yet dropped.
    pub fn num_alloc_buffers(&self) -> u32 {
        self.num_alloc_buffers.load(Ordering::Relaxed)
    }

    /// Number of buffers currently sitting in the free list.
    pub fn num_free_buffers(&self) -> u32 {
        self.num_free_buffers.load(Ordering::Relaxed)
    }

    /// Configured memory budget in bytes; `0` means unlimited.
    pub fn max_memory(&self) -> usize {
        self.max_memory
    }
243
244 pub fn alloc_handle(
247 pool: &Arc<Self>,
248 dims: Vec<NDDimension>,
249 data_type: NDDataType,
250 ) -> ADResult<NDArrayHandle> {
251 let array = pool.alloc(dims, data_type)?;
252 Ok(pooled_array(array, pool))
253 }
254
255 pub fn convert_type(&self, src: &NDArray, target_type: NDDataType) -> ADResult<NDArray> {
258 if src.data.data_type() == target_type {
259 return self.alloc_copy(src);
260 }
261 let mut out = crate::color::convert_data_type(src, target_type)?;
262 out.unique_id = self
263 .next_unique_id
264 .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
265 Ok(out)
266 }
267
    /// Extract, bin, and/or reverse a region of `src` into a new array,
    /// optionally converting the element type afterwards.
    ///
    /// Each `dims_out[i]` selects `size` source elements starting at `offset`
    /// along axis `i`, groups them in runs of `binning` (values in a bin are
    /// summed), and optionally reverses the axis. Output dimensions record
    /// the cumulative offset and binning relative to the source's own
    /// offset/binning. Axis 0 is the fastest-varying (smallest stride) axis.
    ///
    /// # Errors
    /// Returns `ADError::InvalidDimensions` when `dims_out` has the wrong
    /// length, an axis size of 0, a size smaller than its binning, or a
    /// region exceeding the source extent.
    pub fn convert(
        &self,
        src: &NDArray,
        dims_out: &[NDDimension],
        target_type: NDDataType,
    ) -> ADResult<NDArray> {
        let ndims = src.dims.len();
        if dims_out.len() != ndims {
            return Err(ADError::InvalidDimensions(format!(
                "convert: dims_out length {} != source ndims {}",
                dims_out.len(),
                ndims,
            )));
        }

        // Validate each axis and compute the binned output sizes.
        let mut out_sizes = Vec::with_capacity(ndims);
        for (i, d) in dims_out.iter().enumerate() {
            let bin = d.binning.max(1);
            if d.size == 0 {
                return Err(ADError::InvalidDimensions(format!(
                    "convert: dims_out[{}].size is 0",
                    i,
                )));
            }
            // Integer division: a trailing partial bin is discarded.
            let out_size = d.size / bin;
            if out_size == 0 {
                return Err(ADError::InvalidDimensions(format!(
                    "convert: dims_out[{}] size {} / binning {} = 0",
                    i, d.size, bin,
                )));
            }
            if d.offset + d.size > src.dims[i].size {
                return Err(ADError::InvalidDimensions(format!(
                    "convert: dims_out[{}] offset {} + size {} > src dim size {}",
                    i, d.offset, d.size, src.dims[i].size,
                )));
            }
            out_sizes.push(out_size);
        }

        let src_type = src.data.data_type();

        // Output dims accumulate offset/binning on top of the source's own,
        // so they stay meaningful relative to the original detector frame.
        let mut out_dims = Vec::with_capacity(ndims);
        for i in 0..ndims {
            let bin = dims_out[i].binning.max(1);
            out_dims.push(NDDimension {
                size: out_sizes[i],
                offset: src.dims[i].offset + dims_out[i].offset,
                binning: src.dims[i].binning * bin,
                reverse: dims_out[i].reverse,
            });
        }

        let total_out: usize = out_sizes.iter().product();

        // Row-major-style flat strides with axis 0 fastest.
        let mut src_strides = vec![1usize; ndims];
        for i in 1..ndims {
            src_strides[i] = src_strides[i - 1] * src.dims[i - 1].size;
        }

        let mut out_strides = vec![1usize; ndims];
        for i in 1..ndims {
            out_strides[i] = out_strides[i - 1] * out_sizes[i - 1];
        }

        // Generates the per-dtype conversion loop: for every output element,
        // decode its coordinates, apply per-axis reversal, sum the source
        // values inside its binning window (in f64), and cast back.
        macro_rules! convert_buf {
            ($src_vec:expr, $T:ty, $zero:expr, $variant:ident) => {{
                let mut out = vec![$zero; total_out];

                for out_idx in 0..total_out {
                    let mut remaining = out_idx;
                    // NOTE(review): coordinate scratch arrays are fixed at 10
                    // axes; ndims > 10 would index out of bounds and panic —
                    // confirm callers never exceed 10 dimensions.
                    let mut out_coords = [0usize; 10];
                    for i in (0..ndims).rev() {
                        out_coords[i] = remaining / out_strides[i];
                        remaining %= out_strides[i];
                    }

                    // Apply per-axis reversal to the output coordinates.
                    let mut eff_coords = [0usize; 10];
                    for i in 0..ndims {
                        eff_coords[i] = if dims_out[i].reverse {
                            out_sizes[i] - 1 - out_coords[i]
                        } else {
                            out_coords[i]
                        };
                    }

                    // Sum every source element inside this output element's
                    // binning window; f64 accumulator avoids integer overflow.
                    let mut sum = 0.0f64;
                    let bin_total: usize = dims_out.iter().map(|d| d.binning.max(1)).product();

                    for bin_flat in 0..bin_total {
                        let mut br = bin_flat;
                        let mut src_flat = 0usize;
                        let mut valid = true;

                        for i in (0..ndims).rev() {
                            let bin = dims_out[i].binning.max(1);
                            let bin_off = br % bin;
                            br /= bin;

                            let src_coord = dims_out[i].offset + eff_coords[i] * bin + bin_off;
                            if src_coord >= src.dims[i].size {
                                valid = false;
                                break;
                            }
                            src_flat += src_coord * src_strides[i];
                        }

                        if valid {
                            sum += $src_vec[src_flat] as f64;
                        }
                    }

                    // f64 -> integer `as` casts saturate at the target type's
                    // bounds, so oversized bin sums clamp rather than wrap.
                    out[out_idx] = sum as $T;
                }

                NDDataBuffer::$variant(out)
            }};
        }

        let out_data = match &src.data {
            NDDataBuffer::I8(v) => convert_buf!(v, i8, 0i8, I8),
            NDDataBuffer::U8(v) => convert_buf!(v, u8, 0u8, U8),
            NDDataBuffer::I16(v) => convert_buf!(v, i16, 0i16, I16),
            NDDataBuffer::U16(v) => convert_buf!(v, u16, 0u16, U16),
            NDDataBuffer::I32(v) => convert_buf!(v, i32, 0i32, I32),
            NDDataBuffer::U32(v) => convert_buf!(v, u32, 0u32, U32),
            NDDataBuffer::I64(v) => convert_buf!(v, i64, 0i64, I64),
            NDDataBuffer::U64(v) => convert_buf!(v, u64, 0u64, U64),
            NDDataBuffer::F32(v) => convert_buf!(v, f32, 0.0f32, F32),
            NDDataBuffer::F64(v) => convert_buf!(v, f64, 0.0f64, F64),
        };

        // Metadata is carried over from the source; only the id is new.
        let mut arr = NDArray {
            unique_id: self
                .next_unique_id
                .fetch_add(1, std::sync::atomic::Ordering::Relaxed),
            timestamp: src.timestamp,
            time_stamp: src.time_stamp,
            dims: out_dims,
            data: out_data,
            attributes: src.attributes.clone(),
            codec: src.codec.clone(),
        };

        // Type conversion happens after the geometric transform so the
        // binning sums are computed in the source type's value range.
        if target_type != src_type {
            let converted = crate::color::convert_data_type(&arr, target_type)?;
            arr.data = converted.data;
        }

        Ok(arr)
    }
442}
443
// Compile-time check that NDArrayPool is Send + Sync, i.e. safe to share
// across threads (e.g. behind an Arc). Fails the build if a field change
// ever breaks that property.
const _: fn() = || {
    fn assert_send_sync<T: Send + Sync>() {}
    assert_send_sync::<NDArrayPool>();
};
449
/// Unit tests for pool accounting, free-list reuse, and the convert
/// (extract/bin/reverse) pipeline. All arrays use axis 0 as the
/// fastest-varying axis (flat index = x + size_x * y ...).
#[cfg(test)]
mod tests {
    use super::*;

    // Unique ids start at 1 and increment per allocation.
    #[test]
    fn test_alloc_auto_id() {
        let pool = NDArrayPool::new(1_000_000);
        let a1 = pool
            .alloc(vec![NDDimension::new(10)], NDDataType::UInt8)
            .unwrap();
        let a2 = pool
            .alloc(vec![NDDimension::new(10)], NDDataType::UInt8)
            .unwrap();
        assert_eq!(a1.unique_id, 1);
        assert_eq!(a2.unique_id, 2);
    }

    #[test]
    fn test_alloc_tracks_bytes() {
        let pool = NDArrayPool::new(1_000_000);
        let _ = pool
            .alloc(vec![NDDimension::new(100)], NDDataType::Float64)
            .unwrap();
        // 100 elements * 8 bytes; capacity may round up, hence >=.
        assert!(pool.allocated_bytes() >= 800);
    }

    #[test]
    fn test_alloc_exceeds_max() {
        let pool = NDArrayPool::new(100);
        let result = pool.alloc(vec![NDDimension::new(200)], NDDataType::UInt8);
        assert!(result.is_err());
    }

    #[test]
    fn test_alloc_copy_preserves_data() {
        let pool = NDArrayPool::new(1_000_000);
        let mut source = pool
            .alloc(vec![NDDimension::new(4)], NDDataType::UInt8)
            .unwrap();
        if let NDDataBuffer::U8(ref mut v) = source.data {
            v[0] = 1;
            v[1] = 2;
            v[2] = 3;
            v[3] = 4;
        }

        let copy = pool.alloc_copy(&source).unwrap();
        assert_ne!(copy.unique_id, source.unique_id);
        assert_eq!(copy.dims.len(), source.dims.len());
        if let NDDataBuffer::U8(ref v) = copy.data {
            assert_eq!(v, &[1, 2, 3, 4]);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_alloc_copy_tracks_bytes() {
        let pool = NDArrayPool::new(1_000_000);
        let source = pool
            .alloc(vec![NDDimension::new(10)], NDDataType::UInt16)
            .unwrap();
        assert_eq!(pool.allocated_bytes(), 20);
        let _ = pool.alloc_copy(&source).unwrap();
        assert!(pool.allocated_bytes() >= 40);
    }

    #[test]
    fn test_alloc_copy_exceeds_max() {
        let pool = NDArrayPool::new(60);
        let source = pool
            .alloc(vec![NDDimension::new(50)], NDDataType::UInt8)
            .unwrap();
        // 50 bytes already in use; a second 50-byte copy exceeds the 60 cap.
        assert!(pool.alloc_copy(&source).is_err());
    }

    #[test]
    fn test_release_and_reuse() {
        let pool = NDArrayPool::new(1_000_000);
        let arr = pool
            .alloc(vec![NDDimension::new(100)], NDDataType::UInt8)
            .unwrap();
        let _alloc_bytes_after_first = pool.allocated_bytes();
        assert_eq!(pool.num_alloc_buffers(), 1);

        pool.release(arr);
        assert_eq!(pool.num_free_buffers(), 1);

        // 80 <= 100 <= 80 * 1.5, so the released buffer is reused.
        let arr2 = pool
            .alloc(vec![NDDimension::new(80)], NDDataType::UInt8)
            .unwrap();
        assert_eq!(arr2.data.len(), 80);
    }

    #[test]
    fn test_free_list_prefers_smallest_sufficient() {
        let pool = NDArrayPool::new(10_000_000);
        let small = pool
            .alloc(vec![NDDimension::new(100)], NDDataType::UInt8)
            .unwrap();
        let large = pool
            .alloc(vec![NDDimension::new(10000)], NDDataType::UInt8)
            .unwrap();
        let medium = pool
            .alloc(vec![NDDimension::new(1000)], NDDataType::UInt8)
            .unwrap();

        pool.release(large);
        pool.release(medium);
        pool.release(small);
        assert_eq!(pool.num_free_buffers(), 3);

        // Best-fit should pick the 1000-byte buffer, not the 10000-byte one.
        let reused = pool
            .alloc(vec![NDDimension::new(900)], NDDataType::UInt8)
            .unwrap();
        assert!(reused.data.capacity_bytes() >= 900);
    }

    #[test]
    fn test_empty_free_list() {
        let pool = NDArrayPool::new(1_000_000);
        let a1 = pool
            .alloc(vec![NDDimension::new(100)], NDDataType::UInt8)
            .unwrap();
        let a2 = pool
            .alloc(vec![NDDimension::new(200)], NDDataType::UInt8)
            .unwrap();
        pool.release(a1);
        pool.release(a2);
        assert_eq!(pool.num_free_buffers(), 2);

        pool.empty_free_list();
        assert_eq!(pool.num_free_buffers(), 0);
        assert_eq!(pool.num_alloc_buffers(), 0);
    }

    #[test]
    fn test_num_free_buffers_tracking() {
        let pool = NDArrayPool::new(1_000_000);
        assert_eq!(pool.num_free_buffers(), 0);

        let a = pool
            .alloc(vec![NDDimension::new(10)], NDDataType::UInt8)
            .unwrap();
        assert_eq!(pool.num_free_buffers(), 0);

        pool.release(a);
        assert_eq!(pool.num_free_buffers(), 1);

        // Allocation of the same size reuses the single free buffer.
        let _ = pool
            .alloc(vec![NDDimension::new(10)], NDDataType::UInt8)
            .unwrap();
        assert_eq!(pool.num_free_buffers(), 0);
    }

    #[test]
    fn test_concurrent_alloc_release() {
        use std::sync::Arc;
        use std::thread;

        let pool = Arc::new(NDArrayPool::new(10_000_000));
        let mut handles = Vec::new();

        for _ in 0..4 {
            let pool = pool.clone();
            handles.push(thread::spawn(move || {
                for _ in 0..100 {
                    let arr = pool
                        .alloc(vec![NDDimension::new(100)], NDDataType::UInt8)
                        .unwrap();
                    pool.release(arr);
                }
            }));
        }

        for h in handles {
            h.join().unwrap();
        }

        // Every thread ends with a release, so the free list is non-empty.
        assert!(pool.num_free_buffers() > 0);
    }

    #[test]
    fn test_max_memory() {
        let pool = NDArrayPool::new(42);
        assert_eq!(pool.max_memory(), 42);
    }

    #[test]
    fn test_convert_type_same_type() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(vec![NDDimension::new(4)], NDDataType::UInt8);
        if let NDDataBuffer::U8(ref mut v) = src.data {
            v[0] = 10;
            v[1] = 20;
            v[2] = 30;
            v[3] = 40;
        }

        let out = pool.convert_type(&src, NDDataType::UInt8).unwrap();
        assert_eq!(out.data.data_type(), NDDataType::UInt8);
        if let NDDataBuffer::U8(ref v) = out.data {
            assert_eq!(v, &[10, 20, 30, 40]);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_type_u8_to_f32() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(vec![NDDimension::new(3)], NDDataType::UInt8);
        if let NDDataBuffer::U8(ref mut v) = src.data {
            v[0] = 0;
            v[1] = 128;
            v[2] = 255;
        }

        let out = pool.convert_type(&src, NDDataType::Float32).unwrap();
        assert_eq!(out.data.data_type(), NDDataType::Float32);
        if let NDDataBuffer::F32(ref v) = out.data {
            assert_eq!(v[0], 0.0);
            assert_eq!(v[1], 128.0);
            assert_eq!(v[2], 255.0);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_type_u16_to_u8() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(vec![NDDimension::new(2)], NDDataType::UInt16);
        if let NDDataBuffer::U16(ref mut v) = src.data {
            v[0] = 100;
            v[1] = 300;
        }

        let out = pool.convert_type(&src, NDDataType::UInt8).unwrap();
        if let NDDataBuffer::U8(ref v) = out.data {
            assert_eq!(v[0], 100);
            // 300 exceeds u8::MAX, so the narrowing conversion clamps to 255.
            assert_eq!(v[1], 255);
        } else {
            panic!("wrong type");
        }
    }

    // 4x4 UInt8 array with values 0..16; flat index = x + 4 * y.
    fn make_4x4_u8() -> NDArray {
        let mut arr = NDArray::new(
            vec![NDDimension::new(4), NDDimension::new(4)],
            NDDataType::UInt8,
        );
        if let NDDataBuffer::U8(ref mut v) = arr.data {
            for i in 0..16 {
                v[i] = i as u8;
            }
        }
        arr
    }

    #[test]
    fn test_convert_identity() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        assert_eq!(out.dims[0].size, 4);
        assert_eq!(out.dims[1].size, 4);
        if let NDDataBuffer::U8(ref v) = out.data {
            for i in 0..16 {
                assert_eq!(v[i], i as u8);
            }
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_offset_extraction() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        let dims_out = vec![
            NDDimension {
                size: 2,
                offset: 1,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 2,
                offset: 1,
                binning: 1,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        assert_eq!(out.dims[0].size, 2);
        assert_eq!(out.dims[1].size, 2);
        // 2x2 window at (1,1): flat indices 5, 6, 9, 10.
        if let NDDataBuffer::U8(ref v) = out.data {
            assert_eq!(v[0], 5);
            assert_eq!(v[1], 6);
            assert_eq!(v[2], 9);
            assert_eq!(v[3], 10);
        } else {
            panic!("wrong type");
        }

        assert_eq!(out.dims[0].offset, 1);
        assert_eq!(out.dims[1].offset, 1);
    }

    #[test]
    fn test_convert_binning_2x2() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 0,
                binning: 2,
                reverse: false,
            },
            NDDimension {
                size: 4,
                offset: 0,
                binning: 2,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        assert_eq!(out.dims[0].size, 2);
        assert_eq!(out.dims[1].size, 2);
        // Each output element is the SUM of its 2x2 block:
        // 0+1+4+5=10, 2+3+6+7=18, 8+9+12+13=42, 10+11+14+15=50.
        if let NDDataBuffer::U8(ref v) = out.data {
            assert_eq!(v[0], 10);
            assert_eq!(v[1], 18);
            assert_eq!(v[2], 42);
            assert_eq!(v[3], 50);
        } else {
            panic!("wrong type");
        }

        assert_eq!(out.dims[0].binning, 2);
        assert_eq!(out.dims[1].binning, 2);
    }

    #[test]
    fn test_convert_reverse_x() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(
            vec![NDDimension::new(4), NDDimension::new(1)],
            NDDataType::UInt8,
        );
        if let NDDataBuffer::U8(ref mut v) = src.data {
            v[0] = 10;
            v[1] = 20;
            v[2] = 30;
            v[3] = 40;
        }

        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: true,
            },
            NDDimension {
                size: 1,
                offset: 0,
                binning: 1,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        if let NDDataBuffer::U8(ref v) = out.data {
            assert_eq!(v[0], 40);
            assert_eq!(v[1], 30);
            assert_eq!(v[2], 20);
            assert_eq!(v[3], 10);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_reverse_y() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(
            vec![NDDimension::new(2), NDDimension::new(2)],
            NDDataType::UInt16,
        );
        if let NDDataBuffer::U16(ref mut v) = src.data {
            v[0] = 1;
            v[1] = 2;
            v[2] = 3;
            v[3] = 4;
        }

        let dims_out = vec![
            NDDimension {
                size: 2,
                offset: 0,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 2,
                offset: 0,
                binning: 1,
                reverse: true,
            },
        ];

        // Reversing axis 1 swaps the two rows.
        let out = pool.convert(&src, &dims_out, NDDataType::UInt16).unwrap();
        if let NDDataBuffer::U16(ref v) = out.data {
            assert_eq!(v[0], 3);
            assert_eq!(v[1], 4);
            assert_eq!(v[2], 1);
            assert_eq!(v[3], 2);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_with_type_change() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 0,
                binning: 2,
                reverse: false,
            },
            NDDimension {
                size: 4,
                offset: 0,
                binning: 2,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::Float32).unwrap();
        assert_eq!(out.data.data_type(), NDDataType::Float32);
        assert_eq!(out.dims[0].size, 2);
        assert_eq!(out.dims[1].size, 2);
        // Same block sums as test_convert_binning_2x2, then cast to f32.
        if let NDDataBuffer::F32(ref v) = out.data {
            assert_eq!(v[0], 10.0);
            assert_eq!(v[1], 18.0);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_cumulative_offset_and_binning() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(
            vec![NDDimension::new(4), NDDimension::new(4)],
            NDDataType::UInt8,
        );
        src.dims[0].offset = 10;
        src.dims[0].binning = 2;
        src.dims[1].offset = 20;
        src.dims[1].binning = 3;
        if let NDDataBuffer::U8(ref mut v) = src.data {
            for i in 0..16 {
                v[i] = i as u8;
            }
        }

        let dims_out = vec![
            NDDimension {
                size: 2,
                offset: 1,
                binning: 2,
                reverse: false,
            },
            NDDimension {
                size: 2,
                offset: 1,
                binning: 2,
                reverse: false,
            },
        ];

        // Offsets add and binnings multiply with the source's own values.
        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        assert_eq!(out.dims[0].offset, 10 + 1);
        assert_eq!(out.dims[1].offset, 20 + 1);
        assert_eq!(out.dims[0].binning, 2 * 2);
        assert_eq!(out.dims[1].binning, 3 * 2);
    }

    #[test]
    fn test_convert_1d() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(vec![NDDimension::new(8)], NDDataType::UInt16);
        if let NDDataBuffer::U16(ref mut v) = src.data {
            for i in 0..8 {
                v[i] = (i * 10) as u16;
            }
        }

        let dims_out = vec![NDDimension {
            size: 4,
            offset: 2,
            binning: 2,
            reverse: false,
        }];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt16).unwrap();
        assert_eq!(out.dims.len(), 1);
        assert_eq!(out.dims[0].size, 2);
        // Bins start at offset 2: 20+30=50, 40+50=90.
        if let NDDataBuffer::U16(ref v) = out.data {
            assert_eq!(v[0], 50);
            assert_eq!(v[1], 90);
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_3d() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(
            vec![
                NDDimension::new(2),
                NDDimension::new(2),
                NDDimension::new(2),
            ],
            NDDataType::UInt8,
        );
        if let NDDataBuffer::U8(ref mut v) = src.data {
            for i in 0..8 {
                v[i] = (i + 1) as u8;
            }
        }

        let dims_out = vec![
            NDDimension {
                size: 2,
                offset: 0,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 2,
                offset: 0,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 2,
                offset: 0,
                binning: 1,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        if let NDDataBuffer::U8(ref v) = out.data {
            for i in 0..8 {
                assert_eq!(v[i], (i + 1) as u8);
            }
        } else {
            panic!("wrong type");
        }
    }

    #[test]
    fn test_convert_dim_mismatch_error() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        // 1 output dim against a 2-D source must be rejected.
        let dims_out = vec![NDDimension {
            size: 4,
            offset: 0,
            binning: 1,
            reverse: false,
        }];

        let result = pool.convert(&src, &dims_out, NDDataType::UInt8);
        assert!(result.is_err());
    }

    #[test]
    fn test_convert_offset_out_of_bounds_error() {
        let pool = NDArrayPool::new(1_000_000);
        let src = make_4x4_u8();
        // offset 2 + size 4 = 6 > source size 4 on axis 0.
        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 2,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: false,
            },
        ];

        let result = pool.convert(&src, &dims_out, NDDataType::UInt8);
        assert!(result.is_err());
    }

    #[test]
    fn test_convert_preserves_metadata() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = make_4x4_u8();
        src.time_stamp = 12345.678;

        let dims_out = vec![
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: false,
            },
            NDDimension {
                size: 4,
                offset: 0,
                binning: 1,
                reverse: false,
            },
        ];

        let out = pool.convert(&src, &dims_out, NDDataType::UInt8).unwrap();
        assert_eq!(out.time_stamp, 12345.678);
    }

    #[test]
    fn test_convert_binning_and_reverse_combined() {
        let pool = NDArrayPool::new(1_000_000);
        let mut src = NDArray::new(vec![NDDimension::new(4)], NDDataType::UInt16);
        if let NDDataBuffer::U16(ref mut v) = src.data {
            v[0] = 1;
            v[1] = 2;
            v[2] = 3;
            v[3] = 4;
        }

        let dims_out = vec![NDDimension {
            size: 4,
            offset: 0,
            binning: 2,
            reverse: true,
        }];

        // Reverse applies to the binned coordinates: bins are (1+2)=3 and
        // (3+4)=7, then the axis is flipped.
        let out = pool.convert(&src, &dims_out, NDDataType::UInt16).unwrap();
        assert_eq!(out.dims[0].size, 2);
        if let NDDataBuffer::U16(ref v) = out.data {
            assert_eq!(v[0], 7);
            assert_eq!(v[1], 3);
        } else {
            panic!("wrong type");
        }
    }
}