use crate::helpers::simpsons_weights;
use crate::iter_maybe_parallel;
use rand::prelude::*;
use rand_distr::StandardNormal;
#[cfg(feature = "parallel")]
use rayon::iter::ParallelIterator;

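/// Fraiman-Muniz (FM) depth for univariate functional data.
///
/// Curves are stored column-major: value `t` of curve `i` sits at
/// `data[i + t * nrows]`, so two curves observed at three grid points are laid
/// out as `[x0(t0), x1(t0), x0(t1), x1(t1), x0(t2), x1(t2)]` with `nobj = 2`
/// and `n_points = 3`. For each curve in `data_obj`, the pointwise depth
/// `min(Fn(x), 1 - Fn(x))` -- with `Fn` the empirical CDF of `data_ori` at that
/// grid point -- is averaged over the grid; `scale = true` doubles the
/// pointwise depth so that its maximum is 1.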
pub fn fraiman_muniz_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    scale: bool,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    let scale_factor = if scale { 2.0 } else { 1.0 };

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut depth_sum = 0.0;

            for t in 0..n_points {
                let x_t = data_obj[i + t * nobj];
                let mut le_count = 0;

                for j in 0..nori {
                    let y_t = data_ori[j + t * nori];
                    if y_t <= x_t {
                        le_count += 1;
                    }
                }

                let fn_x = le_count as f64 / nori as f64;
                let univariate_depth = fn_x.min(1.0 - fn_x) * scale_factor;
                depth_sum += univariate_depth;
            }

            depth_sum / n_points as f64
        })
        .collect()
}

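/// Fraiman-Muniz depth for functional data on a two-dimensional domain.
///
/// The grid is assumed to be flattened to `n_points` values per observation in
/// the same column-major layout, so the computation is identical to the 1d
/// case and this simply delegates to [`fraiman_muniz_1d`].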
pub fn fraiman_muniz_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    scale: bool,
) -> Vec<f64> {
    fraiman_muniz_1d(data_obj, data_ori, nobj, nori, n_points, scale)
}

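/// h-modal depth with a Gaussian kernel.
///
/// For each curve, the depth is the average over the reference sample of
/// `exp(-0.5 * (d / h)^2)`, where `d` is the root-mean-square distance to the
/// reference curve over the `n_points` grid values and `h` is the bandwidth.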
pub fn modal_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    h: f64,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut depth = 0.0;

            for j in 0..nori {
                let mut dist_sq = 0.0;
                for t in 0..n_points {
                    let diff = data_obj[i + t * nobj] - data_ori[j + t * nori];
                    dist_sq += diff * diff;
                }
                let dist = (dist_sq / n_points as f64).sqrt();
                let kernel_val = (-0.5 * (dist / h).powi(2)).exp();
                depth += kernel_val;
            }

            depth / nori as f64
        })
        .collect()
}

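/// h-modal depth for functional data on a two-dimensional domain; the
/// flattened-grid computation is identical, so this delegates to [`modal_1d`].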
pub fn modal_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    h: f64,
) -> Vec<f64> {
    modal_1d(data_obj, data_ori, nobj, nori, n_points, h)
}

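/// Random projection (RP) depth.
///
/// Draws `nproj` directions uniformly on the unit sphere of `R^n_points`,
/// projects every curve onto each direction, computes the univariate depth
/// `(min(#below, #above) + 1) / (nori + 1)` of the projected value within the
/// projected reference sample, and averages over the projections.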
pub fn random_projection_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    nproj: usize,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 || nproj == 0 {
        return Vec::new();
    }

    let mut rng = rand::thread_rng();
    let projections: Vec<Vec<f64>> = (0..nproj)
        .map(|_| {
            let mut proj: Vec<f64> = (0..n_points).map(|_| rng.sample(StandardNormal)).collect();
            let norm: f64 = proj.iter().map(|x| x * x).sum::<f64>().sqrt();
            proj.iter_mut().for_each(|x| *x /= norm);
            proj
        })
        .collect();

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut total_depth = 0.0;

            for proj in &projections {
                let mut proj_i = 0.0;
                for t in 0..n_points {
                    proj_i += data_obj[i + t * nobj] * proj[t];
                }

                let proj_ori: Vec<f64> = (0..nori)
                    .map(|j| {
                        let mut p = 0.0;
                        for t in 0..n_points {
                            p += data_ori[j + t * nori] * proj[t];
                        }
                        p
                    })
                    .collect();

                let below = proj_ori.iter().filter(|&&x| x < proj_i).count();
                let above = proj_ori.iter().filter(|&&x| x > proj_i).count();
                let depth = (below.min(above) as f64 + 1.0) / (nori as f64 + 1.0);

                total_depth += depth;
            }

            total_depth / nproj as f64
        })
        .collect()
}

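/// Random projection depth for functional data on a two-dimensional domain;
/// delegates to [`random_projection_1d`] on the flattened grid.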
pub fn random_projection_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    nproj: usize,
) -> Vec<f64> {
    random_projection_1d(data_obj, data_ori, nobj, nori, n_points, nproj)
}

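/// Random Tukey (halfspace) depth.
///
/// Uses the same random projections and univariate depth as
/// [`random_projection_1d`], but returns the minimum over the `nproj`
/// directions instead of the average.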
pub fn random_tukey_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    nproj: usize,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 || nproj == 0 {
        return Vec::new();
    }

    let mut rng = rand::thread_rng();
    let projections: Vec<Vec<f64>> = (0..nproj)
        .map(|_| {
            let mut proj: Vec<f64> = (0..n_points).map(|_| rng.sample(StandardNormal)).collect();
            let norm: f64 = proj.iter().map(|x| x * x).sum::<f64>().sqrt();
            proj.iter_mut().for_each(|x| *x /= norm);
            proj
        })
        .collect();

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut min_depth = f64::INFINITY;

            for proj in &projections {
                let mut proj_i = 0.0;
                for t in 0..n_points {
                    proj_i += data_obj[i + t * nobj] * proj[t];
                }

                let proj_ori: Vec<f64> = (0..nori)
                    .map(|j| {
                        let mut p = 0.0;
                        for t in 0..n_points {
                            p += data_ori[j + t * nori] * proj[t];
                        }
                        p
                    })
                    .collect();

                let below = proj_ori.iter().filter(|&&x| x < proj_i).count();
                let above = proj_ori.iter().filter(|&&x| x > proj_i).count();
                let depth = (below.min(above) as f64 + 1.0) / (nori as f64 + 1.0);

                min_depth = min_depth.min(depth);
            }

            min_depth
        })
        .collect()
}

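/// Random Tukey depth for functional data on a two-dimensional domain;
/// delegates to [`random_tukey_1d`] on the flattened grid.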
pub fn random_tukey_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    nproj: usize,
) -> Vec<f64> {
    random_tukey_1d(data_obj, data_ori, nobj, nori, n_points, nproj)
}

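/// Functional spatial depth.
///
/// For each curve, the depth is one minus the Euclidean norm (over the grid)
/// of the average unit direction from the curve to the reference curves;
/// directions with norm below `1e-10` are skipped.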
pub fn functional_spatial_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut sum_unit = vec![0.0; n_points];

            for j in 0..nori {
                let mut direction = vec![0.0; n_points];
                let mut norm_sq = 0.0;

                for t in 0..n_points {
                    direction[t] = data_ori[j + t * nori] - data_obj[i + t * nobj];
                    norm_sq += direction[t] * direction[t];
                }

                let norm = norm_sq.sqrt();
                if norm > 1e-10 {
                    for t in 0..n_points {
                        sum_unit[t] += direction[t] / norm;
                    }
                }
            }

            let mut avg_norm_sq = 0.0;
            for t in 0..n_points {
                let avg = sum_unit[t] / nori as f64;
                avg_norm_sq += avg * avg;
            }

            1.0 - avg_norm_sq.sqrt()
        })
        .collect()
}

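/// Functional spatial depth for data on a two-dimensional domain; delegates to
/// [`functional_spatial_1d`] on the flattened grid.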
pub fn functional_spatial_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
) -> Vec<f64> {
    functional_spatial_1d(data_obj, data_ori, nobj, nori, n_points)
}

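/// Kernelized functional spatial depth.
///
/// Spatial depth computed in the feature space of the Gaussian kernel
/// `K(x, y) = exp(-d^2(x, y) / h^2)`, where `d^2` is the squared difference
/// integrated over `argvals` with Simpson's-rule weights (`simpsons_weights`).
/// The result is one minus the norm of the average unit direction from the
/// curve to the reference curves in that feature space; reference curves at
/// (numerically) zero feature-space distance are skipped.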
pub fn kernel_functional_spatial_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    argvals: &[f64],
    h: f64,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    let weights = simpsons_weights(argvals);
    let h_sq = h * h;

    let norm_sq_integrated = |data1: &[f64],
                              row1: usize,
                              nrow1: usize,
                              data2: &[f64],
                              row2: usize,
                              nrow2: usize|
     -> f64 {
        let mut sum = 0.0;
        for t in 0..n_points {
            let diff = data1[row1 + t * nrow1] - data2[row2 + t * nrow2];
            sum += weights[t] * diff * diff;
        }
        sum
    };

    let kern = |dist_sq: f64| -> f64 { (-dist_sq / h_sq).exp() };

    let m1_upper: Vec<(usize, usize, f64)> = iter_maybe_parallel!(0..nori)
        .flat_map(|j| {
            ((j + 1)..nori)
                .map(|k| {
                    let mut sum = 0.0;
                    for t in 0..n_points {
                        let diff = data_ori[j + t * nori] - data_ori[k + t * nori];
                        sum += weights[t] * diff * diff;
                    }
                    let kval = (-sum / h_sq).exp();
                    (j, k, kval)
                })
                .collect::<Vec<_>>()
        })
        .collect();

    let mut m1 = vec![vec![0.0; nori]; nori];
    for j in 0..nori {
        m1[j][j] = 1.0;
    }
    for (j, k, kval) in m1_upper {
        m1[j][k] = kval;
        m1[k][j] = kval;
    }

    let nori_f64 = nori as f64;

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let k02_i = 1.0;

            let m2: Vec<f64> = (0..nori)
                .map(|j| {
                    let d_sq = norm_sq_integrated(data_obj, i, nobj, data_ori, j, nori);
                    kern(d_sq)
                })
                .collect();

            let mut total_sum = 0.0;
            let mut valid_count = 0;

            for j in 0..nori {
                let k01_j = 1.0;
                let denom_j_sq = k02_i + k01_j - 2.0 * m2[j];

                if denom_j_sq < 1e-20 {
                    continue;
                }
                let denom_j = denom_j_sq.sqrt();

                for k in 0..nori {
                    let k01_k = 1.0;
                    let denom_k_sq = k02_i + k01_k - 2.0 * m2[k];

                    if denom_k_sq < 1e-20 {
                        continue;
                    }
                    let denom_k = denom_k_sq.sqrt();

                    let numerator = k02_i + m1[j][k] - m2[j] - m2[k];
                    let denom = denom_j * denom_k;

                    if denom > 1e-20 {
                        let m_ijk = numerator / denom;
                        if m_ijk.is_finite() {
                            total_sum += m_ijk;
                            valid_count += 1;
                        }
                    }
                }
            }

            if valid_count > 0 && total_sum >= 0.0 {
                1.0 - total_sum.sqrt() / nori_f64
            } else if total_sum < 0.0 {
                1.0
            } else {
                0.0
            }
        })
        .collect()
}

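/// Kernelized functional spatial depth for data on a two-dimensional domain.
/// Same computation as [`kernel_functional_spatial_1d`], but the squared
/// distance inside the kernel is a plain (unweighted) sum over the flattened
/// grid rather than a Simpson-weighted integral.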
pub fn kernel_functional_spatial_2d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
    h: f64,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    let h_sq = h * h;

    let norm_sq = |data1: &[f64],
                   row1: usize,
                   nrow1: usize,
                   data2: &[f64],
                   row2: usize,
                   nrow2: usize|
     -> f64 {
        let mut sum = 0.0;
        for t in 0..n_points {
            let diff = data1[row1 + t * nrow1] - data2[row2 + t * nrow2];
            sum += diff * diff;
        }
        sum
    };

    let kern = |dist_sq: f64| -> f64 { (-dist_sq / h_sq).exp() };

    let m1_upper: Vec<(usize, usize, f64)> = iter_maybe_parallel!(0..nori)
        .flat_map(|j| {
            ((j + 1)..nori)
                .map(|k| {
                    let d_sq = norm_sq(data_ori, j, nori, data_ori, k, nori);
                    let kval = kern(d_sq);
                    (j, k, kval)
                })
                .collect::<Vec<_>>()
        })
        .collect();

    let mut m1_mat = vec![vec![0.0; nori]; nori];
    for j in 0..nori {
        m1_mat[j][j] = 1.0;
    }
    for (j, k, kval) in m1_upper {
        m1_mat[j][k] = kval;
        m1_mat[k][j] = kval;
    }

    let nori_f64 = nori as f64;

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let k02_i = 1.0;

            let m2: Vec<f64> = (0..nori)
                .map(|j| {
                    let d_sq = norm_sq(data_obj, i, nobj, data_ori, j, nori);
                    kern(d_sq)
                })
                .collect();

            let mut total_sum = 0.0;
            let mut valid_count = 0;

            for j in 0..nori {
                let k01_j = 1.0;
                let denom_j_sq = k02_i + k01_j - 2.0 * m2[j];

                if denom_j_sq < 1e-20 {
                    continue;
                }
                let denom_j = denom_j_sq.sqrt();

                for k in 0..nori {
                    let k01_k = 1.0;
                    let denom_k_sq = k02_i + k01_k - 2.0 * m2[k];

                    if denom_k_sq < 1e-20 {
                        continue;
                    }
                    let denom_k = denom_k_sq.sqrt();

                    let numerator = k02_i + m1_mat[j][k] - m2[j] - m2[k];
                    let denom = denom_j * denom_k;

                    if denom > 1e-20 {
                        let m_ijk = numerator / denom;
                        if m_ijk.is_finite() {
                            total_sum += m_ijk;
                            valid_count += 1;
                        }
                    }
                }
            }

            if valid_count > 0 && total_sum >= 0.0 {
                1.0 - total_sum.sqrt() / nori_f64
            } else if total_sum < 0.0 {
                1.0
            } else {
                0.0
            }
        })
        .collect()
}

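/// Band depth (BD) with bands formed by pairs of reference curves (J = 2).
///
/// The depth of a curve is the fraction of the `nori * (nori - 1) / 2` pairs
/// whose pointwise envelope contains the curve at every grid point. Returns an
/// empty vector when fewer than two reference curves are supplied.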
pub fn band_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
) -> Vec<f64> {
    if nobj == 0 || nori < 2 || n_points == 0 {
        return Vec::new();
    }

    let n_pairs = (nori * (nori - 1)) / 2;

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut count_in_band = 0usize;

            for j in 0..nori {
                for k in (j + 1)..nori {
                    let mut inside_band = true;

                    for t in 0..n_points {
                        let x_t = data_obj[i + t * nobj];
                        let y_j_t = data_ori[j + t * nori];
                        let y_k_t = data_ori[k + t * nori];

                        let band_min = y_j_t.min(y_k_t);
                        let band_max = y_j_t.max(y_k_t);

                        if x_t < band_min || x_t > band_max {
                            inside_band = false;
                            break;
                        }
                    }

                    if inside_band {
                        count_in_band += 1;
                    }
                }
            }

            count_in_band as f64 / n_pairs as f64
        })
        .collect()
}

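/// Modified band depth (MBD) with J = 2.
///
/// For each pair of reference curves, the proportion of grid points at which
/// the curve lies inside the pairwise envelope is computed; the depth is the
/// average of this proportion over all pairs.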
pub fn modified_band_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
) -> Vec<f64> {
    if nobj == 0 || nori < 2 || n_points == 0 {
        return Vec::new();
    }

    let n_pairs = (nori * (nori - 1)) / 2;

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut total_proportion = 0.0;

            for j in 0..nori {
                for k in (j + 1)..nori {
                    let mut count_inside = 0usize;

                    for t in 0..n_points {
                        let x_t = data_obj[i + t * nobj];
                        let y_j_t = data_ori[j + t * nori];
                        let y_k_t = data_ori[k + t * nori];

                        let band_min = y_j_t.min(y_k_t);
                        let band_max = y_j_t.max(y_k_t);

                        if x_t >= band_min && x_t <= band_max {
                            count_inside += 1;
                        }
                    }

                    total_proportion += count_inside as f64 / n_points as f64;
                }
            }

            total_proportion / n_pairs as f64
        })
        .collect()
}

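/// Modified epigraph index (MEI).
///
/// For each curve, the average over reference curves of the proportion of grid
/// points at which the reference curve lies strictly above the curve; ties
/// (within `1e-12`) count as one half.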
pub fn modified_epigraph_index_1d(
    data_obj: &[f64],
    data_ori: &[f64],
    nobj: usize,
    nori: usize,
    n_points: usize,
) -> Vec<f64> {
    if nobj == 0 || nori == 0 || n_points == 0 {
        return Vec::new();
    }

    iter_maybe_parallel!(0..nobj)
        .map(|i| {
            let mut total = 0.0;

            for j in 0..nori {
                let mut count = 0.0;

                for t in 0..n_points {
                    let xi = data_obj[i + t * nobj];
                    let xj = data_ori[j + t * nori];

                    if xi < xj {
                        count += 1.0;
                    } else if (xi - xj).abs() < 1e-12 {
                        count += 0.5;
                    }
                }

                total += count / n_points as f64;
            }

            total / nori as f64
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::f64::consts::PI;

    fn uniform_grid(n: usize) -> Vec<f64> {
        (0..n).map(|i| i as f64 / (n - 1) as f64).collect()
    }

    fn generate_centered_data(n: usize, m: usize) -> Vec<f64> {
        let argvals = uniform_grid(m);
        let mut data = vec![0.0; n * m];
        for i in 0..n {
            let offset = (i as f64 - n as f64 / 2.0) / (n as f64);
            for j in 0..m {
                data[i + j * n] = (2.0 * PI * argvals[j]).sin() + offset;
            }
        }
        data
    }

    #[test]
    fn test_fraiman_muniz() {
        let data = vec![1.0, 1.0, 2.0, 2.0];
        let depths = fraiman_muniz_1d(&data, &data, 2, 2, 2, true);
        assert_eq!(depths.len(), 2);
    }

    #[test]
    fn test_fraiman_muniz_central_deeper() {
        let n = 20;
        let m = 30;
        let data = generate_centered_data(n, m);
        let depths = fraiman_muniz_1d(&data, &data, n, n, m, true);

        let central_depth = depths[n / 2];
        let edge_depth = depths[0];
        assert!(
            central_depth > edge_depth,
            "Central curve should be deeper: {} > {}",
            central_depth,
            edge_depth
        );
    }

    #[test]
    fn test_fraiman_muniz_range() {
        let n = 15;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = fraiman_muniz_1d(&data, &data, n, n, m, true);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "Depth should be in [0, 1]");
        }
    }

    #[test]
    fn test_fraiman_muniz_invalid() {
        assert!(fraiman_muniz_1d(&[], &[], 0, 0, 0, true).is_empty());
    }

    #[test]
    fn test_modal_central_deeper() {
        let n = 20;
        let m = 30;
        let data = generate_centered_data(n, m);
        let depths = modal_1d(&data, &data, n, n, m, 0.5);

        let central_depth = depths[n / 2];
        let edge_depth = depths[0];
        assert!(central_depth > edge_depth, "Central curve should be deeper");
    }

    #[test]
    fn test_modal_positive() {
        let n = 10;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = modal_1d(&data, &data, n, n, m, 0.5);

        for d in &depths {
            assert!(*d > 0.0, "Modal depth should be positive");
        }
    }

    #[test]
    fn test_modal_invalid() {
        assert!(modal_1d(&[], &[], 0, 0, 0, 0.5).is_empty());
    }

    #[test]
    fn test_rp_depth_range() {
        let n = 15;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = random_projection_1d(&data, &data, n, n, m, 50);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "RP depth should be in [0, 1]");
        }
    }

    #[test]
    fn test_rp_depth_invalid() {
        assert!(random_projection_1d(&[], &[], 0, 0, 0, 10).is_empty());
    }

    #[test]
    fn test_random_tukey_range() {
        let n = 15;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = random_tukey_1d(&data, &data, n, n, m, 50);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "Tukey depth should be in [0, 1]");
        }
    }

    #[test]
    fn test_random_tukey_invalid() {
        assert!(random_tukey_1d(&[], &[], 0, 0, 0, 10).is_empty());
    }

    #[test]
    fn test_functional_spatial_range() {
        let n = 15;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = functional_spatial_1d(&data, &data, n, n, m);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "Spatial depth should be in [0, 1]");
        }
    }

    #[test]
    fn test_functional_spatial_invalid() {
        assert!(functional_spatial_1d(&[], &[], 0, 0, 0).is_empty());
    }

    #[test]
    fn test_band_depth_central_deeper() {
        let n = 10;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = band_1d(&data, &data, n, n, m);

        let central_depth = depths[n / 2];
        let edge_depth = depths[0];
        assert!(
            central_depth >= edge_depth,
            "Central curve should have higher band depth"
        );
    }

    #[test]
    fn test_band_depth_range() {
        let n = 10;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = band_1d(&data, &data, n, n, m);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "Band depth should be in [0, 1]");
        }
    }

    #[test]
    fn test_band_depth_invalid() {
        assert!(band_1d(&[], &[], 0, 0, 0).is_empty());
        assert!(band_1d(&[1.0, 2.0], &[1.0, 2.0], 1, 1, 2).is_empty());
    }

    #[test]
    fn test_modified_band_depth_range() {
        let n = 10;
        let m = 20;
        let data = generate_centered_data(n, m);
        let depths = modified_band_1d(&data, &data, n, n, m);

        for d in &depths {
            assert!(*d >= 0.0 && *d <= 1.0, "MBD should be in [0, 1]");
        }
    }

    #[test]
    fn test_modified_band_depth_invalid() {
        assert!(modified_band_1d(&[], &[], 0, 0, 0).is_empty());
    }

    #[test]
    fn test_mei_range() {
        let n = 15;
        let m = 20;
        let data = generate_centered_data(n, m);
        let mei = modified_epigraph_index_1d(&data, &data, n, n, m);

        for d in &mei {
            assert!(*d >= 0.0 && *d <= 1.0, "MEI should be in [0, 1]");
        }
    }

    #[test]
    fn test_mei_invalid() {
        assert!(modified_epigraph_index_1d(&[], &[], 0, 0, 0).is_empty());
    }
}