1use std::{
8 cmp::{max, Ordering},
9 fs::File,
10 io::{BufRead, BufReader, Error as IoError, Write},
11 path::Path,
12 slice::Iter,
13 str,
14 time::SystemTime,
15 vec::IntoIter,
16};
17
18use base64::{engine::general_purpose::STANDARD, DecodeError, Engine};
19use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
20use chrono::{DateTime, SecondsFormat, Utc};
21use log::debug;
22
23use crate::{
24 nested::{
25 hash,
26 map::fits::{
27 error::FitsError,
28 read::{
29 check_keyword_and_parse_uint_val, check_keyword_and_str_val, check_keyword_and_val,
30 get_str_val_no_quote, next_36_chunks_of_80_bytes, parse_uint_val,
31 },
32 write::{
33 write_final_padding_ioerr, write_keyword_record, write_primary_hdu_ioerr,
34 write_str_keyword_record, write_uint_mandatory_keyword_record,
35 },
36 },
37 to_range,
38 },
39 nside_square_unsafe,
40};
41
/// Builder of [BMOC]s performing no validity check on the pushed cells
/// (hence "unsafe"): callers are responsible for the push order and for
/// non-overlap. See the sorting/packing `to_*` methods for the safe exits.
#[derive(Debug)]
pub struct BMOCBuilderUnsafe {
  // Maximum depth (HEALPix order) of the BMOC being built.
  depth_max: u8,
  // Raw encoded cells accumulated so far; `None` once the builder has been consumed.
  entries: Option<Vec<u64>>,
}
50
impl BMOCBuilderUnsafe {
  /// Creates a new builder of the given maximum depth, pre-allocating room for
  /// `capacity` cells.
  pub fn new(depth_max: u8, capacity: usize) -> BMOCBuilderUnsafe {
    BMOCBuilderUnsafe {
      depth_max,
      entries: Some(Vec::with_capacity(capacity)),
    }
  }

  /// Appends the cell of given `depth` and `hash`, flagged with `is_full`.
  ///
  /// # Panics
  /// If the builder has already been consumed (entries taken).
  pub fn push(&mut self, depth: u8, hash: u64, is_full: bool) -> &mut BMOCBuilderUnsafe {
    if let Some(ref mut v) = self.entries {
      v.push(build_raw_value(depth, hash, is_full, self.depth_max));
    } else {
      panic!("Empty builder, you have to re-init it before re-using it!");
    }
    self
  }

  /// Appends an already-encoded raw value as-is, without any check.
  ///
  /// # Panics
  /// If the builder has already been consumed.
  fn push_raw_unsafe(&mut self, raw_value: u64) -> &mut BMOCBuilderUnsafe {
    if let Some(ref mut v) = self.entries {
      v.push(raw_value);
    } else {
      panic!("Empty builder, you have to re-init it before re-using it!");
    }
    self
  }

  /// Appends all cells of the given `depth` in the hash range
  /// `[from_hash, to_hash)`, all flagged with `are_full`.
  ///
  /// # Panics
  /// If the builder has already been consumed.
  pub fn push_all(
    &mut self,
    depth: u8,
    from_hash: u64,
    to_hash: u64,
    are_full: bool,
  ) -> &mut BMOCBuilderUnsafe {
    if let Some(ref mut v) = self.entries {
      for h in from_hash..to_hash {
        v.push(build_raw_value(depth, h, are_full, self.depth_max));
      }
    } else {
      panic!("Empty builder, you have to re-init it before re-using it!");
    }
    self
  }

  /// Consumes the builder into a BMOC made of the entries exactly as pushed
  /// (no sorting, no packing).
  #[allow(clippy::wrong_self_convention)]
  pub fn to_bmoc(mut self) -> BMOC {
    BMOC::create_unsafe(
      self.depth_max,
      self
        .entries
        .take()
        .expect("Empty builder!")
        .into_boxed_slice(),
    )
  }

  /// Consumes the builder, sorting the entries before building the BMOC.
  #[allow(clippy::wrong_self_convention)]
  pub fn to_bmoc_from_unordered(mut self) -> BMOC {
    let mut res = self.entries.take().expect("Empty builder!");
    res.sort_unstable();
    BMOC::create_unsafe(self.depth_max, res.into_boxed_slice())
  }

  /// In-place packing: repeatedly merges each group of 4 "full" sibling cells
  /// into their parent cell, until a complete pass performs no merge.
  /// Takes the entries out of the builder (it must be re-initialised before reuse).
  fn pack(&mut self) -> Vec<u64> {
    let mut entries = self.entries.take().expect("Empty builder!");
    // Loop until a full pass leaves the number of cells unchanged.
    let mut prev_to_index = 0_usize;
    let mut curr_to_index = entries.len();
    while prev_to_index != curr_to_index {
      prev_to_index = curr_to_index;
      let mut i_prev_moc = 0_usize; // read index in the previous pass result
      let mut i_curr_moc = 0_usize; // write index of the compacted result
      while i_prev_moc < prev_to_index {
        let mut curr_cell = entries[i_prev_moc];
        i_prev_moc += 1;
        let mut curr_cell_depth = get_depth(curr_cell, self.depth_max);
        let mut curr_cell_hash =
          get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
        // Copy as-is every cell that cannot start a merge: depth 0, partially
        // covered, or not the first of its 4 siblings.
        while i_prev_moc < prev_to_index
          && (curr_cell_depth == 0
            || is_partial(curr_cell)
            || is_not_first_cell_of_larger_cell(curr_cell_hash))
        {
          if i_curr_moc != i_prev_moc {
            entries[i_curr_moc] = curr_cell;
            i_curr_moc += 1;
          }
          curr_cell = entries[i_prev_moc];
          i_prev_moc += 1;
          curr_cell_depth = get_depth(curr_cell, self.depth_max);
          curr_cell_hash = get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
        }
        // curr_cell is a full, first-of-4 cell: merge if its 3 siblings follow,
        // all full too.
        if i_prev_moc + 2 < prev_to_index
          && entries[i_prev_moc]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 1, true, self.depth_max)
          && entries[i_prev_moc + 1]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 2, true, self.depth_max)
          && entries[i_prev_moc + 2]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 3, true, self.depth_max)
        {
          // Replace the 4 siblings by their (full) parent cell.
          entries[i_curr_moc] = build_raw_value(
            curr_cell_depth - 1,
            curr_cell_hash >> 2,
            true,
            self.depth_max,
          );
          i_curr_moc += 1;
          i_prev_moc += 3;
        } else if i_curr_moc != i_prev_moc {
          entries[i_curr_moc] = curr_cell;
          i_curr_moc += 1;
        }
      }
      curr_to_index = i_curr_moc;
    }
    entries.truncate(curr_to_index);
    entries
  }

  /// Re-encodes a raw value whose depth is already <= `new_depth` for a BMOC of
  /// depth `new_depth`, preserving the flag bit (lowest bit).
  fn low_depth_raw_val_at_lower_depth(&self, raw_value: u64, new_depth: u8) -> u64 {
    debug_assert!(self.get_depth(raw_value) <= new_depth);
    debug_assert!(new_depth <= self.depth_max);
    let twice_delta_depth = (self.depth_max - new_depth) << 1;
    (raw_value >> twice_delta_depth) | (raw_value & 1_u64)
  }

  /// Degrades the entries to `new_depth`, in place: cells already at a depth
  /// <= `new_depth` are re-encoded; runs of deeper cells are collapsed into
  /// their `new_depth` ancestor, encoded as a partial cell (`(hash << 2) | 2`).
  ///
  /// # Panics
  /// If `new_depth >= depth_max`.
  fn to_lower_depth(&self, new_depth: u8, mut entries: Vec<u64>) -> Vec<u64> {
    if new_depth >= self.depth_max {
      panic!("The given depth must be lower than the depth max of the BMOC");
    }
    let mut i_new = 0_usize;
    // First pass: copy leading low-depth cells in place; stop at the first
    // deeper cell, remembering its ancestor hash at new_depth.
    let mut prev_hash_at_new_depth = loop {
      if i_new == entries.len() {
        break None;
      }
      let raw_value = entries[i_new];
      let depth = self.get_depth(raw_value);
      if depth <= new_depth {
        entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
        i_new += 1;
      } else {
        break Some(get_hash_from_delta_depth(
          raw_value,
          self.depth_max - new_depth,
        ));
      }
    };
    for i in (i_new + 1)..entries.len() {
      let raw_value = entries[i];
      let depth = self.get_depth(raw_value);
      if depth <= new_depth {
        // Flush the pending ancestor (partial-cell encoding) before copying.
        if prev_hash_at_new_depth.is_some() {
          entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
          i_new += 1;
        }
        entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
        i_new += 1;
      } else {
        let curr_hash_at_new_depth =
          get_hash_from_delta_depth(raw_value, self.depth_max - new_depth);
        if let Some(prev_val_at_new_depth) = prev_hash_at_new_depth {
          if prev_val_at_new_depth != curr_hash_at_new_depth {
            // New ancestor reached: flush the previous one and track the new one.
            entries[i_new] = (prev_val_at_new_depth << 2) | 2_u64;
            i_new += 1;
            prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
          }
        } else {
          prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
        }
      }
    }
    // Flush the last pending ancestor, if any.
    if prev_hash_at_new_depth.is_some() {
      entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
      i_new += 1;
    }
    entries.truncate(i_new);
    entries
  }

  /// Packs the entries (see [Self::pack]) and builds the resulting BMOC.
  #[allow(clippy::wrong_self_convention)]
  pub fn to_bmoc_packing(&mut self) -> BMOC {
    let entries = self.pack();
    BMOC::create_unsafe(self.depth_max, entries.into_boxed_slice())
  }

  /// Builds a BMOC degraded to the given (strictly smaller) depth.
  #[allow(clippy::wrong_self_convention)]
  pub fn to_lower_depth_bmoc(&mut self, new_depth: u8) -> BMOC {
    let entries = self.entries.take().expect("Empty builder!");
    let entries = self.to_lower_depth(new_depth, entries);
    BMOC::create_unsafe(new_depth, entries.into_boxed_slice())
  }

  /// Same as [Self::to_lower_depth_bmoc], packing the entries first.
  #[allow(clippy::wrong_self_convention)]
  pub fn to_lower_depth_bmoc_packing(&mut self, new_depth: u8) -> BMOC {
    let entries = self.pack();
    let entries = self.to_lower_depth(new_depth, entries);
    BMOC::create_unsafe(new_depth, entries.into_boxed_slice())
  }

  /// Depth encoded in the given raw value (flag bit removed first).
  #[inline]
  fn get_depth(&self, raw_value: u64) -> u8 {
    self.get_depth_no_flag(rm_flag(raw_value))
  }
  /// Depth from a flag-less raw value: each level below `depth_max` adds one
  /// trailing zero bit-pair below the sentinel bit.
  #[inline]
  fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
    self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
  }
}
279
/// Result of testing a position against a BMOC (see [BMOC::test_coo]).
pub enum Status {
  /// The position falls in a fully covered cell.
  IN,
  /// The position falls outside the coverage.
  OUT,
  /// The position falls in a partially covered cell: cannot decide.
  UNKNOWN,
}
288
/// Builder accepting cell hashes at a single, fixed depth.
/// Hashes are buffered; when the buffer is full it is turned into a BMOC which
/// is merged (OR) into the accumulated result.
pub struct BMOCBuilderFixedDepth {
  // Depth of every pushed hash.
  depth: u8,
  // BMOC accumulated so far (`None` until the first drain).
  bmoc: Option<BMOC>,
  // Full/partial flag assigned to every built cell.
  is_full: bool,
  // Buffered hashes, drained when capacity is reached.
  buffer: Vec<u64>,
  // `true` while the buffered hashes are in non-decreasing order.
  sorted: bool,
}
297
impl BMOCBuilderFixedDepth {
  /// Creates a builder with a default buffer capacity of 10 million hashes.
  pub fn new(depth: u8, is_full: bool) -> BMOCBuilderFixedDepth {
    BMOCBuilderFixedDepth::with_capacity(depth, is_full, 10_000_000)
  }

  /// Creates a builder of the given depth; `is_full` is the flag assigned to
  /// every built cell, `buff_capacity` the number of buffered hashes triggering
  /// an automatic drain.
  pub fn with_capacity(depth: u8, is_full: bool, buff_capacity: usize) -> BMOCBuilderFixedDepth {
    BMOCBuilderFixedDepth {
      depth,
      bmoc: None,
      is_full,
      buffer: Vec::with_capacity(buff_capacity),
      sorted: true,
    }
  }

  /// Adds the given hash (at the builder depth). Consecutive duplicates are
  /// skipped; an out-of-order hash simply marks the buffer as unsorted.
  pub fn push(&mut self, hash: u64) {
    if let Some(h) = self.buffer.last() {
      if *h == hash {
        return; // consecutive duplicate
      } else if self.sorted && *h > hash {
        self.sorted = false;
      }
    }
    self.buffer.push(hash);
    if self.buffer.len() == self.buffer.capacity() {
      self.drain_buffer();
    }
  }

  /// Drains the remaining buffered hashes and returns the accumulated BMOC,
  /// leaving the builder empty.
  #[allow(clippy::wrong_self_convention)]
  pub fn to_bmoc(&mut self) -> Option<BMOC> {
    self.drain_buffer();
    self.bmoc.take()
  }

  /// Turns the buffered hashes into a BMOC and ORs it into the accumulated one.
  fn drain_buffer(&mut self) {
    if !self.sorted {
      // Sort and fully deduplicate (push only catches consecutive duplicates).
      self.buffer.sort_unstable();
      self.buffer.dedup();
    }
    let new_bmoc = self.buff_to_bmoc();
    self.clear_buff();
    self.bmoc = Some(match self.bmoc.take() {
      Some(prev_bmoc) => prev_bmoc.or(&new_bmoc),
      None => new_bmoc,
    })
  }

  /// Builds a BMOC from the sorted buffer, in place: runs of consecutive,
  /// aligned hashes are merged into a single lower-depth cell.
  fn buff_to_bmoc(&mut self) -> BMOC {
    let mut i = 0_usize; // read index in the buffer
    let mut k = 0_usize; // write index of the produced raw values
    while i < self.buffer.len() {
      let h = self.buffer[i];
      let sequence_len = self.largest_lower_cell_sequence_len(h, &self.buffer[i..]);
      let delta_depth = sequence_len.next_power_of_two();
      // Depth levels we can go up = log4 of the mergeable power of 4.
      let delta_depth = if delta_depth > sequence_len {
        // NOTE(review): `>> 2` looks conservative here (e.g. a run of 5 cells
        // yields delta_depth 0 instead of 1). The output stays valid — later
        // packing re-merges siblings — but possibly not minimal; TODO confirm
        // the intended formula.
        delta_depth.trailing_zeros() >> 2
      } else {
        debug_assert_eq!(delta_depth, sequence_len);
        delta_depth.trailing_zeros() >> 1
      } as u8;
      let twice_dd = delta_depth << 1;
      let sequence_len = 1_usize << twice_dd; // cells actually consumed by the merge
      self.buffer[k] = build_raw_value(
        self.depth - delta_depth,
        h >> twice_dd,
        self.is_full,
        self.depth,
      );
      k += 1;
      i += sequence_len;
    }
    BMOC::create_unsafe_copying(self.depth, &self.buffer[0..k])
  }

  /// Length of the run of consecutive hashes starting at `h` that could be
  /// merged into one lower-depth cell, capped by the alignment of `h`
  /// (its trailing zero bit-pairs) and by the slice length.
  #[inline]
  fn largest_lower_cell_sequence_len(&self, mut h: u64, entries: &[u64]) -> usize {
    let dd = ((h.trailing_zeros() >> 1) as u8).min(self.depth);
    let n = 1_usize << (dd << 1); // max mergeable cells given the alignment
    let n = n.min(entries.len());
    for (i, e) in entries.iter().enumerate().take(n).skip(1) {
      h += 1;
      if *e != h {
        return i; // run broken: i cells were consecutive
      }
    }
    n
  }

  /// Empties the buffer and marks it sorted again.
  fn clear_buff(&mut self) {
    self.sorted = true;
    self.buffer.clear();
  }
}
428
/// A BMOC: a MOC (multi-order HEALPix coverage map) storing, for each cell, an
/// extra flag telling whether the cell is fully or partially covered.
/// Entries are raw `u64` values encoding (depth, hash, flag); they are assumed
/// sorted (binary search is used on them, see [BMOC::test_coo]).
#[derive(Debug, PartialEq, Eq)]
pub struct BMOC {
  // Maximum depth (HEALPix order) of the cells in the MOC.
  depth_max: u8,
  // Raw encoded cells; see [Cell::new] for the decoding.
  pub entries: Box<[u64]>,
}
444
/// A BMOC entry decoded into its components.
#[derive(Debug)]
pub struct Cell {
  // Original encoded value this cell was decoded from.
  pub raw_value: u64,
  // Depth (HEALPix order) of the cell.
  pub depth: u8,
  // Hash value (cell index) at `depth`.
  pub hash: u64,
  // `true` if the cell is fully covered, `false` if partially covered.
  pub is_full: bool,
}
452
453impl Cell {
454 fn new(raw_value: u64, depth_max: u8) -> Cell {
455 let is_full = (raw_value & 1_u64) == 1_u64;
457 let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
459 let hash = raw_value >> (2 + (delta_depth << 1));
461 let depth = depth_max - delta_depth;
462 Cell {
463 raw_value,
464 depth,
465 hash,
466 is_full,
467 }
468 }
469}
470
471impl BMOC {
472 pub fn new_empty(depth: u8) -> Self {
473 let builder = BMOCBuilderUnsafe::new(depth, 0);
474 builder.to_bmoc()
475 }
476
477 pub fn new_allsky(depth: u8) -> Self {
478 let mut builder = BMOCBuilderUnsafe::new(depth, 12);
479 builder.push_all(0, 0, 12, true);
480 builder.to_bmoc()
481 }
482
  /// Number of entries (cells of possibly mixed depths) in this BMOC.
  pub fn size(&self) -> usize {
    self.entries.len()
  }
486
  /// Builds a BMOC directly from raw entries, without checking that they are
  /// sorted, non-overlapping and consistent with `depth_max`.
  pub fn create_unsafe(depth_max: u8, entries: Box<[u64]>) -> BMOC {
    BMOC { depth_max, entries }
  }
494
495 pub(super) fn create_unsafe_copying(depth_max: u8, entries: &[u64]) -> BMOC {
496 let mut entries_copy = Vec::with_capacity(entries.len());
497 for e in entries {
498 entries_copy.push(*e);
499 }
500 BMOC {
501 depth_max,
502 entries: entries_copy.into_boxed_slice(),
503 }
504 }
505
  /// The depth of this BMOC, i.e. the maximum depth of its cells.
  pub fn get_depth_max(&self) -> u8 {
    self.depth_max
  }
509
510 pub fn equals(&self, other: &BMOC) -> bool {
511 if self.depth_max == other.depth_max && self.entries.len() == other.entries.len() {
512 for (r1, r2) in self.iter().zip(other.iter()) {
513 if r1 != r2 {
514 return false;
515 }
516 }
517 return true;
518 }
519 false
520 }
521
  /// Like [Self::equals], but panics describing the first difference found.
  ///
  /// # Panics
  /// If the depths, an entry, or the lengths differ.
  pub fn assert_equals(&self, other: &BMOC) {
    if self.depth_max == other.depth_max {
      for (r1, r2) in self.iter().zip(other.iter()) {
        if *r1 != *r2 {
          panic!(
            "Left: {:?}; Right: {:?}",
            self.from_raw_value(*r1),
            other.from_raw_value(*r2)
          );
        }
      }
      if self.entries.len() != other.entries.len() {
        panic!("Lengths are different");
      }
    } else {
      panic!("Depths are different");
    }
  }
540
  /// Tests whether the given equatorial position (radians, per the crate `hash`
  /// function conventions — TODO confirm units from caller) is inside, outside,
  /// or in a partially covered cell of this BMOC.
  pub fn test_coo(&self, lon: f64, lat: f64) -> Status {
    // Raw value of the depth-max cell containing the position, flagged "full".
    let h_raw = build_raw_value(
      self.depth_max,
      hash(self.depth_max, lon, lat),
      true,
      self.depth_max,
    );
    match self.entries.binary_search(&h_raw) {
      Ok(i) => {
        if is_partial(self.entries[i]) {
          Status::UNKNOWN
        } else {
          Status::IN
        }
      }
      Err(i) => {
        let cell = Cell::new(h_raw, self.depth_max);
        // Not found at depth max: the position may still be inside a larger
        // cell located just before or at the insertion point.
        if i > 0 && is_in(&self.from_raw_value(self.entries[i - 1]), &cell) {
          if is_partial(self.entries[i - 1]) {
            Status::UNKNOWN
          } else {
            Status::IN
          }
        } else if i < self.entries.len() && is_in(&self.from_raw_value(self.entries[i]), &cell) {
          if is_partial(self.entries[i]) {
            Status::UNKNOWN
          } else {
            Status::IN
          }
        } else {
          Status::OUT
        }
      }
    }
  }
578
  /// Returns the complement of this BMOC on the whole sky:
  /// uncovered regions become full cells, full cells disappear, and partially
  /// covered cells are kept as-is (their raw value is re-pushed unchanged).
  pub fn not(&self) -> BMOC {
    // Worst case: 3 sibling cells per depth level per entry, plus the 12 base cells.
    let mut builder = BMOCBuilderUnsafe::new(self.depth_max, 3 * self.entries.len() + 12);
    // Empty coverage: the complement is the full sky.
    if self.entries.is_empty() {
      for h in 0..12_u64 {
        builder.push(0_u8, h, true);
      }
      return builder.to_bmoc();
    }
    // Fill the sky before the first cell.
    let mut d = 0_u8;
    let mut h = 0_u64;
    let mut cell = self.from_raw_value(self.entries[0]);
    go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
    if !cell.is_full {
      builder.push_raw_unsafe(cell.raw_value);
    }
    // Fill the gaps between consecutive cells.
    for i in 1..self.entries.len() {
      cell = self.from_raw_value(self.entries[i]);
      let dd = dd_4_go_up(d, h, cell.depth, cell.hash);
      go_up(&mut d, &mut h, dd, true, &mut builder);
      go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
      if !cell.is_full {
        builder.push_raw_unsafe(cell.raw_value);
      }
    }
    // Fill the sky after the last cell, up to the last base cell.
    let delta_depth = d;
    go_up(&mut d, &mut h, delta_depth, true, &mut builder);
    for h in h..12 {
      builder.push(0_u8, h, true);
    }
    builder.to_bmoc()
  }
623
  /// Returns the intersection of `self` and `other`: for each pair of
  /// overlapping cells the deeper cell is kept, flagged "full" only when both
  /// inputs are full there.
  pub fn and(&self, other: &BMOC) -> BMOC {
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // Left cell larger: degrade the right hash to the left depth to compare.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          match l.hash.cmp(&hr_at_dl) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              debug_assert_eq!(l.hash, hr_at_dl);
              builder.push(r.depth, r.hash, r.is_full && l.is_full);
              right = it_right.next()
            }
          }
        }
        Ordering::Greater => {
          // Right cell larger: degrade the left hash to the right depth to compare.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          match hl_at_dr.cmp(&r.hash) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              debug_assert_eq!(hl_at_dr, r.hash);
              builder.push(l.depth, l.hash, r.is_full && l.is_full);
              left = it_left.next()
            }
          }
        }
        Ordering::Equal => {
          // Same depth: cells intersect only if equal.
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              debug_assert_eq!(l.hash, r.hash);
              builder.push(l.depth, l.hash, r.is_full && l.is_full);
              left = it_left.next();
              right = it_right.next()
            }
          }
        }
      }
    }
    builder.to_bmoc()
  }
703
  /// Returns the union of `self` and `other`: non-overlapping cells are copied,
  /// a full cell absorbs the deeper cells it covers, and a partial cell
  /// overlapping deeper full cells is re-expanded via [Self::not_in_cell_4_or].
  /// The result is packed.
  pub fn or(&self, other: &BMOC) -> BMOC {
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // Left cell larger: degrade the right hash to the left depth to compare.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          if l.hash < hr_at_dl {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if l.hash > hr_at_dl {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if l.is_full {
            // Full left cell absorbs all the right cells it covers.
            debug_assert_eq!(l.hash, hr_at_dl);
            builder.push(l.depth, l.hash, l.is_full);
            right = consume_while_overlapped(l, &mut it_right);
            left = it_left.next();
          } else {
            debug_assert_eq!(l.hash, hr_at_dl);
            debug_assert!(!l.is_full);
            let mut is_overlapped = false;
            right = consume_while_overlapped_and_partial(l, &mut it_right, &mut is_overlapped);
            if is_overlapped {
              // A full right cell lies inside the partial left cell: expand it.
              right = self.not_in_cell_4_or(l, right.unwrap(), &mut it_right, &mut builder);
            } else {
              builder.push(l.depth, l.hash, false);
            }
            left = it_left.next();
          }
        }
        Ordering::Greater => {
          // Mirror of the previous case with left and right swapped.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          if hl_at_dr < r.hash {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if hl_at_dr > r.hash {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if r.is_full {
            debug_assert_eq!(hl_at_dr, r.hash);
            builder.push(r.depth, r.hash, r.is_full);
            left = consume_while_overlapped(r, &mut it_left);
            right = it_right.next();
          } else {
            debug_assert_eq!(hl_at_dr, r.hash);
            debug_assert!(!r.is_full);
            let mut is_overlapped = false;
            left = consume_while_overlapped_and_partial(r, &mut it_left, &mut is_overlapped);
            if is_overlapped {
              left = self.not_in_cell_4_or(r, left.unwrap(), &mut it_left, &mut builder);
            } else {
              builder.push(r.depth, r.hash, false);
            }
            right = it_right.next();
          }
        }
        Ordering::Equal => {
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => {
              builder.push(l.depth, l.hash, l.is_full);
              left = it_left.next();
            }
            Ordering::Greater => {
              builder.push(r.depth, r.hash, r.is_full);
              right = it_right.next();
            }
            Ordering::Equal => {
              debug_assert_eq!(l.hash, r.hash);
              builder.push(l.depth, l.hash, r.is_full || l.is_full);
              left = it_left.next();
              right = it_right.next();
            }
          }
        }
      }
    }
    // Copy the remaining cells of the non-exhausted side.
    while let Some(l) = &left {
      debug_assert!(right.is_none());
      builder.push(l.depth, l.hash, l.is_full);
      left = it_left.next();
    }
    while let Some(r) = &right {
      debug_assert!(left.is_none());
      builder.push(r.depth, r.hash, r.is_full);
      right = it_right.next();
    }
    builder.to_bmoc_packing()
  }
825
  /// Helper of [Self::or]: `low_resolution` is a partial cell of one input that
  /// contains full cells (starting with `c`) of the other input. Pushes those
  /// full cells and fills the rest of `low_resolution` with partial cells,
  /// returning the first cell that no longer overlaps `low_resolution`.
  fn not_in_cell_4_or(
    &self,
    low_resolution: &Cell,
    mut c: Cell,
    iter: &mut BMOCIter,
    builder: &mut BMOCBuilderUnsafe,
  ) -> Option<Cell> {
    let mut d = low_resolution.depth;
    let mut h = low_resolution.hash;
    debug_assert!(c.is_full);
    go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
    builder.push(c.depth, c.hash, true);
    let mut is_overlapped = false;
    let mut cell;
    while {
      cell = consume_while_overlapped_and_partial(low_resolution, iter, &mut is_overlapped);
      is_overlapped
    } {
      // `is_overlapped` guarantees the iterator yielded a cell.
      c = cell.unwrap();
      let dd = dd_4_go_up(d, h, c.depth, c.hash);
      go_up(&mut d, &mut h, dd, false, builder);
      go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
      builder.push(c.depth, c.hash, true);
    }
    // Climb back to the low-resolution depth, then move to its successor.
    let dd = d - low_resolution.depth;
    go_up(&mut d, &mut h, dd, false, builder);
    go_down(
      &mut d,
      &mut h,
      low_resolution.depth,
      low_resolution.hash + 1,
      false,
      builder,
    );
    cell
  }
862
  /// Returns the symmetric difference (XOR) of `self` and `other`: cells
  /// covered by exactly one input are kept, identical fully covered cells
  /// cancel out, and overlaps of full cells with deeper cells are handled via
  /// [Self::not_in_cell_4_xor]. The result is packed.
  pub fn xor(&self, other: &BMOC) -> BMOC {
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // Left cell larger: degrade the right hash to the left depth to compare.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          if l.hash < hr_at_dl {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if l.hash > hr_at_dl {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if l.is_full {
            // Full left cell: push its complement w.r.t. the covered right cells.
            debug_assert_eq!(l.hash, hr_at_dl);
            right = self.not_in_cell_4_xor(l, r, &mut it_right, &mut builder);
            left = it_left.next();
          } else {
            // Partial left cell: keep it partial and skip the covered right cells.
            debug_assert_eq!(l.hash, hr_at_dl);
            debug_assert!(!l.is_full);
            builder.push(l.depth, l.hash, l.is_full);
            right = consume_while_overlapped(l, &mut it_right);
            left = it_left.next();
          }
        }
        Ordering::Greater => {
          // Mirror of the previous case with left and right swapped.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          if hl_at_dr < r.hash {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if hl_at_dr > r.hash {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if r.is_full {
            debug_assert_eq!(hl_at_dr, r.hash);
            left = self.not_in_cell_4_xor(r, l, &mut it_left, &mut builder);
            right = it_right.next();
          } else {
            debug_assert_eq!(hl_at_dr, r.hash);
            debug_assert!(!r.is_full);
            builder.push(r.depth, r.hash, r.is_full);
            left = consume_while_overlapped(r, &mut it_left);
            right = it_right.next();
          }
        }
        Ordering::Equal => {
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => {
              builder.push(l.depth, l.hash, l.is_full);
              left = it_left.next();
            }
            Ordering::Greater => {
              builder.push(r.depth, r.hash, r.is_full);
              right = it_right.next();
            }
            Ordering::Equal => {
              debug_assert_eq!(l.hash, r.hash);
              // Two identical fully covered cells cancel each other.
              let both_fully_covered = r.is_full && l.is_full;
              if !both_fully_covered {
                builder.push(l.depth, l.hash, both_fully_covered);
              }
              left = it_left.next();
              right = it_right.next();
            }
          }
        }
      }
    }
    // Copy the remaining cells of the non-exhausted side.
    while let Some(l) = &left {
      debug_assert!(right.is_none());
      builder.push(l.depth, l.hash, l.is_full);
      left = it_left.next();
    }
    while let Some(r) = &right {
      debug_assert!(left.is_none());
      builder.push(r.depth, r.hash, r.is_full);
      right = it_right.next();
    }
    builder.to_bmoc_packing()
  }
960
  /// Helper of [Self::xor] (also used by [Self::minus]): `low_resolution` is a
  /// full cell of one input containing cells (starting with `c`) of the other
  /// input. Pushes the complement, inside `low_resolution`, of those cells
  /// (covered-by-partial cells are re-pushed as partial), and returns the first
  /// cell that no longer overlaps `low_resolution`.
  fn not_in_cell_4_xor(
    &self,
    low_resolution: &Cell,
    c: &Cell,
    iter: &mut BMOCIter,
    builder: &mut BMOCBuilderUnsafe,
  ) -> Option<Cell> {
    let mut d = low_resolution.depth;
    let mut h = low_resolution.hash;
    go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
    if !c.is_full {
      builder.push(c.depth, c.hash, false);
    }
    let mut cell = iter.next();
    while let Some(c) = &cell {
      if !is_in(low_resolution, c) {
        break;
      }
      let dd = dd_4_go_up(d, h, c.depth, c.hash);
      go_up(&mut d, &mut h, dd, true, builder);
      go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
      if !c.is_full {
        builder.push(c.depth, c.hash, false);
      }
      cell = iter.next()
    }
    // Climb back to the low-resolution depth, then move to its successor.
    let dd = d - low_resolution.depth;
    go_up(&mut d, &mut h, dd, true, builder);
    go_down(
      &mut d,
      &mut h,
      low_resolution.depth,
      low_resolution.hash + 1,
      true,
      builder,
    );
    cell
  }
1000
1001 pub fn minus(&self, other: &BMOC) -> BMOC {
1011 let mut builder = BMOCBuilderUnsafe::new(
1012 max(self.depth_max, other.depth_max),
1013 max(self.entries.len(), other.entries.len()),
1014 );
1015 let mut it_left = self.into_iter();
1016 let mut it_right = other.into_iter();
1017 let mut left = it_left.next();
1018 let mut right = it_right.next();
1019 while let (Some(l), Some(r)) = (&left, &right) {
1023 match l.depth.cmp(&r.depth) {
1024 Ordering::Less => {
1025 let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
1028 if l.hash < hr_at_dl {
1029 builder.push(l.depth, l.hash, l.is_full);
1030 left = it_left.next();
1031 } else if l.hash > hr_at_dl {
1032 right = it_right.next();
1033 } else if l.is_full {
1034 debug_assert_eq!(l.hash, hr_at_dl);
1035 right = self.not_in_cell_4_xor(l, r, &mut it_right, &mut builder);
1037 left = it_left.next();
1038 } else {
1039 debug_assert_eq!(l.hash, hr_at_dl);
1040 debug_assert!(!l.is_full);
1041 builder.push(l.depth, l.hash, false);
1042 right = consume_while_overlapped(l, &mut it_right);
1043 left = it_left.next();
1044 }
1045 }
1046 Ordering::Greater => {
1047 let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
1050 if hl_at_dr < r.hash {
1051 builder.push(l.depth, l.hash, l.is_full);
1052 left = it_left.next();
1053 } else if hl_at_dr > r.hash {
1054 right = it_right.next();
1056 } else if !r.is_full || !l.is_full {
1057 debug_assert_eq!(hl_at_dr, r.hash);
1058 builder.push(l.depth, l.hash, false);
1059 left = it_left.next();
1060 }
1061 }
1062 Ordering::Equal => {
1063 debug_assert_eq!(l.depth, r.depth);
1064 match l.hash.cmp(&r.hash) {
1065 Ordering::Less => {
1066 builder.push(l.depth, l.hash, l.is_full);
1067 left = it_left.next();
1068 }
1069 Ordering::Greater => {
1070 right = it_right.next();
1071 }
1072 Ordering::Equal => {
1073 debug_assert_eq!(l.hash, r.hash);
1074 let both_fully_covered = r.is_full && l.is_full;
1075 if !both_fully_covered {
1076 builder.push(l.depth, l.hash, both_fully_covered);
1077 }
1078 left = it_left.next();
1079 right = it_right.next();
1080 }
1081 }
1082 }
1083 }
1084 }
1085 while let Some(l) = &left {
1086 debug_assert!(right.is_none());
1087 builder.push(l.depth, l.hash, l.is_full);
1088 left = it_left.next();
1089 }
1090 builder.to_bmoc_packing()
1091 }
1092
  /// Decodes a raw entry value into a [Cell], using this BMOC's depth.
  pub fn from_raw_value(&self, raw_value: u64) -> Cell {
    Cell::new(raw_value, self.depth_max)
  }
1096
1097 pub fn deep_size(&self) -> usize {
1100 let mut sum = 0_usize;
1101 for &raw_value in self.entries.iter() {
1102 let depth = self.get_depth(raw_value);
1103 sum += nside_square_unsafe(self.depth_max - depth) as usize;
1104 }
1105 sum
1106 }
1107
  /// Iterator over the raw (encoded) entry values.
  pub fn iter(&self) -> Iter<'_, u64> {
    self.entries.iter()
  }
1118
  /// Iterator over the coverage expanded at the maximum depth
  /// (one value per depth-max cell; see [Self::deep_size] for the count).
  pub fn flat_iter(&self) -> BMOCFlatIter<'_> {
    BMOCFlatIter::new(self.depth_max, self.deep_size(), self.entries.iter())
  }
1124
  /// Consuming version of [Self::flat_iter]: takes ownership of the entries.
  pub fn into_flat_iter(self) -> BMOCIntoFlatIter {
    BMOCIntoFlatIter::new(
      self.depth_max,
      self.deep_size(),
      self.entries.into_vec().into_iter(),
    )
  }
1134
  /// Same as [Self::flat_iter] but through a `BMOCFlatIterCell`
  /// (presumably yielding full cell information instead of bare values —
  /// see that type's definition).
  pub fn flat_iter_cell(&self) -> BMOCFlatIterCell<'_> {
    BMOCFlatIterCell::new(self.depth_max, self.deep_size(), self.entries.iter())
  }
1142
1143 pub fn to_flat_array(&self) -> Box<[u64]> {
1147 let mut res: Vec<u64> = Vec::with_capacity(self.deep_size());
1148 for cell in self.flat_iter() {
1149 res.push(cell);
1150 }
1151 res.into_boxed_slice()
1152 }
1153
  /// Depth of the cell encoded by the given raw value (flag bit removed first).
  fn get_depth(&self, raw_value: u64) -> u8 {
    self.get_depth_no_flag(rm_flag(raw_value))
  }
1157
  /// Depth from a flag-less raw value: each level below `depth_max` adds one
  /// trailing zero bit-pair below the sentinel bit.
  fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
    self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
  }
1162
  /// Decodes the (depth, hash) pair of the given raw value, ignoring the flag bit.
  fn get_depth_icell(&self, raw_value: u64) -> (u8, u64) {
    // Skip the flag bit, then count the trailing zero bit-pairs of the sentinel.
    let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
    let hash = raw_value >> (2 + (delta_depth << 1));
    let depth = self.depth_max - delta_depth;
    (depth, hash)
  }
1171
  /// Transforms the BMOC into consecutive ranges of depth-max hash values,
  /// ignoring the full/partial flags; touching cells are merged into one range.
  pub fn to_ranges(&self) -> Box<[std::ops::Range<u64>]> {
    let mut ranges: Vec<std::ops::Range<u64>> = Vec::with_capacity(self.entries.len());
    let mut prev_min = 0_u64;
    let mut prev_max = 0_u64;
    for cell in self.into_iter() {
      if cell.depth < self.depth_max {
        // Low-depth cell: expand to its depth-max hash range.
        let range = to_range(cell.hash, self.depth_max - cell.depth);
        if range.start != prev_max {
          // Gap: push the pending range (if non-empty) and start a new one.
          if prev_min != prev_max {
            ranges.push(prev_min..prev_max);
          }
          prev_min = range.start;
        }
        prev_max = range.end;
      } else if cell.hash == prev_max {
        // Depth-max cell touching the pending range: extend it by one.
        prev_max += 1;
      } else {
        if prev_min != prev_max {
          ranges.push(prev_min..prev_max);
        }
        prev_min = cell.hash;
        prev_max = cell.hash + 1;
      }
    }
    // Push the last pending range, if any.
    if prev_min != prev_max {
      ranges.push(prev_min..prev_max);
    }
    ranges.into_boxed_slice()
  }
1207
  /// Transforms the BMOC into consecutive ranges of depth-max hash values, each
  /// carrying its full (`true`) / partial (`false`) flag; touching cells are
  /// merged into one range only when their flags agree.
  pub fn to_flagged_ranges(&self) -> Vec<(std::ops::Range<u64>, bool)> {
    let mut ranges: Vec<(std::ops::Range<u64>, bool)> = Vec::with_capacity(self.entries.len());
    let mut prev_min = 0_u64;
    let mut prev_max = 0_u64;
    // Initial flag from the first entry's lowest bit (matching its cell flag).
    let mut prev_flag = self
      .entries
      .first()
      .map(|v| (v & 1_u64) == 1_u64)
      .unwrap_or(false);
    for cell in self.into_iter() {
      if cell.depth < self.depth_max {
        // Low-depth cell: expand to its depth-max hash range.
        let range = to_range(cell.hash, self.depth_max - cell.depth);
        // `prev_max == 0` covers the initial, still-empty pending range.
        if range.start == prev_max && (prev_max == 0 || cell.is_full == prev_flag) {
          prev_max = range.end;
        } else {
          if prev_min != prev_max {
            ranges.push((prev_min..prev_max, prev_flag));
          }
          prev_min = range.start;
          prev_max = range.end;
          prev_flag = cell.is_full;
        }
      } else if cell.hash == prev_max && cell.is_full == prev_flag {
        // Depth-max cell touching the pending range with the same flag.
        prev_max += 1;
      } else {
        if prev_min != prev_max {
          ranges.push((prev_min..prev_max, prev_flag));
        }
        prev_min = cell.hash;
        prev_max = cell.hash + 1;
        prev_flag = cell.is_full;
      }
    }
    // Push the last pending range, if any.
    if prev_min != prev_max {
      ranges.push((prev_min..prev_max, prev_flag));
    }
    ranges.shrink_to_fit();
    ranges
  }
1252
  /// Compresses the BMOC by a depth-first walk over the 12 base-cell trees,
  /// emitting one empty/full/partial marker per visited node or leaf
  /// ("lossy" presumably w.r.t. the per-cell flags — confirm against
  /// `CompressedMOC`'s decompression).
  /// Throughout, `d`/`h` track the depth and hash of the current walk position.
  #[allow(clippy::many_single_char_names)]
  pub fn compress_lossy(&self) -> CompressedMOC {
    let n = self.entries.len();
    let dm = self.depth_max;
    let mut b = CompressedMOCBuilder::new(dm, 4 + 3 * n);
    // Special case: empty MOC -> 12 empty base cells.
    if n == 0 {
      if dm == 0 {
        for _ in 0..12 {
          b.push_leaf_empty();
        }
      } else {
        for _ in 0..12 {
          b.push_node_empty();
        }
      }
      return b.to_compressed_moc();
    } else if dm == 0 {
      // Special case: depth 0 -> only base-cell leaves.
      let (curr_d, _) = self.get_depth_icell(self.entries[0]);
      assert_eq!(curr_d, 0);
      let mut h = 0_u64;
      for (_, curr_h) in self.entries.iter().map(|e| self.get_depth_icell(*e)) {
        for _ in h..curr_h {
          b.push_leaf_empty();
        }
        b.push_leaf_full();
        h = curr_h + 1;
      }
      for _ in h..12 {
        b.push_leaf_empty();
      }
      return b.to_compressed_moc();
    }
    // Regular case. First, go down from the root to the first cell.
    let mut d;
    let mut h = 0;
    let (curr_d, curr_h) = self.get_depth_icell(self.entries[0]);
    for dd in (0..=curr_d).rev() {
      let target_h = curr_h >> (dd << 1);
      if curr_d == dm && dd == 0 {
        for _ in h..target_h {
          b.push_leaf_empty();
        }
        b.push_leaf_full();
      } else {
        for _ in h..target_h {
          b.push_node_empty();
        }
        if dd == 0 {
          b.push_node_full()
        } else {
          b.push_node_partial()
        };
      }
      h = target_h << 2;
    }
    d = curr_d;
    h = curr_h;
    // Then, for each following cell: go up to the common ancestor, go down to the cell.
    let mut i = 1_usize;
    while i < n {
      let (curr_d, curr_h) = self.get_depth_icell(self.entries[i]);
      // Hash of the next cell expressed at the current depth.
      let target_h = if d > curr_d {
        let dd = d - curr_d;
        curr_h << (dd << 1)
      } else {
        let dd = curr_d - d;
        curr_h >> (dd << 1)
      };
      // Number of levels to climb: highest differing bit-pair, capped at d.
      let mut dd = ((63 - (h ^ target_h).leading_zeros()) >> 1) as u8;
      if dd > d {
        dd = d;
      }
      // Climb, completing each level's remaining siblings as empty.
      if dd > 0 && d == dm {
        for _ in h & 3..3 {
          b.push_leaf_empty();
        }
        h >>= 2;
        dd -= 1;
        d -= 1;
      }
      for _ in 0..dd {
        for _ in h & 3..3 {
          b.push_node_empty();
        }
        h >>= 2;
      }
      d -= dd;
      h += 1;
      // Go down to the next cell.
      let dd = curr_d - d;
      for rdd in (0..=dd).rev() {
        let target_h = curr_h >> (rdd << 1);
        if curr_d == dm && rdd == 0 {
          for _ in h..target_h {
            b.push_leaf_empty();
          }
          b.push_leaf_full();
        } else {
          for _ in h..target_h {
            b.push_node_empty();
          }
          if rdd == 0 {
            b.push_node_full()
          } else {
            b.push_node_partial()
          };
        }
        h = target_h << 2;
      }
      d = curr_d;
      h = curr_h;
      i += 1;
    }
    // Finally, climb back to depth 0, completing siblings as empty.
    if d == dm {
      for _ in h & 3..3 {
        b.push_leaf_empty();
      }
      h >>= 2;
      d -= 1;
    }
    for _ in 0..d {
      for _ in h & 3..3 {
        b.push_node_empty();
      }
      h >>= 2;
    }
    // Complete the remaining base cells.
    if dm == 0 {
      for _ in h + 1..12 {
        b.push_leaf_empty();
      }
    } else {
      for _ in h + 1..12 {
        b.push_node_empty();
      }
    }
    b.to_compressed_moc()
  }
1416
1417 pub fn to_fits<W: Write>(&self, mut writer: W) -> Result<(), IoError> {
1420 self
1421 .write_fits_header(&mut writer)
1422 .and_then(|()| self.write_fits_data(&mut writer))
1423 .and_then(|n_bytes_written| write_final_padding_ioerr(writer, n_bytes_written))
1424 }
1425
  /// Writes the 2880-byte FITS primary header describing the BMOC data block.
  ///
  /// A FITS header block is 36 records of 80 bytes, blank-padded; each record
  /// below is written at a fixed byte offset inside its 80-byte card.
  fn write_fits_header<W: Write>(&self, mut writer: W) -> Result<(), IoError> {
    // Blank-initialized header block (36 cards of 80 bytes).
    let mut header_block = [b' '; 2880];
    let mut it = header_block.chunks_mut(80);
    // Mandatory structural keywords.
    it.next().unwrap()[0..30].copy_from_slice(b"SIMPLE  =                    T");
    it.next().unwrap()[0..30].copy_from_slice(b"BITPIX  =                    8");
    it.next().unwrap()[0..30].copy_from_slice(b"NAXIS   =                    2");
    // Bytes per value: 4 when the flagged-zuniq fits in a u32 (depth <= 13), else 8.
    if self.depth_max <= 13 {
      it.next().unwrap()[0..30].copy_from_slice(b"NAXIS1  =                    4");
    } else {
      it.next().unwrap()[0..30].copy_from_slice(b"NAXIS1  =                    8");
    }
    // One row per BMOC entry.
    write_uint_mandatory_keyword_record(it.next().unwrap(), b"NAXIS2  ", self.entries.len() as u64);
    it.next().unwrap()[0..30].copy_from_slice(b"EXTEND  =                    F");
    // BMOC-specific keywords checked back by `from_fits`.
    it.next().unwrap()[0..16].copy_from_slice(b"PRODTYPE= 'BMOC'");
    write_uint_mandatory_keyword_record(it.next().unwrap(), b"HPXORDER", self.depth_max as u64);
    write_str_keyword_record(it.next().unwrap(), b"VAL_NAME", "FLAGGED_ZUNIQ");
    it.next().unwrap()[0..20].copy_from_slice(b"DTENDIAN= 'LITTLE  '");
    // Provenance cards: creation date and creator crate/version.
    write_keyword_record(
      it.next().unwrap(),
      b"DATE    ",
      format!(
        "'{}'",
        Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true)
      )
      .as_str(),
    );
    write_keyword_record(
      it.next().unwrap(),
      b"CREATOR ",
      format!(
        "'Rust crate {} {}'",
        env!("CARGO_PKG_NAME"),
        env!("CARGO_PKG_VERSION")
      )
      .as_str(),
    );
    it.next().unwrap()[0..3].copy_from_slice(b"END");
    // Remaining cards stay blank; the whole 2880-byte block is written at once.
    writer.write_all(&header_block[..])
  }
1469 fn write_fits_data<W: Write>(&self, mut writer: W) -> Result<usize, IoError> {
1471 let elem_byte_size = if self.depth_max <= 13 {
1472 for fzuniq in &self.entries {
1473 writer.write_all(((*fzuniq) as u32).to_le_bytes().as_ref())?;
1474 }
1475 size_of::<u32>()
1476 } else {
1477 for fzuniq in &self.entries {
1478 writer.write_all(fzuniq.to_le_bytes().as_ref())?;
1479 }
1480 size_of::<u64>()
1481 };
1482 Ok(self.entries.len() * elem_byte_size)
1483 }
1484
1485 pub fn from_fits_file<P: AsRef<Path>>(path: P) -> Result<Self, FitsError> {
1486 File::open(path)
1487 .map_err(FitsError::Io)
1488 .and_then(|file| Self::from_fits(BufReader::new(file)))
1489 }
1490
1491 pub fn from_fits<R: BufRead>(mut reader: R) -> Result<Self, FitsError> {
1492 let mut raw_header = [b' '; 2880];
1493 let mut raw_cards_it = next_36_chunks_of_80_bytes(&mut reader, &mut raw_header)?;
1495 let (zuniq_n_bytes, n_rows) =
1497 check_keyword_and_val(raw_cards_it.next().unwrap(), b"SIMPLE ", b"T")
1498 .and_then(|()| check_keyword_and_val(raw_cards_it.next().unwrap(), b"BITPIX ", b"8"))
1499 .and_then(|()| check_keyword_and_val(raw_cards_it.next().unwrap(), b"NAXIS ", b"2"))
1500 .and_then(|()| {
1501 check_keyword_and_parse_uint_val::<u64>(raw_cards_it.next().unwrap(), b"NAXIS1 ")
1502 })
1503 .and_then(|n_bytes| {
1504 check_keyword_and_parse_uint_val::<u64>(raw_cards_it.next().unwrap(), b"NAXIS2 ")
1505 .map(move |n_rows| (n_bytes, n_rows))
1506 })?;
1507 let mut end_found = false;
1509 let mut prodtype_found = false;
1510 let mut dtendian_found = false;
1511 let mut hpxoder: Option<u8> = None;
1512 let mut date: Option<SystemTime> = None;
1513 for kw_record in &mut raw_cards_it {
1514 match &kw_record[0..8] {
1515 b"EXTEND " => check_keyword_and_val(kw_record, b"EXTEND ", b"F"),
1516 b"PRODTYPE" => {
1517 check_keyword_and_str_val(kw_record, b"PRODTYPE", b"BMOC").map(|()| prodtype_found = true)
1518 }
1519 b"HPXORDER" => parse_uint_val::<u8>(kw_record).map(|v| hpxoder = Some(v)),
1520 b"DTENDIAN" => check_keyword_and_str_val(kw_record, b"DTENDIAN", b"LITTLE")
1521 .map(|()| dtendian_found = true),
1522 b"DATE " => get_str_val_no_quote(kw_record).map(|v| {
1523 date = unsafe { str::from_utf8_unchecked(v) }
1524 .parse::<DateTime<Utc>>()
1525 .ok()
1526 .map(|dt| dt.into())
1527 }),
1528 b"CREATOR " => continue,
1529 b"END " => {
1530 end_found = true;
1531 break;
1532 }
1533 _ => {
1534 debug!("Ignored FITS card: {}", unsafe {
1535 str::from_utf8_unchecked(kw_record)
1536 });
1537 continue;
1538 }
1539 }?;
1540 }
1541 if !end_found {
1543 return Err(FitsError::new_custom(String::from(
1544 "'END' keyword not found in the first 36 primary header cards.",
1545 )));
1546 }
1547 if !(prodtype_found & dtendian_found) {
1548 return Err(FitsError::new_custom(String::from(
1549 "One of the BMOC mandatory cards is missing in the FITS header!",
1550 )));
1551 }
1552 let depth = hpxoder.ok_or_else(|| {
1553 FitsError::new_custom(String::from(
1554 "'HPXORDER' keyword not found in the primary header cards.",
1555 ))
1556 })?;
1557 match zuniq_n_bytes {
1558 4 => (0..n_rows)
1559 .into_iter()
1560 .map(|_| reader.read_u32::<LittleEndian>().map(|v| v as u64))
1561 .collect::<Result<Vec<u64>, IoError>>()
1562 .map_err(FitsError::Io),
1563 8 => {
1564 (0..n_rows)
1567 .into_iter()
1568 .map(|_| reader.read_u64::<LittleEndian>())
1569 .collect::<Result<Vec<u64>, IoError>>()
1570 .map_err(FitsError::Io)
1571 }
1572 _ => Err(FitsError::new_custom(format!(
1573 "Wrong 'NAXIS1'. Expected: 4 or 8. Actual: {}",
1574 zuniq_n_bytes
1575 ))),
1576 }
1577 .map(|entries| BMOC::create_unsafe(depth, entries.into_boxed_slice()))
1578 }
1579
1580 pub fn to_bintable_fits<W: Write>(&self, mut writer: W) -> Result<(), IoError> {
1588 self
1589 .write_bintable_fits_header(&mut writer)
1590 .and_then(|()| self.write_bintable_fits_data(&mut writer))
1591 .and_then(|n_bytes_written| write_final_padding_ioerr(writer, n_bytes_written))
1592 }
  /// Writes the (empty) primary HDU followed by the 2880-byte BINTABLE
  /// extension header. Cards are written at fixed byte offsets in 80-byte records.
  fn write_bintable_fits_header<W: Write>(&self, mut writer: W) -> Result<(), IoError> {
    write_primary_hdu_ioerr(&mut writer)?;
    // Blank-initialized header block (36 cards of 80 bytes).
    let mut header_block = [b' '; 2880];
    let mut it = header_block.chunks_mut(80);
    it.next().unwrap()[0..20].copy_from_slice(b"XTENSION= 'BINTABLE'");
    it.next().unwrap()[0..30].copy_from_slice(b"BITPIX  =                    8");
    it.next().unwrap()[0..30].copy_from_slice(b"NAXIS   =                    2");
    // 9 bytes per row: 8-byte ZUNIQ ('K') + 1-byte logical ('L').
    it.next().unwrap()[0..30].copy_from_slice(b"NAXIS1  =                    9");
    write_uint_mandatory_keyword_record(it.next().unwrap(), b"NAXIS2  ", self.entries.len() as u64);
    it.next().unwrap()[0..30].copy_from_slice(b"PCOUNT  =                    0");
    it.next().unwrap()[0..30].copy_from_slice(b"GCOUNT  =                    1");
    it.next().unwrap()[0..30].copy_from_slice(b"TFIELDS =                    2");
    it.next().unwrap()[0..20].copy_from_slice(b"TTYPE1  = 'ZUNIQ   '");
    it.next().unwrap()[0..20].copy_from_slice(b"TFORM1  = 'K       '");
    it.next().unwrap()[0..25].copy_from_slice(b"TTYPE2  = 'FULLY_COVERED'");
    it.next().unwrap()[0..20].copy_from_slice(b"TFORM2  = 'L       '");
    it.next().unwrap()[0..16].copy_from_slice(b"PRODTYPE= 'BMOC'");
    it.next().unwrap()[0..20].copy_from_slice(b"COORDSYS= 'C       '");
    it.next().unwrap()[0..20].copy_from_slice(b"EXTNAME = 'xtension'");
    write_uint_mandatory_keyword_record(it.next().unwrap(), b"MAXORDER", self.depth_max as u64);
    // Provenance: creation date (trailing 'Z' removed) and creator crate/version.
    write_keyword_record(
      it.next().unwrap(),
      b"DATE    ",
      format!(
        "'{}'",
        Utc::now()
          .to_rfc3339_opts(SecondsFormat::Secs, true)
          .trim_end_matches('Z')
      )
      .as_str(),
    );
    write_keyword_record(
      it.next().unwrap(),
      b"CREATOR ",
      format!(
        "'Rust crate {} {}'",
        env!("CARGO_PKG_NAME"),
        env!("CARGO_PKG_VERSION")
      )
      .as_str(),
    );
    it.next().unwrap()[0..3].copy_from_slice(b"END");
    // Remaining cards stay blank; the whole 2880-byte block is written at once.
    writer.write_all(&header_block[..])
  }
1639 fn write_bintable_fits_data<W: Write>(&self, mut writer: W) -> Result<usize, IoError> {
1641 let mut n = 0_usize;
1642 for fzuniq in self.entries.iter().cloned() {
1643 let is_full = if (fzuniq as u8 & 1_u8) == 1_u8 {
1644 b'T'
1645 } else {
1646 b'F'
1647 };
1648 let zuniq = fzuniq >> 1;
1649 writer
1650 .write_all(zuniq.to_be_bytes().as_ref())
1651 .and_then(|()| writer.write_u8(is_full))?;
1652 n += 1;
1653 }
1654 Ok(n * (size_of::<u64>() + size_of::<u8>()))
1655 }
1656
1657 pub fn to_csv<W: Write>(&self, mut writer: W) -> Result<(), IoError> {
1658 write!(
1659 &mut writer,
1660 "# BMOC order max: {}; File sorted by ZUNIQ(order, ipix).\n",
1661 self.depth_max
1662 )?;
1663 write!(&mut writer, "order,ipix,is_fully_covered_flag\n")?;
1664 for fzuniq in self.entries.iter().cloned() {
1665 let is_full = fzuniq as u8 & 1_u8;
1666 let zuniq = fzuniq >> 1; let n_trailing_zero = zuniq.trailing_zeros(); let hash = zuniq >> (1 | n_trailing_zero); let depth = self.depth_max - (n_trailing_zero as u8 >> 1);
1670 write!(&mut writer, "{},{},{}\n", depth, hash, is_full)?;
1671 }
1672 Ok(())
1673 }
1674
1675 }
1678
1679#[inline]
1680fn consume_while_overlapped(low_resolution: &Cell, iter: &mut BMOCIter) -> Option<Cell> {
1681 let mut cell = iter.next();
1682 while {
1683 match &cell {
1684 Some(c) => is_in(low_resolution, c),
1685 None => false,
1686 }
1687 } {
1688 cell = iter.next();
1689 }
1690 cell
1691}
1692
1693#[inline]
1697fn consume_while_overlapped_and_partial(
1698 low_resolution: &Cell,
1699 iter: &mut BMOCIter,
1700 res_is_overlapped: &mut bool,
1701) -> Option<Cell> {
1702 let mut cell = iter.next();
1703 while {
1704 match &cell {
1705 Some(c) => {
1706 if is_in(low_resolution, c) {
1707 if c.is_full {
1708 *res_is_overlapped = true;
1709 false
1710 } else {
1711 true
1712 }
1713 } else {
1714 false
1715 }
1716 }
1717 None => false,
1718 }
1719 } {
1720 cell = iter.next();
1721 }
1722 cell
1723 }
1739
/// Computes, from the highest bit differing between `h` and the cell
/// (`next_d`, `next_h`) brought to depth `d`, the number of depth levels to go
/// up; the result is clamped to `[0, d]`.
#[inline]
fn dd_4_go_up(d: u8, h: u64, next_d: u8, next_h: u64) -> u8 {
  // Bring `next_h` to depth `d` (two bits of hash per depth level).
  let target_h_at_d = if next_d < d {
    next_h << ((d - next_d) << 1)
  } else {
    next_h >> ((next_d - d) << 1)
  };
  // Position of the highest differing bit pair gives the levels to climb.
  match h ^ target_h_at_d {
    0 => 0,
    xor => (((63 - xor.leading_zeros()) as u8) >> 1).min(d),
  }
}
1764
1765#[inline]
1767fn is_in(low_resolution: &Cell, high_resolution: &Cell) -> bool {
1768 low_resolution.depth <= high_resolution.depth
1769 && low_resolution.hash
1770 == (high_resolution.hash >> ((high_resolution.depth - low_resolution.depth) << 1))
1771}
/// Removes the trailing "is full" flag bit, keeping only the zuniq part.
#[inline]
fn rm_flag(raw_value: u64) -> u64 {
  raw_value.wrapping_shr(1)
}
1782
/// Tells whether the raw value flags a partially covered cell
/// (its lowest — "is full" — bit is unset).
#[inline]
fn is_partial(raw_value: u64) -> bool {
  raw_value & 1_u64 == 0_u64
}
1787
/// Tells whether `hash` is not the first of the four siblings sharing the same
/// parent cell (i.e. at least one of its two lowest bits is set).
#[inline]
fn is_not_first_cell_of_larger_cell(hash: u64) -> bool {
  hash & 3_u64 != 0_u64
}
1792
/// Returns the depth encoded in the given raw value (flag bit included).
#[inline]
fn get_depth(raw_value: u64, depth_max: u8) -> u8 {
  // Drop the flag bit; the (even) number of trailing zeros of the remaining
  // zuniq encodes twice the delta depth from `depth_max`.
  depth_max - ((raw_value >> 1).trailing_zeros() >> 1) as u8
}
1797
/// Returns the depth encoded in a raw value whose flag bit was already removed.
#[inline]
fn get_depth_no_flag(raw_value_no_flag: u64, depth_max: u8) -> u8 {
  // The (even) number of trailing zeros encodes twice the delta depth.
  let delta_depth = (raw_value_no_flag.trailing_zeros() >> 1) as u8;
  depth_max - delta_depth
}
1802
/// Extracts the cell hash from a raw value: skips the flag and sentinel bits
/// plus `2 * delta_depth` padding bits.
#[inline]
fn get_hash_from_delta_depth(raw_value: u64, delta_depth: u8) -> u64 {
  let shift = 2 + (delta_depth << 1);
  raw_value >> shift
}
1807
/// Owning iterator flattening a BMOC: each encoded cell is expanded into all
/// its descendant hashes at the maximum depth, yielded in order.
pub struct BMOCIntoFlatIter {
  depth_max: u8, // BMOC maximum depth: all yielded hashes are at this depth
  deep_size: usize, // total number of hashes this iterator will yield
  raw_val_iter: IntoIter<u64>, // iterator over the raw (encoded) BMOC values
  curr_val: Option<u64>, // next hash to be yielded; `None` once exhausted
  curr_val_max: u64, // last descendant hash of the cell currently expanded
  n_returned: usize, // number of hashes yielded so far (for `size_hint`)
}
1816
impl BMOCIntoFlatIter {
  /// Builds the iterator and pre-loads the first cell range (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: IntoIter<u64>) -> BMOCIntoFlatIter {
    let mut flat_iter = BMOCIntoFlatIter {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Position the cursor on the first deepest-depth hash.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of hash values this iterator yields.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the yielded hash values (the BMOC maximum depth).
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value into a `[curr_val, curr_val_max]` range of
  /// deepest-depth hashes and returns the previously current value.
  fn next_cell(&mut self) -> Option<u64> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(raw_value) => {
        // Trailing zeros (flag bit removed) encode twice the delta depth.
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove the flag and sentinel bits to recover the cell hash.
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last descendant hashes at the maximum depth.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(val)
      }
    }
  }
}
1855
1856impl Iterator for BMOCIntoFlatIter {
1857 type Item = u64;
1858
1859 fn next(&mut self) -> Option<u64> {
1860 if let Some(val) = self.curr_val {
1861 self.n_returned += 1;
1862 if val < self.curr_val_max {
1863 self.curr_val.replace(val + 1)
1864 } else {
1865 self.next_cell()
1866 }
1867 } else {
1868 None
1869 }
1870 }
1871
1872 fn size_hint(&self) -> (usize, Option<usize>) {
1873 let n = self.deep_size - self.n_returned;
1874 (n, Some(n))
1875 }
1876}
1877
/// Borrowing counterpart of `BMOCIntoFlatIter`: flattens a BMOC by expanding
/// each encoded cell into all its descendant hashes at the maximum depth.
pub struct BMOCFlatIter<'a> {
  depth_max: u8, // BMOC maximum depth: all yielded hashes are at this depth
  deep_size: usize, // total number of hashes this iterator will yield
  raw_val_iter: Iter<'a, u64>, // iterator over the raw (encoded) BMOC values
  curr_val: Option<u64>, // next hash to be yielded; `None` once exhausted
  curr_val_max: u64, // last descendant hash of the cell currently expanded
  n_returned: usize, // number of hashes yielded so far (for `size_hint`)
}
1886
impl<'a> BMOCFlatIter<'a> {
  /// Builds the iterator and pre-loads the first cell range (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIter<'a> {
    let mut flat_iter = BMOCFlatIter {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Position the cursor on the first deepest-depth hash.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of hash values this iterator yields.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the yielded hash values (the BMOC maximum depth).
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value into a `[curr_val, curr_val_max]` range of
  /// deepest-depth hashes and returns the previously current value.
  fn next_cell(&mut self) -> Option<u64> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(&raw_value) => {
        // Trailing zeros (flag bit removed) encode twice the delta depth.
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove the flag and sentinel bits to recover the cell hash.
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last descendant hashes at the maximum depth.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(val)
      }
    }
  }
}
1932
1933impl Iterator for BMOCFlatIter<'_> {
1934 type Item = u64;
1935
1936 fn next(&mut self) -> Option<u64> {
1937 if let Some(val) = self.curr_val {
1938 self.n_returned += 1;
1939 if val < self.curr_val_max {
1940 self.curr_val.replace(val + 1)
1941 } else {
1942 self.next_cell()
1943 }
1944 } else {
1945 None
1946 }
1947 }
1948
1949 fn size_hint(&self) -> (usize, Option<usize>) {
1950 let n = self.deep_size - self.n_returned;
1951 (n, Some(n))
1952 }
1953}
1954
/// Iterator flattening a BMOC into `Cell`s at the maximum depth: each encoded
/// cell is expanded into all its descendants, keeping the original raw value
/// and "is full" flag.
pub struct BMOCFlatIterCell<'a> {
  depth_max: u8, // BMOC maximum depth: all yielded cells are at this depth
  deep_size: usize, // total number of cells this iterator will yield
  raw_val_iter: Iter<'a, u64>, // iterator over the raw (encoded) BMOC values

  curr_val: Option<Cell>, // next cell to be yielded; `None` once exhausted
  curr_val_max: u64, // last descendant hash of the cell currently expanded

  n_returned: usize, // number of cells yielded so far (for `size_hint`)
}
1967
impl<'a> BMOCFlatIterCell<'a> {
  /// Builds the iterator and pre-loads the first cell range (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIterCell<'a> {
    let mut flat_iter = BMOCFlatIterCell {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Position the cursor on the first deepest-depth cell.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of cells this iterator yields.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the yielded cells (the BMOC maximum depth).
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value into a range of deepest-depth cells and
  /// returns the previously current cell.
  fn next_cell(&mut self) -> Option<Cell> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(&raw_value) => {
        // Trailing zeros (flag bit removed) encode twice the delta depth.
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove the flag and sentinel bits to recover the cell hash.
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last descendant hashes at the maximum depth.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(Cell {
          raw_value,
          depth: self.depth_max,
          hash: val,
          // Lowest bit of the raw value is the "fully covered" flag.
          is_full: (raw_value & 1_u64) == 1_u64,
        })
      }
    }
  }
}
2011
2012impl Iterator for BMOCFlatIterCell<'_> {
2013 type Item = Cell;
2014
2015 fn next(&mut self) -> Option<Cell> {
2016 if let Some(cell) = &self.curr_val {
2017 self.n_returned += 1;
2018 if cell.hash < self.curr_val_max {
2019 let new_cell = Cell {
2020 raw_value: cell.raw_value,
2021 depth: self.depth_max,
2022 hash: cell.hash + 1,
2023 is_full: cell.is_full,
2024 };
2025 self.curr_val.replace(new_cell)
2026 } else {
2027 self.next_cell()
2028 }
2029 } else {
2030 None
2031 }
2032 }
2033
2034 fn size_hint(&self) -> (usize, Option<usize>) {
2035 let n = self.deep_size - self.n_returned;
2036 (n, Some(n))
2037 }
2038}
2039
/// Iterator over the BMOC entries, decoding each raw value into a `Cell`.
pub struct BMOCIter<'a> {
  depth_max: u8, // maximum depth of the iterated BMOC (needed to decode cells)
  iter: Iter<'a, u64>, // iterator over the raw (encoded) values
}
2044
2045impl Iterator for BMOCIter<'_> {
2046 type Item = Cell;
2047
2048 fn next(&mut self) -> Option<Cell> {
2049 match self.iter.next() {
2050 None => None,
2051 Some(&raw_value) => Some(Cell::new(raw_value, self.depth_max)),
2052 }
2053 }
2054
2055 fn size_hint(&self) -> (usize, Option<usize>) {
2056 self.iter.size_hint()
2057 }
2058}
2059
2060impl<'a> IntoIterator for &'a BMOC {
2061 type Item = Cell;
2062 type IntoIter = BMOCIter<'a>;
2063
2064 fn into_iter(self) -> Self::IntoIter {
2065 BMOCIter {
2066 depth_max: self.depth_max,
2067 iter: self.entries.iter(),
2068 }
2069 }
2070}
2071
/// Encodes a cell into its raw BMOC value: the hash, a sentinel bit,
/// `2 * (depth_max - depth)` zero bits, and the `is_full` flag as lowest bit.
#[inline]
fn build_raw_value(depth: u8, hash: u64, is_full: bool, depth_max: u8) -> u64 {
  let twice_delta_depth = (depth_max - depth) << 1;
  // Append the sentinel bit marking the end of the hash part.
  let zuniq = (hash << 1) | 1_u64;
  // Shift past the flag bit plus the depth padding, then set the flag.
  (zuniq << (1 + twice_delta_depth)) | (is_full as u64)
}
2101
/// Moves the (`start_depth`, `start_hash`) cursor up by `delta_depth` levels,
/// pushing into `builder` (with the given `flag`) every remaining sibling cell
/// at each level on the way up, then moves the cursor to the next cell at the
/// reached depth.
fn go_up(
  start_depth: &mut u8,
  start_hash: &mut u64,
  delta_depth: u8,
  flag: bool,
  builder: &mut BMOCBuilderUnsafe,
) {
  for _ in 0_u8..delta_depth {
    // Complete the (up to three) remaining siblings of the current cell.
    let target_hash = *start_hash | 3_u64;
    for h in (*start_hash + 1)..=target_hash {
      builder.push(*start_depth, h, flag);
    }
    // Climb one level: the parent hash drops the two lowest bits.
    *start_hash >>= 2;
    *start_depth -= 1;
  }
  // Move to the next cell at the reached depth.
  *start_hash += 1;
}
2129
/// Moves the (`start_depth`, `start_hash`) cursor down to the cell
/// (`target_depth`, `target_hash`), pushing into `builder` (with the given
/// `flag`) every cell preceding the target at each intermediate depth.
fn go_down(
  start_depth: &mut u8,
  start_hash: &mut u64,
  target_depth: u8,
  target_hash: u64,
  flag: bool,
  builder: &mut BMOCBuilderUnsafe,
) {
  debug_assert!(target_depth >= *start_depth);
  let mut twice_dd = (target_depth - *start_depth) << 1;
  for d in *start_depth..=target_depth {
    // Hash of the target cell degraded at depth `d`.
    let target_h_at_d = target_hash >> twice_dd;
    // Push the cells preceding the target at this depth.
    for h in *start_hash..target_h_at_d {
      builder.push(d, h, flag);
    }
    if d != target_depth {
      // Descend one level: first child of the degraded target cell.
      *start_hash = target_h_at_d << 2;
      twice_dd -= 2;
    }
  }
  *start_depth = target_depth;
  *start_hash = target_hash;
}
2154
/// Bit-level writer producing a `CompressedMOC`: bits are appended from the
/// lowest to the highest bit of each successive byte.
pub struct CompressedMOCBuilder {
  moc: Vec<u8>, // output buffer; byte 0 stores the depth
  depth_max: u8, // depth of the MOC being compressed
  ibyte: usize, // index of the byte currently being written
  ibit: u8, // index, in [0, 8[, of the next bit to write in the current byte
}
2161
impl CompressedMOCBuilder {
  /// Builds a compressed-MOC builder; `capacity` is the expected number of
  /// data bytes (one extra byte is reserved at index 0 for the depth).
  fn new(depth_max: u8, capacity: usize) -> CompressedMOCBuilder {
    let mut moc = vec![0_u8; capacity + 1];
    moc[0] = depth_max;
    CompressedMOCBuilder {
      moc,
      depth_max,
      ibyte: 1,
      ibit: 0,
    }
  }

  /// Truncates the buffer to the bytes actually written and returns the
  /// resulting compressed MOC.
  #[allow(clippy::wrong_self_convention)]
  fn to_compressed_moc(mut self) -> CompressedMOC {
    self.moc.resize(
      // The current byte is kept only if it contains at least one written bit.
      if self.ibit == 0 {
        self.ibyte
      } else {
        self.ibyte + 1
      },
      0,
    );
    CompressedMOC {
      moc: self.moc.into_boxed_slice(),
      depth_max: self.depth_max,
    }
  }

  /// Appends a 0 bit: the buffer is zero-initialized, so only the cursor moves.
  fn push_0(&mut self) {
    self.ibyte += (self.ibit == 7) as usize;
    self.ibit += 1;
    self.ibit &= 7;
  }
  /// Appends a 1 bit, then advances the cursor.
  fn push_1(&mut self) {
    self.moc[self.ibyte] |= 1_u8 << self.ibit;
    self.push_0();
  }
  /// Appends the 2-bit code of an empty (not covered) node: 1 then 0.
  fn push_node_empty(&mut self) {
    self.push_1();
    self.push_0();
  }
  /// Appends the 2-bit code of a fully covered node: 1 then 1.
  fn push_node_full(&mut self) {
    self.push_1();
    self.push_1();
  }
  /// Appends the 1-bit code of a partially covered node: 0.
  fn push_node_partial(&mut self) {
    self.push_0();
  }
  /// Appends the 1-bit code of an empty leaf: 0.
  fn push_leaf_empty(&mut self) {
    self.push_0();
  }
  /// Appends the 1-bit code of a full leaf: 1.
  fn push_leaf_full(&mut self) {
    self.push_1();
  }
}
2218
/// Bit-level reader over compressed MOC bytes (byte 0, the depth, is skipped).
pub struct CompressedMOCDecompHelper<'a> {
  moc: &'a [u8], // compressed MOC bytes, depth byte included at index 0
  ibyte: usize, // index of the byte currently being read
  ibit: u8, // index, in [0, 8[, of the next bit to read in the current byte
}
2224
2225impl<'a> CompressedMOCDecompHelper<'a> {
2226 fn new(moc: &'a [u8]) -> CompressedMOCDecompHelper<'a> {
2227 CompressedMOCDecompHelper {
2228 moc,
2229 ibyte: 1,
2230 ibit: 0,
2231 }
2232 }
2233
2234 fn get(&mut self) -> bool {
2235 let r = self.moc[self.ibyte] & (1_u8 << self.ibit) != 0;
2236 self.ibyte += (self.ibit == 7) as usize;
2237 self.ibit += 1;
2238 self.ibit &= 7;
2239 r
2240 }
2241}
2242
/// Bit-serialized MOC: byte 0 stores the depth, the remaining bytes store the
/// bit stream produced by `CompressedMOCBuilder`.
pub struct CompressedMOC {
  moc: Box<[u8]>, // compressed bytes, depth byte included
  depth_max: u8, // depth of the MOC (copy of `moc[0]`)
}
2248
impl CompressedMOC {
  /// Depth of the compressed MOC.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Size of the compressed representation, in bytes (depth byte included).
  pub fn byte_size(&self) -> usize {
    self.moc.len()
  }

  /// Returns the standard base64 encoding of the compressed bytes.
  pub fn to_b64(&self) -> String {
    STANDARD.encode(&self.moc)
  }

  /// Decodes a compressed MOC from its base64 representation.
  ///
  /// NOTE(review): `decoded[0]` is read unconditionally, so an input decoding
  /// to an empty byte array panics — confirm inputs are never empty.
  pub fn from_b64(b64_encoded: String) -> Result<CompressedMOC, DecodeError> {
    let decoded = STANDARD.decode(b64_encoded)?;
    let depth_max = decoded[0];
    Ok(CompressedMOC {
      moc: decoded.into_boxed_slice(),
      depth_max,
    })
  }

  /// Decompresses this MOC into a BMOC (all cells flagged "fully covered").
  pub fn self_decompress(&self) -> BMOC {
    CompressedMOC::decompress(&self.moc)
  }

  /// Decompresses the given bytes (byte 0 being the depth) into a BMOC.
  /// Every decoded cell is pushed with the "fully covered" flag set.
  pub fn decompress(cmoc: &[u8]) -> BMOC {
    let depth_max = cmoc[0];
    let mut moc_builder = BMOCBuilderUnsafe::new(depth_max, 8 * (cmoc.len() - 1));
    let mut bits = CompressedMOCDecompHelper::new(cmoc);
    // Depth-first traversal cursor; stops after the 12 base cells are visited.
    let mut depth = 0_u8;
    let mut hash = 0_u64;
    while depth != 0 || hash != 12 {
      if bits.get() {
        // Bit 1: a full leaf (at depth_max), or a node whose next bit tells
        // full (1) / empty (0); only full cells are pushed.
        if depth == depth_max || bits.get() {
          moc_builder.push(depth, hash, true);
        }
        // Go to the next sibling, climbing up while the cell was the last one.
        while hash & 3 == 3 && depth > 0 {
          hash >>= 2;
          depth -= 1;
        }
        hash += 1;
      } else {
        if depth == depth_max {
          // Bit 0 at max depth: empty leaf; go to the next sibling.
          while hash & 3 == 3 && depth > 0 {
            hash >>= 2;
            depth -= 1;
          }
          hash += 1;
        } else {
          // Bit 0 on a node: partially covered; descend to its first child.
          debug_assert!(depth < depth_max);
          hash <<= 2;
          depth += 1;
        }
      }
    }
    moc_builder.to_bmoc()
  }
}
2316
2317#[cfg(test)]
2318mod tests {
2319 use super::*;
2320 use crate::nested::{cone_coverage_approx_custom, polygon_coverage};
2321
  /// Builds the compressed MOC of an empty coverage at the given depth.
  fn build_compressed_moc_empty(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    builder.to_bmoc().unwrap().compress_lossy()
  }
2326
  /// Builds the compressed MOC of a full-sky coverage at the given depth.
  fn build_compressed_moc_full(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    // Push all the 12 * 4^depth cells of the given depth.
    for icell in 0..12 * (1 << (depth << 1)) {
      builder.push(icell)
    }
    let bmoc = builder.to_bmoc().unwrap();
    eprintln!("Entries: {}", bmoc.entries.len());
    bmoc.compress_lossy()
  }
2336
  /// Debug helper: prints the BMOC as an Aladin `draw moc` command.
  fn to_aladin_moc(bmoc: &BMOC) {
    print!("draw moc {}/", bmoc.get_depth_max());
    for cell in bmoc.flat_iter() {
      print!("{}, ", cell);
    }
  }
2343
  /// Empty depth-0 MOC: 12 empty-leaf bits (all 0), padded to 2 data bytes.
  #[test]
  fn testok_compressed_moc_empty_d0() {
    let compressed = build_compressed_moc_empty(0);
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 0_u8, 0_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AAAA");
    // Round-trip: decompress then compress yields the same base64 string.
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }
2358
  /// Empty depth-1 MOC: 12 empty-node codes (1 then 0) = 24 bits, i.e. three
  /// bytes of 0b01010101 = 85 (bits are written LSB first).
  #[test]
  fn testok_compressed_moc_empty_d1() {
    let compressed = build_compressed_moc_empty(1);
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    assert_eq!(
      compressed.moc,
      vec![1_u8, 85_u8, 85_u8, 85_u8].into_boxed_slice()
    );
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AVVVVQ==");
    // Round-trip: decompress then compress yields the same base64 string.
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }
2376
  /// Full depth-0 MOC: 12 full-leaf bits (all 1), i.e. bytes 0xFF and 0x0F.
  #[test]
  fn testok_compressed_moc_full_d0() {
    let compressed = build_compressed_moc_full(0);
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 255_u8, 15_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AP8P");
    // Round-trip: decompress then compress yields the same base64 string.
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }
2392
  /// Full depth-1 MOC: 12 full-node codes (1 then 1) = 24 one bits.
  #[test]
  fn testok_compressed_moc_full_d1() {
    let compressed = build_compressed_moc_full(1);
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    eprintln!("{:?}", compressed.moc);
    eprintln!("{}", compressed.to_b64());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "Af///w==");
    // Round-trip: decompress then compress yields the same base64 string.
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }
2408
  /// The complement of the all-sky BMOC is the empty BMOC, and conversely.
  #[test]
  fn test_ok_allsky_and_empty_bmoc() {
    let bmoc_allsky = BMOC::new_allsky(18);
    assert_eq!(bmoc_allsky.entries.len(), 12);
    let bmoc_empty = BMOC::new_empty(18);
    assert_eq!(bmoc_empty.entries.len(), 0);
    assert_eq!(bmoc_allsky.not(), bmoc_empty);
    assert_eq!(bmoc_allsky, bmoc_empty.not());
  }
2418
  /// `not` is an involution: applying it twice gives back the original BMOC.
  #[test]
  fn test_ok_bmoc_not_round_trip() {
    // Interleaved (lon, lat) polygon vertices, in degrees.
    let mut poly_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    // De-interleave into (lon, lat) pairs.
    let vertices: Vec<(f64, f64)> = poly_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc_org = polygon_coverage(14, vertices.as_slice(), true);
    let moc_not = moc_org.not();
    let moc_out = moc_not.not();
    println!("len: {}", moc_org.size());
    println!("len: {}", moc_not.size());
    println!("len: {}", moc_out.size());
    assert_eq!(moc_org, moc_out);
  }
2442
  /// De Morgan coherency: `a | b` must equal `!(!a & !b)`.
  #[test]
  fn test_ok_bmoc_union_and_not() {
    // Interleaved (lon, lat) polygon vertices, in degrees.
    let mut poly1_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly1_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly2_vertices_deg: [f64; 8] = [
      272.630446, -19.234210, 272.637274, -19.248542, 272.638942, -19.231476, 272.630868,
      -19.226364,
    ];
    for v in poly2_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    // De-interleave into (lon, lat) pairs.
    let v1: Vec<(f64, f64)> = poly1_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly1_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v2: Vec<(f64, f64)> = poly2_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly2_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc1 = polygon_coverage(14, v1.as_slice(), true);
    let moc2 = polygon_coverage(14, v2.as_slice(), true);
    let not_moc1 = moc1.not();
    let not_moc2 = moc2.not();
    let union = moc1.or(&moc2);
    let not_inter = not_moc1.and(&not_moc2);
    let union_bis = not_inter.not();
    assert_eq!(union, union_bis);
  }
2488
  /// Coherency of set operations: since moc1 and moc2 lie inside moc3,
  /// `moc3 ^ (moc1 | moc2)`, `moc3 - (moc1 | moc2)` and
  /// `moc3 & !(moc1 | moc2)` must all be equal.
  #[test]
  fn test_ok_bmoc_xor_minus_coherency() {
    // Interleaved (lon, lat) polygon vertices, in degrees.
    let mut poly1_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly1_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly2_vertices_deg: [f64; 8] = [
      272.630446, -19.234210, 272.637274, -19.248542, 272.638942, -19.231476, 272.630868,
      -19.226364,
    ];
    for v in poly2_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly3_vertices_deg: [f64; 178] = [
      272.536719, -19.461249, 272.542612, -19.476380, 272.537389, -19.491509, 272.540192,
      -19.499823, 272.535455, -19.505218, 272.528024, -19.505216, 272.523437, -19.500298,
      272.514082, -19.503376, 272.502271, -19.500966, 272.488647, -19.490390, 272.481932,
      -19.490913, 272.476737, -19.486589, 272.487633, -19.455645, 272.500386, -19.444996,
      272.503003, -19.437557, 272.512303, -19.432436, 272.514132, -19.423973, 272.522103,
      -19.421523, 272.524511, -19.413250, 272.541021, -19.400024, 272.566264, -19.397500,
      272.564202, -19.389111, 272.569055, -19.383210, 272.588186, -19.386539, 272.593376,
      -19.381832, 272.596327, -19.370541, 272.624911, -19.358915, 272.629256, -19.347842,
      272.642277, -19.341020, 272.651322, -19.330424, 272.653174, -19.325079, 272.648903,
      -19.313708, 272.639616, -19.311098, 272.638128, -19.303083, 272.632705, -19.299839,
      272.627971, -19.289408, 272.628226, -19.276293, 272.633750, -19.270590, 272.615109,
      -19.241810, 272.614704, -19.221196, 272.618224, -19.215572, 272.630809, -19.209945,
      272.633540, -19.198681, 272.640711, -19.195292, 272.643028, -19.186751, 272.651477,
      -19.182729, 272.649821, -19.174859, 272.656782, -19.169272, 272.658933, -19.161883,
      272.678012, -19.159481, 272.689173, -19.176982, 272.689395, -19.183512, 272.678006,
      -19.204016, 272.671112, -19.206598, 272.664854, -19.203523, 272.662760, -19.211156,
      272.654435, -19.214434, 272.652969, -19.222085, 272.656724, -19.242136, 272.650071,
      -19.265092, 272.652868, -19.274296, 272.660871, -19.249462, 272.670041, -19.247807,
      272.675533, -19.254935, 272.673291, -19.273917, 272.668710, -19.279245, 272.671460,
      -19.287043, 272.667507, -19.293933, 272.669261, -19.300601, 272.663969, -19.307130,
      272.672626, -19.308954, 272.675225, -19.316490, 272.657188, -19.349105, 272.657638,
      -19.367455, 272.662447, -19.372035, 272.662232, -19.378566, 272.652479, -19.386871,
      272.645819, -19.387933, 272.642279, -19.398277, 272.629282, -19.402739, 272.621487,
      -19.398197, 272.611782, -19.405716, 272.603367, -19.404667, 272.586162, -19.422703,
      272.561792, -19.420008, 272.555815, -19.413012, 272.546500, -19.415611, 272.537427,
      -19.424213, 272.533081, -19.441402,
    ];
    for v in poly3_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    // De-interleave each array into (lon, lat) pairs.
    let v1: Vec<(f64, f64)> = poly1_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly1_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v2: Vec<(f64, f64)> = poly2_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly2_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v3: Vec<(f64, f64)> = poly3_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly3_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc1 = polygon_coverage(18, v1.as_slice(), true);
    let moc2 = polygon_coverage(18, v2.as_slice(), true);
    let moc3 = polygon_coverage(18, v3.as_slice(), true);

    let union12 = moc1.or(&moc2);
    let res1 = moc3.xor(&union12);
    let res2 = moc3.minus(&union12);
    let res3 = moc3.and(&union12.not());

    assert_eq!(res1, res2);
    assert_eq!(res1, res3);
  }
2567
  /// The complement of a cone coverage: its first flagged range is expected
  /// to have the flag set.
  #[test]
  fn test_ok_bmoc_not() {
    let lon = 13.158329_f64.to_radians();
    let lat = -72.80028_f64.to_radians();
    let radius = 5.64323_f64.to_radians();
    let depth = 6;
    let delta_depth = 5;

    let bmoc = cone_coverage_approx_custom(depth, delta_depth, lon, lat, radius);

    let not_bmoc = bmoc.not();

    println!("BMOC");
    for (range, flag) in bmoc.to_flagged_ranges() {
      println!("flag: {}; range: {:?}", flag, range);
    }
    println!("NOT BMOC");
    for (range, flag) in not_bmoc.to_flagged_ranges() {
      println!("flag: {}; range: {:?}", flag, range);
    }
    assert!(not_bmoc.to_flagged_ranges().first().unwrap().1)
2590 }
2591}