cdshealpix/nested/
bmoc.rs

1//! Definition of a BMOC, i.e. a MOC storing an additional flag telling if a cell is fully
2//! or partially covered by the MOC.
3//!
4//! So far, all BMOC logical operations (not, and, or, xor) are made from the BMOC representation.
5//! It is probably simpler and faster to work on ranges (but we have to handle the flag).
6
7use base64::{engine::general_purpose::STANDARD, DecodeError, Engine};
8
9use std::{
10  cmp::{max, Ordering},
11  slice::Iter,
12  vec::IntoIter,
13};
14
15use super::{super::nside_square_unsafe, hash, to_range};
16
/// A very basic and simple BMOC Builder: we push elements in it assuming that we provide them
/// in the right order, without duplicates and small cells included in larger cells.
#[derive(Debug)]
pub struct BMOCBuilderUnsafe {
  // (super) // removed because of external test
  /// Maximum depth (HEALPix order) of the BMOC being built; raw values are encoded at this depth.
  depth_max: u8,
  /// Raw cell values pushed so far; `None` once the builder content has been consumed.
  entries: Option<Vec<u64>>,
}
25
26impl BMOCBuilderUnsafe {
27  pub fn new(depth_max: u8, capacity: usize) -> BMOCBuilderUnsafe {
28    BMOCBuilderUnsafe {
29      depth_max,
30      entries: Some(Vec::with_capacity(capacity)),
31    }
32  }
33
34  /* Commented because not used so far
35  /// Clear the content and start a fresh builder with the given initial capacity.
36  #[warn(dead_code)]
37  pub fn re_init(&mut self, capacity: usize) -> &mut BMOCBuilderUnsafe {
38    self.entries = Some(Vec::with_capacity(capacity));
39    self
40  }*/
41
42  pub fn push(&mut self, depth: u8, hash: u64, is_full: bool) -> &mut BMOCBuilderUnsafe {
43    if let Some(ref mut v) = self.entries {
44      v.push(build_raw_value(depth, hash, is_full, self.depth_max));
45    // println!("push {:?}", Cell::new(*v.last().unwrap(), self.depth_max));
46    } else {
47      panic!("Empty builder, you have to re-init it before re-using it!");
48    }
49    self
50  }
51
52  fn push_raw_unsafe(&mut self, raw_value: u64) -> &mut BMOCBuilderUnsafe {
53    // println!("push {:?}", Cell::new(raw_value, self.depth_max));
54    if let Some(ref mut v) = self.entries {
55      v.push(raw_value);
56    } else {
57      panic!("Empty builder, you have to re-init it before re-using it!");
58    }
59    self
60  }
61
62  pub fn push_all(
63    &mut self,
64    depth: u8,
65    from_hash: u64,
66    to_hash: u64,
67    are_full: bool,
68  ) -> &mut BMOCBuilderUnsafe {
69    if let Some(ref mut v) = self.entries {
70      for h in from_hash..to_hash {
71        v.push(build_raw_value(depth, h, are_full, self.depth_max));
72      }
73    } else {
74      panic!("Empty builder, you have to re-init it before re-using it!");
75    }
76    self
77  }
78
79  #[allow(clippy::wrong_self_convention)]
80  pub fn to_bmoc(mut self) -> BMOC {
81    BMOC::create_unsafe(
82      self.depth_max,
83      self
84        .entries
85        .take()
86        .expect("Empty builder!")
87        .into_boxed_slice(),
88    )
89  }
90
91  /// We consider that the pushed elements are not ordered, but they come from a valid BMOC (i.e.
92  /// no cell included in another cell)
93  #[allow(clippy::wrong_self_convention)]
94  pub fn to_bmoc_from_unordered(mut self) -> BMOC {
95    let mut res = self.entries.take().expect("Empty builder!");
96    res.sort_unstable();
97    BMOC::create_unsafe(self.depth_max, res.into_boxed_slice())
98  }
99
  /// In-place "packing" of the entries: iteratively replaces each group of 4 sibling cells all
  /// flagged FULL by their parent cell (flagged FULL), repeating passes until one pass produces
  /// no change. Takes the entries out of the builder (panics if already consumed) and returns
  /// the packed vector.
  fn pack(&mut self) -> Vec<u64> {
    let mut entries = self.entries.take().expect("Empty builder!");
    // On-place pack
    let mut prev_to_index = 0_usize;
    let mut curr_to_index = entries.len();
    while prev_to_index != curr_to_index {
      // changes occurs
      prev_to_index = curr_to_index;
      let mut i_prev_moc = 0_usize; // read index (result of the previous pass)
      let mut i_curr_moc = 0_usize; // write index (result of the current pass)
      while i_prev_moc < prev_to_index {
        let mut curr_cell = entries[i_prev_moc];
        i_prev_moc += 1;
        let mut curr_cell_depth = get_depth(curr_cell, self.depth_max);
        let mut curr_cell_hash =
          get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
        // Look for the first cell of the larger cell (depth - 1)  (=> 2 last bits = 00), the cell must be FULL
        while i_prev_moc < prev_to_index
          && (curr_cell_depth == 0
            || is_partial(curr_cell)
            || is_not_first_cell_of_larger_cell(curr_cell_hash))
        {
          // Not a merge candidate: copy it forward (compacting if merges already occurred).
          if i_curr_moc != i_prev_moc {
            entries[i_curr_moc] = curr_cell;
            i_curr_moc += 1;
          }
          curr_cell = entries[i_prev_moc];
          i_prev_moc += 1;
          curr_cell_depth = get_depth(curr_cell, self.depth_max);
          curr_cell_hash = get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
        }
        // Look at the 3 siblings
        if i_prev_moc + 2 < prev_to_index
          && entries[i_prev_moc]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 1, true, self.depth_max)
          && entries[i_prev_moc + 1]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 2, true, self.depth_max)
          && entries[i_prev_moc + 2]
            == build_raw_value(curr_cell_depth, curr_cell_hash | 3, true, self.depth_max)
        {
          // The 4 siblings are FULL: replace them by their parent cell, flagged FULL.
          entries[i_curr_moc] = build_raw_value(
            curr_cell_depth - 1,
            curr_cell_hash >> 2,
            true,
            self.depth_max,
          );
          i_curr_moc += 1;
          i_prev_moc += 3;
        } else if i_curr_moc != i_prev_moc {
          entries[i_curr_moc] = curr_cell;
          i_curr_moc += 1;
        }
      }
      curr_to_index = i_curr_moc;
    }
    // We may find a better algorithm doing a single pass on the input MOC
    // Here the number of passes max = mocDepth - smallestDepthOfACellInOutputMoc
    // YEP: new idea: do it like a buffer with a cursor on the last "unmergeable" element!!
    entries.truncate(curr_to_index);
    entries
  }
161
  /// Re-encodes a raw value whose cell depth is already `<= new_depth` as if the BMOC maximum
  /// depth were `new_depth`: the cell (depth, hash) and its flag are preserved, only the
  /// sentinel-bit position moves.
  fn low_depth_raw_val_at_lower_depth(&self, raw_value: u64, new_depth: u8) -> u64 {
    debug_assert!(self.get_depth(raw_value) <= new_depth);
    debug_assert!(new_depth <= self.depth_max);
    let twice_delta_depth = (self.depth_max - new_depth) << 1;
    // The shift moves hash + sentinel bit; OR-ing bit 0 back restores the `is_full` flag.
    (raw_value >> twice_delta_depth) | (raw_value & 1_u64)
  }
168
  // We assume the given entries form a valid BMOC (already packed, ordered, ...)
  /// Degrades the given entries to `new_depth` (strictly lower than the BMOC maximum depth):
  /// cells deeper than `new_depth` are replaced by their `new_depth` ancestor flagged as
  /// partially covered (duplicate ancestors are merged), shallower cells are re-encoded as-is.
  /// Panics if `new_depth >= self.depth_max`.
  fn to_lower_depth(&self, new_depth: u8, mut entries: Vec<u64>) -> Vec<u64> {
    if new_depth >= self.depth_max {
      panic!("The given depth must be lower than the depth max of the BMOC");
    }
    let mut i_new = 0_usize;
    // Re-encode the leading cells of depth <= new_depth; stop at the first deeper cell,
    // remembering its ancestor hash at new_depth.
    let mut prev_hash_at_new_depth = loop {
      if i_new == entries.len() {
        // All cells have a depth <= new_depth
        break None;
      }
      let raw_value = entries[i_new];
      let depth = self.get_depth(raw_value);
      if depth <= new_depth {
        entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
        i_new += 1;
      } else {
        break Some(get_hash_from_delta_depth(
          raw_value,
          self.depth_max - new_depth,
        ));
      }
    };
    for i in (i_new + 1)..entries.len() {
      let raw_value = entries[i];
      let depth = self.get_depth(raw_value);
      if depth <= new_depth {
        // Flush the pending (partial) ancestor before copying the shallow cell.
        if prev_hash_at_new_depth.is_some() {
          entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
          i_new += 1;
        }
        entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
        i_new += 1;
      } else {
        let curr_hash_at_new_depth =
          get_hash_from_delta_depth(raw_value, self.depth_max - new_depth);
        if let Some(prev_val_at_new_depth) = prev_hash_at_new_depth {
          if prev_val_at_new_depth != curr_hash_at_new_depth {
            // New ancestor encountered: write the previous one and remember the new one.
            entries[i_new] = (prev_val_at_new_depth << 2) | 2_u64; // sentinel bit + flag = 0
            i_new += 1;
            prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
          }
        } else {
          prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
        }
      }
    }
    // Flush the last pending ancestor, if any.
    if prev_hash_at_new_depth.is_some() {
      entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
      i_new += 1;
    }
    entries.truncate(i_new);
    entries
  }
223
224  #[allow(clippy::wrong_self_convention)]
225  pub fn to_bmoc_packing(&mut self) -> BMOC {
226    let entries = self.pack();
227    BMOC::create_unsafe(self.depth_max, entries.into_boxed_slice())
228  }
229
230  #[allow(clippy::wrong_self_convention)]
231  pub fn to_lower_depth_bmoc(&mut self, new_depth: u8) -> BMOC {
232    let entries = self.entries.take().expect("Empty builder!");
233    let entries = self.to_lower_depth(new_depth, entries);
234    BMOC::create_unsafe(new_depth, entries.into_boxed_slice())
235  }
236
237  #[allow(clippy::wrong_self_convention)]
238  pub fn to_lower_depth_bmoc_packing(&mut self, new_depth: u8) -> BMOC {
239    let entries = self.pack();
240    let entries = self.to_lower_depth(new_depth, entries);
241    BMOC::create_unsafe(new_depth, entries.into_boxed_slice())
242  }
243
  #[inline]
  /// Returns the depth of the cell encoded in the given raw value (flag bit included).
  fn get_depth(&self, raw_value: u64) -> u8 {
    self.get_depth_no_flag(rm_flag(raw_value))
  }
  #[inline]
  /// Works both with no flag or with flag set to 0
  /// (the depth is deduced from the sentinel-bit position: 2 bits per depth level).
  fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
    self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
  }
253}
254
/// Result of a point-in-BMOC test (see `BMOC::test_coo`): since a BMOC cell may be only
/// partially covered, the answer is ternary.
pub enum Status {
  /// The point is in the MOC
  IN,
  /// The point is out of the MOC
  OUT,
  /// The point may be in or out of the MOC
  UNKNOWN,
}
263
/// Builder taking cell at the MOC maximum depth.
pub struct BMOCBuilderFixedDepth {
  /// Depth of every pushed cell (also the maximum depth of the BMOC being built).
  depth: u8,
  /// BMOC built so far, result of OR-ing the successive drained buffers.
  bmoc: Option<BMOC>,
  /// Flag assigned to every pushed cell.
  is_full: bool,
  /// Buffer of cell numbers at `depth`, drained into `bmoc` when full.
  buffer: Vec<u64>,
  /// `true` while the buffered hashes have been pushed in non-decreasing order.
  sorted: bool,
}
272
273impl BMOCBuilderFixedDepth {
274  ///  - `is_full`: the flag to be set for each cell number (I expect`true` to be used for example
275  ///    when building catalogues MOC.
276  /// The results of logical operations between BMOC having the flag of each of their cells
277  /// set to `true` must equal the results of regular MOC logical operations.
278  pub fn new(depth: u8, is_full: bool) -> BMOCBuilderFixedDepth {
279    BMOCBuilderFixedDepth::with_capacity(depth, is_full, 10_000_000)
280  }
281
282  pub fn with_capacity(depth: u8, is_full: bool, buff_capacity: usize) -> BMOCBuilderFixedDepth {
283    BMOCBuilderFixedDepth {
284      depth,
285      bmoc: None,
286      is_full,
287      buffer: Vec::with_capacity(buff_capacity),
288      sorted: true,
289    }
290  }
291
292  /// The hash must be at the builder depth
293  pub fn push(&mut self, hash: u64) {
294    if let Some(h) = self.buffer.last() {
295      if *h == hash {
296        return;
297      } else if self.sorted && *h > hash {
298        self.sorted = false;
299      }
300    }
301    self.buffer.push(hash);
302    if self.buffer.len() == self.buffer.capacity() {
303      self.drain_buffer();
304    }
305  }
306
307  #[allow(clippy::wrong_self_convention)]
308  pub fn to_bmoc(&mut self) -> Option<BMOC> {
309    // if self.buffer.len() > 0 {
310    self.drain_buffer();
311    // }
312    self.bmoc.take()
313  }
314
315  fn drain_buffer(&mut self) {
316    if !self.sorted {
317      // Sort and remove duplicates
318      self.buffer.sort_unstable();
319      self.buffer.dedup();
320    }
321    let new_bmoc = self.buff_to_bmoc();
322    self.clear_buff();
323    self.bmoc = Some(match self.bmoc.take() {
324      Some(prev_bmoc) => prev_bmoc.or(&new_bmoc),
325      None => new_bmoc,
326    })
327  }
328
  /// Merges the (sorted, deduplicated) buffered cells -- all at depth `self.depth` -- into the
  /// largest possible aligned lower-resolution cells, and builds a BMOC from the result
  /// (all cells carrying the builder `is_full` flag).
  fn buff_to_bmoc(&mut self) -> BMOC {
    let mut i = 0_usize; // read index in the buffer
    let mut k = 0_usize; // write index of the merged cells
    while i < self.buffer.len() {
      let h = self.buffer[i];
      let sequence_len = self.largest_lower_cell_sequence_len(h, &self.buffer[i..]);
      /*{
        // Look at the maximum number of cell that could be merge if the hash is the first of a cell
        let delta_depth = (h.trailing_zeros() >> 1).min(self.depth); // low_res_cell_depth = self.depth - delta_depth
        let num_cells = 1_usize << (dd << 1); // number of depth self.depth cells in the low_res_cell = (2^dd)^2 = 2^(2*dd)
        // Look for a sequence
        let mut j = i + 1;
        let mut expected_h = h + 1_u64;

        while j < self.buffer.len() && sequence_len < num_cells && self.buffer[j] == expected_h {
          j += 1;
          sequence_len = 1;
          expected_h += 1;
        }
      }*/
      // Look at the actual low_res_cell the sequence correspond to
      let delta_depth = sequence_len.next_power_of_two();
      let delta_depth = if delta_depth > sequence_len {
        // NOTE(review): `>> 2` computes trailing_zeros / 4 while the inline comment suggests
        // (trailing_zeros - 1) / 2; for e.g. sequence_len = 5 this under-merges (the result is
        // still a valid BMOC, just less packed) -- TODO confirm intent.
        delta_depth.trailing_zeros() >> 2 // take previous value and divide by 2
      } else {
        debug_assert_eq!(delta_depth, sequence_len);
        delta_depth.trailing_zeros() >> 1 // divide by 2
      } as u8;
      let twice_dd = delta_depth << 1;
      let sequence_len = 1_usize << twice_dd;
      // Write the value
      self.buffer[k] = build_raw_value(
        self.depth - delta_depth,
        h >> twice_dd,
        self.is_full,
        self.depth,
      );
      k += 1;
      i += sequence_len;
    }
    // self.buffer.truncate(k);
    BMOC::create_unsafe_copying(self.depth, &self.buffer[0..k])
  }
372
  #[inline]
  /// Returns the length of the longest run `h, h+1, h+2, ...` at the beginning of `entries`
  /// (which is assumed to start with `h`), capped by the number of depth-`self.depth` cells
  /// contained in the largest aligned low-resolution cell starting at `h`.
  fn largest_lower_cell_sequence_len(&self, mut h: u64, entries: &[u64]) -> usize {
    // Look for the maximum number of cells that could be merged if the hash is the first of a cell
    let dd = ((h.trailing_zeros() >> 1) as u8).min(self.depth); // low_res_cell_depth = self.depth - delta_depth
    let n = 1_usize << (dd << 1); // number of depth self.depth cells in the low_res_cell = (2^dd)^2 = 2^(2*dd)
                                  // Look for a sequence
    let n = n.min(entries.len());
    /*for i in 1..n {
      h += 1;
      if entries[i] != h {
        return i;
      }
    }*/
    for (i, e) in entries.iter().enumerate().take(n).skip(1) {
      h += 1;
      // First break in the consecutive run: `i` elements were consecutive.
      if *e != h {
        return i;
      }
    }
    n
  }
394
395  fn clear_buff(&mut self) {
396    self.sorted = true;
397    self.buffer.clear();
398  }
399}
400
/// Structure defining a simple BMOC.
/// Four different iterators are available:
/// - `bmoc.iter() -> Iterator<u64>` : iterates on the raw value stored in the BMOC (the ordering
///    follows the z-order-curve order).
/// - `bmoc.into_iter() -> Iterator<Cell>`: same as `iter()` except that it returns Cells,
///    i.e. decoded raw value containing the `depth`, `order` and `flag`.
/// - `bmoc.flat_iter() -> Iterator<u64>`: iterates on all the cell number at the maximum depth, in
///    ascending order (flag information is lost).
/// - `bmoc.flat_iter_cell() -> Iterator<Cell>` same as `flat_iter()` but conserving the `flag`
///    information (and the depth which must always equal the BMOC depth).
#[derive(Debug, PartialEq, Eq)]
pub struct BMOC {
  /// Maximum depth (HEALPix order) the raw entries are encoded at.
  depth_max: u8,
  /// Sorted raw cell values (hash + sentinel bit + `is_full` flag); see `Cell::new` for decoding.
  pub entries: Box<[u64]>,
}
416
/// A BMOC cell, i.e. the decoded form of a raw value: depth, hash and coverage flag.
#[derive(Debug)]
pub struct Cell {
  /// The encoded value this cell was decoded from.
  pub raw_value: u64,
  /// Depth of the cell.
  pub depth: u8,
  /// Hash value (cell number) at the cell depth.
  pub hash: u64,
  /// `true` if the cell is fully covered by the MOC, `false` if partially covered.
  pub is_full: bool,
}
424
impl Cell {
  /// Decodes the given raw value, assuming it is encoded at the given BMOC maximum depth.
  fn new(raw_value: u64, depth_max: u8) -> Cell {
    // Extract the flag
    let is_full = (raw_value & 1_u64) == 1_u64;
    // Remove the flag bit, then divide by 2 (2 bits per level)
    let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
    // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
    let hash = raw_value >> (2 + (delta_depth << 1));
    let depth = depth_max - delta_depth;
    Cell {
      raw_value,
      depth,
      hash,
      is_full,
    }
  }
}
442
443impl BMOC {
444  pub fn new_empty(depth: u8) -> Self {
445    let builder = BMOCBuilderUnsafe::new(depth, 0);
446    builder.to_bmoc()
447  }
448
449  pub fn new_allsky(depth: u8) -> Self {
450    let mut builder = BMOCBuilderUnsafe::new(depth, 12);
451    builder.push_all(0, 0, 12, true);
452    builder.to_bmoc()
453  }
454
  /// Returns the number of cells (entries) in the BMOC.
  pub fn size(&self) -> usize {
    self.entries.len()
  }
458
  /// Builds a BMOC directly from raw entries, without any check.
  /// We suppose here that the entries are already sorted (ASC natural ordering) with
  /// no duplicates and no small cells included into larger one's.
  pub(super) fn create_unsafe(depth_max: u8, entries: Box<[u64]>) -> BMOC {
    BMOC { depth_max, entries }
  }
464
465  pub(super) fn create_unsafe_copying(depth_max: u8, entries: &[u64]) -> BMOC {
466    let mut entries_copy = Vec::with_capacity(entries.len());
467    for e in entries {
468      entries_copy.push(*e);
469    }
470    BMOC {
471      depth_max,
472      entries: entries_copy.into_boxed_slice(),
473    }
474  }
475
  /// Returns the maximum depth (HEALPix order) of the BMOC.
  pub fn get_depth_max(&self) -> u8 {
    self.depth_max
  }
479
480  pub fn equals(&self, other: &BMOC) -> bool {
481    if self.depth_max == other.depth_max && self.entries.len() == other.entries.len() {
482      for (r1, r2) in self.iter().zip(other.iter()) {
483        if r1 != r2 {
484          return false;
485        }
486      }
487      return true;
488    }
489    false
490  }
491
492  pub fn assert_equals(&self, other: &BMOC) {
493    if self.depth_max == other.depth_max {
494      for (r1, r2) in self.iter().zip(other.iter()) {
495        if *r1 != *r2 {
496          panic!(
497            "Left: {:?}; Right: {:?}",
498            self.from_raw_value(*r1),
499            other.from_raw_value(*r2)
500          );
501        }
502      }
503      if self.entries.len() != other.entries.len() {
504        panic!("Lengths are different");
505      }
506    } else {
507      panic!("Depths are different");
508    }
509  }
510
  /// Test the given point and return its "Status": in, out of the MOC or maybe.
  pub fn test_coo(&self, lon: f64, lat: f64) -> Status {
    // Encode the deepest cell containing the point, flagged FULL, to binary-search for it.
    let h_raw = build_raw_value(
      self.depth_max,
      hash(self.depth_max, lon, lat),
      true,
      self.depth_max,
    );
    match self.entries.binary_search(&h_raw) {
      // Exact match: a depth_max cell contains the point.
      Ok(i) => {
        if is_partial(self.entries[i]) {
          Status::UNKNOWN
        } else {
          Status::IN
        }
      }
      Err(i) => {
        let cell = Cell::new(h_raw, self.depth_max);
        // look in next or previous cells
        if i > 0 && is_in(&self.from_raw_value(self.entries[i - 1]), &cell) {
          if is_partial(self.entries[i - 1]) {
            Status::UNKNOWN
          } else {
            Status::IN
          }
        } else if i < self.entries.len() && is_in(&self.from_raw_value(self.entries[i]), &cell) {
          if is_partial(self.entries[i]) {
            Status::UNKNOWN
          } else {
            Status::IN
          }
        } else {
          Status::OUT
        }
      }
    }
  }
548
549  /// Returns the BMOC complement:
550  /// - cells with flag set to 1 (fully covered) are removed
551  /// - cells with flag set to 0 (partially covered) are kept
552  /// - empty cells are added with flag set to 1
553  /// The method as been tested when all flags are `is_full` (i.e. regular MOC case).
554  pub fn not(&self) -> BMOC {
555    // Worst case: only 1 sub-cell by cell in the MOC (+11 for depth 0)
556    let mut builder = BMOCBuilderUnsafe::new(self.depth_max, 3 * self.entries.len() + 12);
557    // Empty MOC, easy
558    if self.entries.len() == 0 {
559      for h in 0..12_u64 {
560        builder.push(0_u8, h, true);
561      }
562      return builder.to_bmoc();
563    }
564    // Real case
565    let mut d = 0_u8;
566    let mut h = 0_u64;
567    // Go down to first cell
568    let mut cell = self.from_raw_value(self.entries[0]);
569    go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
570    if !cell.is_full {
571      builder.push_raw_unsafe(cell.raw_value);
572    }
573    // Between first and last
574    for i in 1..self.entries.len() {
575      cell = self.from_raw_value(self.entries[i]);
576      let dd = dd_4_go_up(d, h, cell.depth, cell.hash);
577      go_up(&mut d, &mut h, dd, true, &mut builder);
578      go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
579      if !cell.is_full {
580        builder.push_raw_unsafe(cell.raw_value);
581      }
582    }
583    // After last
584    let delta_depth = d;
585    go_up(&mut d, &mut h, delta_depth, true, &mut builder); // go up to depth 0
586    for h in h..12 {
587      // Complete with base cells if needed
588      builder.push(0_u8, h, true);
589    }
590    builder.to_bmoc()
591  }
592
593  /// Go to the next hash value:
594  /// - if the input hash is not the last one of the super-cell
595  ///   (the cell of depth deph - 1 the hash belongs to), the result is simply
596  ///   - output_depth = input_depth
597  ///   - output_hash = input_hash + 1
598  /// - else, the depth is changed (we go up) until the hash is not the last of the super-cell
599  ///   and the result is:
600  ///   - output_depth < input_depth
601  ///   - output_hash = input_hash_at_outpu_depth + 1
602  /*fn go_next(&self, start_depth: &mut u8, start_hash: &mut u64) {
603    while *start_depth > 0 && ((*start_hash & 3_u64) == 3_u64) {
604      *start_depth -= 1;
605      *start_hash >>= 2;
606    }
607    *start_hash += 1;
608  }*/
609
  /// Returns the intersection of this BMOC with the given BMOC:
  /// - all non overlapping cells are removed
  /// - when two cells are overlapping, the overlapping part is kept
  ///   - the value of the flag is the result of a logical AND between the flags of the merged cells.
  /// The method has been tested when all flags are `is_full` (i.e. regular MOC case).
  pub fn and(&self, other: &BMOC) -> BMOC {
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    // Merge-like walk over the two sorted cell sequences.
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    // We have 9 cases to take into account:
    // -  3: dL == dR, dL < dR and dR < dL
    // - x3: hL == hR, hL < hR and hR < hL
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // Left cell is the larger one: compare with the right cell ancestor at the left depth.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          match l.hash.cmp(&hr_at_dl) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              // The right (smaller) cell is inside the left one: keep it, AND-ing the flags.
              debug_assert_eq!(l.hash, hr_at_dl);
              builder.push(r.depth, r.hash, r.is_full && l.is_full);
              right = it_right.next()
            }
          }
        }
        Ordering::Greater => {
          // Right cell is the larger one: compare with the left cell ancestor at the right depth.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          match hl_at_dr.cmp(&r.hash) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              debug_assert_eq!(hl_at_dr, r.hash);
              builder.push(l.depth, l.hash, r.is_full && l.is_full);
              left = it_left.next()
            }
          }
        }
        Ordering::Equal => {
          // Same depth: only strictly equal cells overlap.
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => left = it_left.next(),
            Ordering::Greater => right = it_right.next(),
            Ordering::Equal => {
              debug_assert_eq!(l.hash, r.hash);
              builder.push(l.depth, l.hash, r.is_full && l.is_full);
              left = it_left.next();
              right = it_right.next()
            }
          }
        }
      }
    }
    builder.to_bmoc()
  }
670
671  /* Try making operations with as few if as possible, playing on indices
672  fn and_v2(&self, other: &BMOC) -> BMOC {
673    let mut builder = BMOCBuilderUnsafe::new(
674      max(self.depth_max, other.depth_max),
675      max(self.entries.len(), other.entries.len())
676    );
677    let mut left = self.entries;
678    let mut right = other.entries;
679    let mut ileft = 0_usize;
680    let mut iright = 0_usize;
681
682  }
683  */
684
  /// Returns the union of this BMOC with the given BMOC:
  /// - all non overlapping cells in both BMOCs are kept
  /// - overlapping cells are merged, the value of the flag is the result of a logical OR between
  ///   the flags of the merged cells.
  /// The method has been tested when all flags are `is_full` (i.e. regular MOC case).
  pub fn or(&self, other: &BMOC) -> BMOC {
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    // Merge-like walk over the two sorted cell sequences.
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    // We have 9 cases to take into account:
    // -  3: dL == dR, dL < dR and dR < dL
    // - x3: hL == hR, hL < hR and hR < hL
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // Left cell is the larger one: compare with the right cell ancestor at the left depth.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          if l.hash < hr_at_dl {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if l.hash > hr_at_dl {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if l.is_full {
            // The large cell is FULL: it absorbs all the right cells it contains.
            debug_assert_eq!(l.hash, hr_at_dl);
            builder.push(l.depth, l.hash, l.is_full);
            right = consume_while_overlapped(l, &mut it_right);
            left = it_left.next();
          } else {
            debug_assert_eq!(l.hash, hr_at_dl);
            debug_assert!(!l.is_full);
            let mut is_overlapped = false;
            right = consume_while_overlapped_and_partial(l, &mut it_right, &mut is_overlapped);
            if is_overlapped {
              // A FULL sub-cell was met: split the large partial cell around the FULL sub-cells.
              right = self.not_in_cell_4_or(l, right.unwrap(), &mut it_right, &mut builder);
            } else {
              // all flags set to 0 => put large cell with flag  = 0
              builder.push(l.depth, l.hash, false);
            }
            left = it_left.next();
          }
        }
        Ordering::Greater => {
          // Symmetric of the previous case, the right cell being the larger one.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          if hl_at_dr < r.hash {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if hl_at_dr > r.hash {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if r.is_full {
            debug_assert_eq!(hl_at_dr, r.hash);
            builder.push(r.depth, r.hash, r.is_full);
            left = consume_while_overlapped(r, &mut it_left);
            right = it_right.next();
          } else {
            debug_assert_eq!(hl_at_dr, r.hash);
            debug_assert!(!r.is_full);
            let mut is_overlapped = false;
            left = consume_while_overlapped_and_partial(r, &mut it_left, &mut is_overlapped);
            if is_overlapped {
              left = self.not_in_cell_4_or(r, left.unwrap(), &mut it_left, &mut builder);
            } else {
              // all flags set to 0 => put large cell with flag  = 0
              builder.push(r.depth, r.hash, false);
            }
            right = it_right.next();
          }
        }
        Ordering::Equal => {
          // Same depth: cells either coincide (OR the flags) or do not overlap at all.
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => {
              builder.push(l.depth, l.hash, l.is_full);
              left = it_left.next();
            }
            Ordering::Greater => {
              builder.push(r.depth, r.hash, r.is_full);
              right = it_right.next();
            }
            Ordering::Equal => {
              debug_assert_eq!(l.hash, r.hash);
              builder.push(l.depth, l.hash, r.is_full || l.is_full);
              left = it_left.next();
              right = it_right.next();
            }
          }
        }
      }
    }
    // Copy the remaining cells of the non-exhausted iterator (if any).
    while let Some(l) = &left {
      debug_assert!(right.is_none());
      builder.push(l.depth, l.hash, l.is_full);
      left = it_left.next();
    }
    while let Some(r) = &right {
      debug_assert!(left.is_none());
      builder.push(r.depth, r.hash, r.is_full);
      right = it_right.next();
    }
    builder.to_bmoc_packing()
  }
791
  /// Helper of `or`, called when `low_resolution` is a partial cell overlapped by at least one
  /// FULL cell `c` of the other BMOC: pushes the FULL sub-cells and fills the rest of
  /// `low_resolution` with partial (flag 0) cells.
  /// Returns the first cell of `iter` no longer overlapping `low_resolution` (if any)
  /// -- presumably so the caller can resume its merge walk; confirm against the `or` call sites.
  fn not_in_cell_4_or(
    &self,
    low_resolution: &Cell,
    mut c: Cell,
    iter: &mut BMOCIter,
    builder: &mut BMOCBuilderUnsafe,
  ) -> Option<Cell> {
    let mut d = low_resolution.depth;
    let mut h = low_resolution.hash;
    debug_assert!(c.is_full);
    go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
    builder.push(c.depth, c.hash, true);
    let mut is_overlapped = false;
    let mut cell;
    // `while { body; cond }` idiom: keep consuming as long as a FULL overlapping cell is found.
    while {
      cell = consume_while_overlapped_and_partial(low_resolution, iter, &mut is_overlapped);
      is_overlapped
    } {
      c = cell.unwrap(); // if flag => right is not None
      let dd = dd_4_go_up(d, h, c.depth, c.hash);
      go_up(&mut d, &mut h, dd, false, builder);
      go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
      builder.push(c.depth, c.hash, true);
    }
    // Complete with partial cells up to the end of the low_resolution cell.
    let dd = d - low_resolution.depth;
    go_up(&mut d, &mut h, dd, false, builder);
    go_down(
      &mut d,
      &mut h,
      low_resolution.depth,
      low_resolution.hash + 1,
      false,
      builder,
    );
    cell
  }
828
  /// Returns the symmetric difference of this BMOC with the given BMOC:
  /// - all non overlapping cells in both BMOCs are kept
  /// - when two cells are overlapping, the overlapping part is:
  ///   - removed if both flags = 1
  ///   - kept if one of the flags = 0 (since 0 means partially covered, but we do not know
  ///     which sub-part is actually covered)
  /// The method has been tested when all flags are `is_full` (i.e. regular MOC case).
  pub fn xor(&self, other: &BMOC) -> BMOC {
    // Entries of both BMOCs are sorted along the z-order curve (see `build_raw_value`),
    // so we can merge them in a single linear pass, like the merge step of a merge sort.
    let mut builder = BMOCBuilderUnsafe::new(
      max(self.depth_max, other.depth_max),
      max(self.entries.len(), other.entries.len()),
    );
    let mut it_left = self.into_iter();
    let mut it_right = other.into_iter();
    let mut left = it_left.next();
    let mut right = it_right.next();
    // We have 9 cases to take into account:
    // -  3: dL == dR, dL < dR and dR < dL
    // - x3: hL == hR, hL < hR and hR < hL
    while let (Some(l), Some(r)) = (&left, &right) {
      match l.depth.cmp(&r.depth) {
        Ordering::Less => {
          // The l cell is the larger one: degrade the r hash at the l depth
          // (2 bits per depth level) to compare positions along the z-order curve.
          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
          if l.hash < hr_at_dl {
            // l entirely before r: keep l as it is.
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if l.hash > hr_at_dl {
            // r entirely before l: keep r as it is.
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if l.is_full {
            // r (and possibly following right cells) is inside the fully covered l:
            // keep only the parts of l which are not covered by right cells.
            debug_assert_eq!(l.hash, hr_at_dl);
            right = self.not_in_cell_4_xor(l, r, &mut it_right, &mut builder);
            left = it_left.next();
          } else {
            // l partially covered: keep it whole (we do not know which sub-part is covered)
            // and discard all the right cells it contains.
            debug_assert_eq!(l.hash, hr_at_dl);
            debug_assert!(!l.is_full);
            builder.push(l.depth, l.hash, l.is_full);
            right = consume_while_overlapped(l, &mut it_right);
            left = it_left.next();
          }
        }
        Ordering::Greater => {
          // Symmetric to the previous case: the r cell is the larger one.
          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
          if hl_at_dr < r.hash {
            builder.push(l.depth, l.hash, l.is_full);
            left = it_left.next();
          } else if hl_at_dr > r.hash {
            builder.push(r.depth, r.hash, r.is_full);
            right = it_right.next();
          } else if r.is_full {
            debug_assert_eq!(hl_at_dr, r.hash);
            left = self.not_in_cell_4_xor(r, l, &mut it_left, &mut builder);
            right = it_right.next();
          } else {
            debug_assert_eq!(hl_at_dr, r.hash);
            debug_assert!(!r.is_full);
            builder.push(r.depth, r.hash, r.is_full);
            left = consume_while_overlapped(r, &mut it_left);
            right = it_right.next();
          }
        }
        Ordering::Equal => {
          // Same depth: compare the hash values directly.
          debug_assert_eq!(l.depth, r.depth);
          match l.hash.cmp(&r.hash) {
            Ordering::Less => {
              builder.push(l.depth, l.hash, l.is_full);
              left = it_left.next();
            }
            Ordering::Greater => {
              builder.push(r.depth, r.hash, r.is_full);
              right = it_right.next();
            }
            Ordering::Equal => {
              // Exact same cell: removed when both are fully covered,
              // kept (flagged partial) otherwise.
              debug_assert_eq!(l.hash, r.hash);
              let both_fully_covered = r.is_full && l.is_full;
              if !both_fully_covered {
                builder.push(l.depth, l.hash, both_fully_covered);
              }
              left = it_left.next();
              right = it_right.next();
            }
          }
        }
      }
    }
    // One of the iterators is exhausted: flush the remaining cells of the other one.
    while let Some(l) = &left {
      debug_assert!(right.is_none());
      builder.push(l.depth, l.hash, l.is_full);
      left = it_left.next();
    }
    while let Some(r) = &right {
      debug_assert!(left.is_none());
      builder.push(r.depth, r.hash, r.is_full);
      right = it_right.next();
    }
    // Pack sibling cells into their parent cell when possible.
    builder.to_bmoc_packing()
  }
925
  // Adds to `builder` the elements of the `low_resolution` cell which are not in the `c` cell,
  // nor in the following cells of `iter` also contained in `low_resolution`
  // (partially covered inner cells are kept, flagged partial, see the `xor` contract).
  // Returns the first cell of `iter` which is not inside `low_resolution` (or `None`).
  fn not_in_cell_4_xor(
    &self,
    low_resolution: &Cell,
    c: &Cell,
    iter: &mut BMOCIter,
    builder: &mut BMOCBuilderUnsafe,
  ) -> Option<Cell> {
    // (d, h) is the current (depth, hash) position of the tree traversal.
    let mut d = low_resolution.depth;
    let mut h = low_resolution.hash;
    // Push all the cells from the start of `low_resolution` down to (but excluding) `c`.
    go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
    if !c.is_full {
      // A partially covered inner cell is kept, flagged partial.
      builder.push(c.depth, c.hash, false);
    }
    let mut cell = iter.next();
    while let Some(c) = &cell {
      if !is_in(low_resolution, c) {
        break;
      }
      // Go up to the deepest common ancestor, then down to the next inner cell.
      let dd = dd_4_go_up(d, h, c.depth, c.hash);
      go_up(&mut d, &mut h, dd, true, builder);
      go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
      if !c.is_full {
        builder.push(c.depth, c.hash, false);
      }
      cell = iter.next()
    }
    // Complete the coverage up to the end of `low_resolution`, i.e. up to the cell
    // following it at its own depth.
    let dd = d - low_resolution.depth;
    go_up(&mut d, &mut h, dd, true, builder);
    go_down(
      &mut d,
      &mut h,
      low_resolution.depth,
      low_resolution.hash + 1,
      true,
      builder,
    );
    cell
  }
965
966  /// Returns the difference of this BMOC (left) with the given BMOC (right):
967  /// - all non overlapping cells of this (left) BMOC are kept
968  /// - non overlapping cells of the other (right) BMOC are removed
969  ///   if full, and kept if partially covered (since A MINUS B = A AND (NOT(B))
970  /// - when two cells are overlapping, the overlapping part is:
971  ///   - removed if both flags = 1
972  ///   - kept if one of the flags = 0 (since 0 meas partially covered but O don't know which part)
973  /// Poor's man implementation: A MINUS B = A AND NOT(B)
974  pub fn minus(&self, other: &BMOC) -> BMOC {
975    let mut builder = BMOCBuilderUnsafe::new(
976      max(self.depth_max, other.depth_max),
977      max(self.entries.len(), other.entries.len()),
978    );
979    let mut it_left = self.into_iter();
980    let mut it_right = other.into_iter();
981    let mut left = it_left.next();
982    let mut right = it_right.next();
983    // We have 9 cases to take into account:
984    // -  3: dL == dR, dL < dR and dR < dL
985    // - x3: hL == hR, hL < hR and hR < hL
986    while let (Some(l), Some(r)) = (&left, &right) {
987      match l.depth.cmp(&r.depth) {
988        Ordering::Less => {
989          // The l cell is larger than the r cell
990          // - degrade r cell at the l cell depth
991          let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
992          if l.hash < hr_at_dl {
993            builder.push(l.depth, l.hash, l.is_full);
994            left = it_left.next();
995          } else if l.hash > hr_at_dl {
996            right = it_right.next();
997          } else if l.is_full {
998            debug_assert_eq!(l.hash, hr_at_dl);
999            // add elements of the l cell which are not in common with the r cell
1000            right = self.not_in_cell_4_xor(l, r, &mut it_right, &mut builder);
1001            left = it_left.next();
1002          } else {
1003            debug_assert_eq!(l.hash, hr_at_dl);
1004            debug_assert!(!l.is_full);
1005            builder.push(l.depth, l.hash, false);
1006            right = consume_while_overlapped(l, &mut it_right);
1007            left = it_left.next();
1008          }
1009        }
1010        Ordering::Greater => {
1011          // The r cell is larger than the l cell
1012          // - degrade l cell at the r cell depth
1013          let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
1014          if hl_at_dr < r.hash {
1015            builder.push(l.depth, l.hash, l.is_full);
1016            left = it_left.next();
1017          } else if hl_at_dr > r.hash {
1018            // remove the r cell
1019            right = it_right.next();
1020          } else if !r.is_full || !l.is_full {
1021            debug_assert_eq!(hl_at_dr, r.hash);
1022            builder.push(l.depth, l.hash, false);
1023            left = it_left.next();
1024          }
1025        }
1026        Ordering::Equal => {
1027          debug_assert_eq!(l.depth, r.depth);
1028          match l.hash.cmp(&r.hash) {
1029            Ordering::Less => {
1030              builder.push(l.depth, l.hash, l.is_full);
1031              left = it_left.next();
1032            }
1033            Ordering::Greater => {
1034              right = it_right.next();
1035            }
1036            Ordering::Equal => {
1037              debug_assert_eq!(l.hash, r.hash);
1038              let both_fully_covered = r.is_full && l.is_full;
1039              if !both_fully_covered {
1040                builder.push(l.depth, l.hash, both_fully_covered);
1041              }
1042              left = it_left.next();
1043              right = it_right.next();
1044            }
1045          }
1046        }
1047      }
1048    }
1049    while let Some(l) = &left {
1050      debug_assert!(right.is_none());
1051      builder.push(l.depth, l.hash, l.is_full);
1052      left = it_left.next();
1053    }
1054    builder.to_bmoc_packing()
1055  }
1056
  /// Decodes the given raw value into a `Cell` (depth, hash, flag), using this
  /// BMOC's maximum depth.
  pub fn from_raw_value(&self, raw_value: u64) -> Cell {
    Cell::new(raw_value, self.depth_max)
  }
1060
1061  /// Returns the number of cells at depth `depth_max` the moc contains, i.e.
1062  /// the sum for each cell of the number of cells at depth `depth_max`.
1063  pub fn deep_size(&self) -> usize {
1064    let mut sum = 0_usize;
1065    for &raw_value in self.entries.iter() {
1066      let depth = self.get_depth(raw_value);
1067      sum += nside_square_unsafe(self.depth_max - depth) as usize;
1068    }
1069    sum
1070  }
1071
  /// Iterator on the BMOC raw values.
  /// See `from_raw_value` to extract the (depth, hash, flag) information from a raw value.
  pub fn iter(&self) -> Iter<u64> {
    self.entries.iter()
  }
1077
  /// Returns an iterator iterating over all cells at the BMOC maximum depth
  /// (the iteration is made in the natural cell order).
  /// Only the hash values are returned, the flags are lost.
  pub fn flat_iter(&self) -> BMOCFlatIter {
    BMOCFlatIter::new(self.depth_max, self.deep_size(), self.entries.iter())
  }
1083
  /// Returns an iterator iterating over all cells at the BMOC maximum depth
  /// (the iteration is made in the natural cell order).
  /// Consumes the BMOC (the entries are moved into the iterator).
  pub fn into_flat_iter(self) -> BMOCIntoFlatIter {
    BMOCIntoFlatIter::new(
      self.depth_max,
      self.deep_size(),
      self.entries.into_vec().into_iter(),
    )
  }
1093
  /// Returns an iterator iterating over all cells at the BMOC maximum depth
  /// (the iteration is made in the natural cell order).  
  /// Contrary to [flat_iter](fn.flat_iter.html), the full cell information (the raw BMOC value
  /// it belongs to, its flag) is kept.
  pub fn flat_iter_cell(&self) -> BMOCFlatIterCell {
    BMOCFlatIterCell::new(self.depth_max, self.deep_size(), self.entries.iter())
  }
1101
1102  /// Returns an array containing all the BMOC cells flattened at the maximum depth.
1103  /// This is an utility methods basically calling `deep_size` to initialize an array
1104  /// and `flat_iter` to retrieve all cells.
1105  pub fn to_flat_array(&self) -> Box<[u64]> {
1106    let mut res: Vec<u64> = Vec::with_capacity(self.deep_size());
1107    for cell in self.flat_iter() {
1108      res.push(cell);
1109    }
1110    res.into_boxed_slice()
1111  }
1112
  /// Returns the depth coded in the given raw value (the flag bit is removed first).
  fn get_depth(&self, raw_value: u64) -> u8 {
    self.get_depth_no_flag(rm_flag(raw_value))
  }
1116
  /// Works both with no flag or with flag set to 0.
  /// The number of trailing zero bit pairs (below the sentinel bit) equals
  /// `depth_max` minus the depth of the cell.
  fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
    self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
  }
1121
1122  fn get_depth_icell(&self, raw_value: u64) -> (u8, u64) {
1123    // Remove the flag bit, then divide by 2 (2 bits per level)
1124    let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
1125    // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
1126    let hash = raw_value >> (2 + (delta_depth << 1));
1127    let depth = self.depth_max - delta_depth;
1128    (depth, hash)
1129  }
1130
  /// Transform this (B)MOC as a simple (sorted) array of ranges
  /// (WARNING: the ranges are at the MOC depth, not at the depth 29).
  /// During the operation, we loose the `flag` information attached to each BMOC cell.  
  pub fn to_ranges(&self) -> Box<[std::ops::Range<u64>]> {
    let mut ranges: Vec<std::ops::Range<u64>> = Vec::with_capacity(self.entries.len());
    // [prev_min, prev_max) is the range currently being built.
    let mut prev_min = 0_u64;
    let mut prev_max = 0_u64;
    for cell in self.into_iter() {
      if cell.depth < self.depth_max {
        // Low resolution cell: it spans a full range of cells at depth_max.
        let range = to_range(cell.hash, self.depth_max - cell.depth);
        if range.start != prev_max {
          // Not contiguous with the current range: flush it and start a new one.
          if prev_min != prev_max {
            // false only at first call, then always true
            ranges.push(prev_min..prev_max);
          }
          prev_min = range.start;
        }
        prev_max = range.end;
      } else if cell.hash == prev_max {
        // Single cell at depth_max, contiguous with the current range: extend it by one.
        prev_max += 1;
      } else {
        // Not contiguous: flush the current range and start a new one.
        if prev_min != prev_max {
          // false only at first call, then always true
          ranges.push(prev_min..prev_max);
        }
        prev_min = cell.hash;
        prev_max = cell.hash + 1;
      }
    }
    // Flush the last range, if any (false only for an empty BMOC).
    if prev_min != prev_max {
      // false only at first call, then always true
      ranges.push(prev_min..prev_max);
    }
    ranges.into_boxed_slice()
  }
1166
1167  /// Transform this (B)MOC as a simple (sorted) array of ranges.
1168  /// Ranges containing different flag values are split in sub-ranges
1169  pub fn to_flagged_ranges(&self) -> Vec<(std::ops::Range<u64>, bool)> {
1170    let mut ranges: Vec<(std::ops::Range<u64>, bool)> = Vec::with_capacity(self.entries.len());
1171    let mut prev_min = 0_u64;
1172    let mut prev_max = 0_u64;
1173    let mut prev_flag = self
1174      .entries
1175      .get(0)
1176      .map(|v| (v & 1_u64) == 1_u64)
1177      .unwrap_or(false);
1178    for cell in self.into_iter() {
1179      if cell.depth < self.depth_max {
1180        let range = to_range(cell.hash, self.depth_max - cell.depth);
1181        if range.start == prev_max && (prev_max == 0 || cell.is_full == prev_flag) {
1182          prev_max = range.end;
1183        } else {
1184          if prev_min != prev_max {
1185            // false only at first call, then always true
1186            ranges.push((prev_min..prev_max, prev_flag));
1187          }
1188          prev_min = range.start;
1189          prev_max = range.end;
1190          prev_flag = cell.is_full;
1191        }
1192      } else if cell.hash == prev_max && cell.is_full == prev_flag {
1193        prev_max += 1;
1194      } else {
1195        if prev_min != prev_max {
1196          // false only at first call, then always true
1197          ranges.push((prev_min..prev_max, prev_flag));
1198        }
1199        prev_min = cell.hash;
1200        prev_max = cell.hash + 1;
1201        prev_flag = cell.is_full;
1202      }
1203    }
1204    if prev_min != prev_max {
1205      // false only at first call, then always true
1206      ranges.push((prev_min..prev_max, prev_flag));
1207    }
1208    ranges.shrink_to_fit();
1209    ranges
1210  }
1211
  /// Transform this (B)MOC in a very compressed version. We call it `lossy` because
  /// during the operation, we loose the `flag` information attached to each BMOC cell.
  /// # Remark
  /// * If needed we could store the flag information!
  /// # Info
  /// * Original idea by F.-X. Pineau (see Java library), improved by M. Reinecke (through
  /// private communication) leading to an even better compression factor.
  /// * Although its seems (c.f. M. Reinecke) that this is quite similar to `Interpolative coding`,
  /// M. Reinecke tests show a slightly better compression factor. M. Reinecke raised the following
  /// question: was it worth implementing this specific case instead of using an
  /// `Interpolative coding` library?
  /// # Idea
  /// * The basic idea consists in serializing a depth-first traversal of the implicit
  ///   cell tree, pushing one marker per visited cell
  ///   (empty/full leaf, empty/partial/full node; see `CompressedMOCBuilder`).
  #[allow(clippy::many_single_char_names)]
  pub fn compress_lossy(&self) -> CompressedMOC {
    let n = self.entries.len();
    let dm = self.depth_max;
    let mut b = CompressedMOCBuilder::new(dm, 4 + 3 * n);
    if n == 0 {
      // Special case of empty MOC: push the 12 (empty) base cells.
      if dm == 0 {
        for _ in 0..12 {
          b.push_leaf_empty();
        }
      } else {
        for _ in 0..12 {
          b.push_node_empty();
        }
      }
      return b.to_compressed_moc();
    } else if dm == 0 {
      // Special case of other MOC at depth max = 0: only the 12 base cells, full or empty.
      let (curr_d, _) = self.get_depth_icell(self.entries[0]);
      assert_eq!(curr_d, 0);
      let mut h = 0_u64;
      for (_, curr_h) in self.entries.iter().map(|e| self.get_depth_icell(*e)) {
        // Empty base cells before the current one.
        for _ in h..curr_h {
          b.push_leaf_empty();
        }
        b.push_leaf_full();
        h = curr_h + 1;
      }
      // Remaining empty base cells after the last full one.
      for _ in h..12 {
        b.push_leaf_empty();
      }
      return b.to_compressed_moc();
    }
    // Let's start serious things
    // (d, h) is the current (depth, hash) position of the depth-first traversal.
    let mut d;
    let mut h = 0;
    let (curr_d, curr_h) = self.get_depth_icell(self.entries[0]);
    // go down to curr hash
    for dd in (0..=curr_d).rev() {
      // Hash of the (ancestor of the) first cell, at depth curr_d - dd.
      let target_h = curr_h >> (dd << 1);
      if curr_d == dm && dd == 0 {
        // Reaching a cell at the max depth: push leaves.
        for _ in h..target_h {
          b.push_leaf_empty();
        }
        b.push_leaf_full();
      } else {
        for _ in h..target_h {
          b.push_node_empty();
        }
        if dd == 0 {
          b.push_node_full()
        } else {
          b.push_node_partial()
        };
      }
      // Enter the first sub-cell at the next depth (2 bits per level).
      h = target_h << 2;
    }
    d = curr_d;
    h = curr_h;
    // middle, go up and down
    let mut i = 1_usize;
    while i < n {
      let (curr_d, curr_h) = self.get_depth_icell(self.entries[i]);
      // go up (if needed)!
      let target_h = if d > curr_d {
        // case previous hash deeper that current hash
        let dd = d - curr_d;
        curr_h << (dd << 1)
      } else {
        // case current hash deeper that previous hash, need to go up?
        let dd = curr_d - d;
        curr_h >> (dd << 1)
      };
      // Number of levels to go up: depth of the deepest common ancestor, capped at d
      // (base cells are coded on 4 bits, not 2).
      let mut dd = ((63 - (h ^ target_h).leading_zeros()) >> 1) as u8;
      if dd > d {
        dd = d;
      }
      // - go up to common depth
      if dd > 0 && d == dm {
        // The first level up is made of leaves when we stand at the max depth.
        for _ in h & 3..3 {
          // <=> (h + 1) & 3 < 4
          b.push_leaf_empty();
        }
        h >>= 2;
        dd -= 1;
        d -= 1;
      }
      for _ in 0..dd {
        // Push the remaining (empty) siblings before moving to the parent cell.
        for _ in h & 3..3 {
          // <=> (h + 1) & 3 < 4
          b.push_node_empty();
        }
        h >>= 2;
      }
      d -= dd;
      h += 1;
      // - go down
      let dd = curr_d - d;
      for rdd in (0..=dd).rev() {
        let target_h = curr_h >> (rdd << 1);
        if curr_d == dm && rdd == 0 {
          for _ in h..target_h {
            b.push_leaf_empty();
          }
          b.push_leaf_full();
        } else {
          for _ in h..target_h {
            b.push_node_empty();
          }
          if rdd == 0 {
            b.push_node_full()
          } else {
            b.push_node_partial()
          };
        }
        h = target_h << 2;
      }
      d = curr_d;
      h = curr_h;
      i += 1;
    }
    // - go up to depth 0
    if d == dm {
      for _ in h & 3..3 {
        b.push_leaf_empty();
      }
      h >>= 2;
      d -= 1;
    }
    for _ in 0..d {
      for _ in h & 3..3 {
        b.push_node_empty();
      }
      h >>= 2;
    }
    // - complete till base cell 11
    if dm == 0 {
      for _ in h + 1..12 {
        b.push_leaf_empty();
      }
    } else {
      for _ in h + 1..12 {
        b.push_node_empty();
      }
    }
    b.to_compressed_moc()
  }
1373}
1374
1375#[inline]
1376fn consume_while_overlapped(low_resolution: &Cell, iter: &mut BMOCIter) -> Option<Cell> {
1377  let mut cell = iter.next();
1378  while {
1379    match &cell {
1380      Some(c) => is_in(low_resolution, c),
1381      None => false,
1382    }
1383  } {
1384    cell = iter.next();
1385  }
1386  cell
1387}
1388
1389/// Returns boolean:
1390/// - false = returned cell do not overlap any more
1391/// - true =  returned cell overlap and its flag is 'full'
1392#[inline]
1393fn consume_while_overlapped_and_partial(
1394  low_resolution: &Cell,
1395  iter: &mut BMOCIter,
1396  res_is_overlapped: &mut bool,
1397) -> Option<Cell> {
1398  let mut cell = iter.next();
1399  while {
1400    match &cell {
1401      Some(c) => {
1402        if is_in(low_resolution, c) {
1403          if c.is_full {
1404            *res_is_overlapped = true;
1405            false
1406          } else {
1407            true
1408          }
1409        } else {
1410          false
1411        }
1412      }
1413      None => false,
1414    }
1415  } {
1416    cell = iter.next();
1417  }
1418  cell
1419  /*let mut cell = iter.next();
1420  while {
1421    match &cell {
1422      Some(c) => is_in(low_res_depth, low_res_hash,  c.depth, c.hash),
1423      None => false,
1424    }
1425  } {
1426    if cell.is_full {
1427      *res_is_overlapped = true;
1428      return cell;
1429    }
1430    cell = iter.next();
1431  }
1432  *res_is_overlapped = false;
1433  cell*/
1434}
1435
/// Returns the number of levels to go up from (`d`, `h`) to reach the deepest cell
/// containing both `h` and `next_h` (the latter being first expressed at depth `d`).
/// Returns 0 when both hashes already fall in the same cell at depth `d`.
#[inline]
fn dd_4_go_up(d: u8, h: u64, next_d: u8, next_h: u64) -> u8 {
  // Express next_h at depth d (2 bits per depth level).
  let next_h_at_d = if next_d < d {
    // Previous hash deeper than the current one.
    next_h << ((d - next_d) << 1)
  } else {
    // Current hash deeper than (or as deep as) the previous one.
    next_h >> ((next_d - d) << 1)
  };
  // The XOR sets to 1 only the bits differing between the two aligned hashes.
  // The position of the highest such bit gives the depth of the deepest common cell:
  // - all bits equal => same cell => 0 (dedicated branch, leading_zeros would be 64);
  // - 62 or 63 leading zeros => same parent cell => dd = 0;
  // - 60 or 61 leading zeros => dd = 1; and so on.
  // `.min(d)` is required since base cells are coded on 4 bits (not 2).
  match h ^ next_h_at_d {
    0 => 0,
    xor => ((63_u8 - (xor.leading_zeros() as u8)) >> 1).min(d),
  }
}
1460
1461/// Returns `true` if the given high resolution cell is in the low resolution cell
1462#[inline]
1463fn is_in(low_resolution: &Cell, high_resolution: &Cell) -> bool {
1464  low_resolution.depth <= high_resolution.depth
1465    && low_resolution.hash
1466      == (high_resolution.hash >> ((high_resolution.depth - low_resolution.depth) << 1))
1467}
1468/*
1469fn is_in(low_res_depth: u8, low_res_hash: u64, high_res_depth: u8, high_res_hash: u64) -> bool {
1470  low_res_depth < high_res_depth
1471    && low_res_hash == (high_res_hash >> (high_res_depth - low_res_depth) << 1)
1472}*/
1473
/// Removes the `is_full` flag bit (the LSB) from the given raw value.
#[inline]
fn rm_flag(raw_value: u64) -> u64 {
  raw_value >> 1
}
1478
/// Returns `true` if the flag bit (LSB) of the raw value is 0,
/// i.e. if the cell is only partially covered.
#[inline]
fn is_partial(raw_value: u64) -> bool {
  (raw_value & 1_u64) == 0_u64
}
1483
/// Returns `true` if the given hash is not the first of the 4 siblings sharing the
/// same parent cell, i.e. if its two lowest bits are not both 0.
#[inline]
fn is_not_first_cell_of_larger_cell(hash: u64) -> bool {
  hash & 0b11 != 0
}
1488
/// Returns the depth coded in the given raw value (the flag bit is removed first).
#[inline]
fn get_depth(raw_value: u64, depth_max: u8) -> u8 {
  get_depth_no_flag(rm_flag(raw_value), depth_max)
}
1493
/// Returns the depth coded in the given raw value.
/// Works both with no flag or with flag set to 0: the number of trailing zero
/// bit pairs (below the sentinel bit) equals `depth_max` minus the depth.
#[inline]
fn get_depth_no_flag(raw_value_no_flag: u64, depth_max: u8) -> u8 {
  let delta_depth = (raw_value_no_flag.trailing_zeros() >> 1) as u8;
  depth_max - delta_depth
}
1498
/// Returns the hash value stored in the given raw value, removing the flag bit,
/// the sentinel bit and `delta_depth` bit pairs.
#[inline]
fn get_hash_from_delta_depth(raw_value: u64, delta_depth: u8) -> u64 {
  // 2 bits per depth level, plus the sentinel and flag bits.
  let twice_dd = (delta_depth as u32) << 1;
  raw_value >> (twice_dd + 2)
}
1503
/// Consuming iterator over all the cells of a BMOC, at the BMOC maximum depth.
pub struct BMOCIntoFlatIter {
  /// Depth of the cells returned by the iterator.
  depth_max: u8,
  /// Total number of cells the iterator returns (at `depth_max`).
  deep_size: usize,
  /// Iterator over the BMOC raw values.
  raw_val_iter: IntoIter<u64>,
  /// Next hash value to be returned (`None` when the iteration is over).
  curr_val: Option<u64>,
  /// Largest hash value (inclusive) covered by the current raw value.
  curr_val_max: u64,
  /// Number of hash values returned so far (used by `size_hint`).
  n_returned: usize,
}
1512
impl BMOCIntoFlatIter {
  /// Builds the iterator and positions it on the first cell (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: IntoIter<u64>) -> BMOCIntoFlatIter {
    let mut flat_iter = BMOCIntoFlatIter {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Load the first raw value so that `next()` has something to return.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of cells (at `depth_max`) this iterator returns.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the cells returned by this iterator.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value, setting `curr_val` to its first sub-cell hash at
  /// `depth_max` and `curr_val_max` to its last one.
  /// Returns the previous value of `curr_val` (`take`n out when the input is exhausted).
  fn next_cell(&mut self) -> Option<u64> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(raw_value) => {
        // Remove the flag bit, then divide by 2 (2 bits per level)
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last sub-cell hashes at depth_max covered by this raw value.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(val)
      }
    }
  }
}
1551
1552impl Iterator for BMOCIntoFlatIter {
1553  type Item = u64;
1554
1555  fn next(&mut self) -> Option<u64> {
1556    if let Some(val) = self.curr_val {
1557      self.n_returned += 1;
1558      if val < self.curr_val_max {
1559        self.curr_val.replace(val + 1)
1560      } else {
1561        self.next_cell()
1562      }
1563    } else {
1564      None
1565    }
1566  }
1567
1568  fn size_hint(&self) -> (usize, Option<usize>) {
1569    let n = self.deep_size - self.n_returned;
1570    (n, Some(n))
1571  }
1572}
1573
/// Iterator over all the cells of a BMOC, at the BMOC maximum depth.
pub struct BMOCFlatIter<'a> {
  /// Depth of the cells returned by the iterator.
  depth_max: u8,
  /// Total number of cells the iterator returns (at `depth_max`).
  deep_size: usize,
  /// Iterator over the BMOC raw values.
  raw_val_iter: Iter<'a, u64>,
  /// Next hash value to be returned (`None` when the iteration is over).
  curr_val: Option<u64>,
  /// Largest hash value (inclusive) covered by the current raw value.
  curr_val_max: u64,
  /// Number of hash values returned so far (used by `size_hint`).
  n_returned: usize,
}
1582
impl<'a> BMOCFlatIter<'a> {
  /// Builds the iterator and positions it on the first cell (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIter<'a> {
    let mut flat_iter = BMOCFlatIter {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Load the first raw value so that `next()` has something to return.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of cells (at `depth_max`) this iterator returns.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the cells returned by this iterator.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value, setting `curr_val` to its first sub-cell hash at
  /// `depth_max` and `curr_val_max` to its last one.
  /// Returns the previous value of `curr_val` (`take`n out when the input is exhausted).
  fn next_cell(&mut self) -> Option<u64> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(&raw_value) => {
        // Remove the flag bit, then divide by 2 (2 bits per level)
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last sub-cell hashes at depth_max covered by this raw value.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(val)
        /*// Remove the flag bit, then divide by 2 (2 bits per level)
        let twice_delta_depth = (raw_value >> 1).trailing_zeros() as u8;
        // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
        let mask = 0xFFFFFFFFFFFFFFFC_u64 << twice_delta_depth;
        let min = raw_value & mask;
        self.curr_val_max = min | ((!mask) >> 1);
        self.curr_val.replace(min)*/
      }
    }
  }
}
1628
1629impl<'a> Iterator for BMOCFlatIter<'a> {
1630  type Item = u64;
1631
1632  fn next(&mut self) -> Option<u64> {
1633    if let Some(val) = self.curr_val {
1634      self.n_returned += 1;
1635      if val < self.curr_val_max {
1636        self.curr_val.replace(val + 1)
1637      } else {
1638        self.next_cell()
1639      }
1640    } else {
1641      None
1642    }
1643  }
1644
1645  fn size_hint(&self) -> (usize, Option<usize>) {
1646    let n = self.deep_size - self.n_returned;
1647    (n, Some(n))
1648  }
1649}
1650
/// Iterator over all the cells of a BMOC, at the BMOC maximum depth, keeping for each
/// flattened cell the raw value and the flag of the BMOC cell it originates from.
pub struct BMOCFlatIterCell<'a> {
  /// Depth of the cells returned by the iterator.
  depth_max: u8,
  /// Total number of cells the iterator returns (at `depth_max`).
  deep_size: usize,
  /// Iterator over the BMOC raw values.
  raw_val_iter: Iter<'a, u64>,

  //curr_raw_val: u64,
  //curr_flag: bool,
  /// Next cell to be returned (`None` when the iteration is over).
  curr_val: Option<Cell>,
  /// Largest hash value (inclusive) covered by the current raw value.
  curr_val_max: u64,

  /// Number of cells returned so far (used by `size_hint`).
  n_returned: usize,
}
1663
impl<'a> BMOCFlatIterCell<'a> {
  /// Builds the iterator and positions it on the first cell (if any).
  fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIterCell<'a> {
    let mut flat_iter = BMOCFlatIterCell {
      depth_max,
      deep_size,
      raw_val_iter,
      curr_val: None,
      curr_val_max: 0_u64,
      n_returned: 0_usize,
    };
    // Load the first raw value so that `next()` has something to return.
    flat_iter.next_cell();
    flat_iter
  }

  /// Total number of cells (at `depth_max`) this iterator returns.
  pub fn deep_size(&self) -> usize {
    self.deep_size
  }

  /// Depth of the cells returned by this iterator.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Decodes the next raw value, setting `curr_val` to a `Cell` positioned on its first
  /// sub-cell hash at `depth_max` and `curr_val_max` to its last sub-cell hash.
  /// Returns the previous value of `curr_val` (`take`n out when the input is exhausted).
  fn next_cell(&mut self) -> Option<Cell> {
    match self.raw_val_iter.next() {
      None => self.curr_val.take(),
      Some(&raw_value) => {
        // Remove the flag bit, then divide by 2 (2 bits per level)
        let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
        let twice_delta_depth = delta_depth << 1;
        // Remove 2 bits per depth difference + 1 sentinel bit + 1 flag bit
        let hash = raw_value >> (2 + twice_delta_depth);
        // First and last sub-cell hashes at depth_max covered by this raw value.
        let val = hash << twice_delta_depth;
        self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
        self.curr_val.replace(Cell {
          raw_value,
          depth: self.depth_max,
          hash: val,
          // The flag is the LSB of the raw value.
          is_full: (raw_value & 1_u64) == 1_u64,
        })
      }
    }
  }
}
1707
1708impl<'a> Iterator for BMOCFlatIterCell<'a> {
1709  type Item = Cell;
1710
1711  fn next(&mut self) -> Option<Cell> {
1712    if let Some(cell) = &self.curr_val {
1713      self.n_returned += 1;
1714      if cell.hash < self.curr_val_max {
1715        let new_cell = Cell {
1716          raw_value: cell.raw_value,
1717          depth: self.depth_max,
1718          hash: cell.hash + 1,
1719          is_full: cell.is_full,
1720        };
1721        self.curr_val.replace(new_cell)
1722      } else {
1723        self.next_cell()
1724      }
1725    } else {
1726      None
1727    }
1728  }
1729
1730  fn size_hint(&self) -> (usize, Option<usize>) {
1731    let n = self.deep_size - self.n_returned;
1732    (n, Some(n))
1733  }
1734}
1735
/// Iterator over the cells of a BMOC, decoding each raw value into a `Cell`.
pub struct BMOCIter<'a> {
  /// Maximum depth of the iterated BMOC (needed to decode the raw values).
  depth_max: u8,
  /// Iterator over the BMOC raw values.
  iter: Iter<'a, u64>,
}
1740
1741impl<'a> Iterator for BMOCIter<'a> {
1742  type Item = Cell;
1743
1744  fn next(&mut self) -> Option<Cell> {
1745    match self.iter.next() {
1746      None => None,
1747      Some(&raw_value) => Some(Cell::new(raw_value, self.depth_max)),
1748    }
1749  }
1750
1751  fn size_hint(&self) -> (usize, Option<usize>) {
1752    self.iter.size_hint()
1753  }
1754}
1755
impl<'a> IntoIterator for &'a BMOC {
  type Item = Cell;
  type IntoIter = BMOCIter<'a>;

  /// Iterates over the BMOC cells (decoded from the raw values), in their natural
  /// (z-order curve) order.
  fn into_iter(self) -> Self::IntoIter {
    BMOCIter {
      depth_max: self.depth_max,
      iter: self.entries.iter(),
    }
  }
}
1767
/// Create a BMOC raw value coding the depth, the hash and a flag in a way such that
/// the natural ordering follow a z-order curve.
///
/// # Inputs
/// - `depth`: depth of the hash value
/// - `hash`: hash value
/// - `is_full`: must be `false` (not full) or `true` (full)
/// - `depth_max`: the depth of the BMOC (we can use 29 for a unique raw value, but it will work
///   only with languages supporting unsigned 64 bit integers)
///
/// # Outputs
/// - the value coded like this:
///   - BBBBxx...xxS00...00F if depth < depth_max
///   - BBBBxx...xxxx...xxSF if depth = depth_max
///   - with in both cases:
///     -  B: the 4 bits coding the base hash [0- 11]
///     - xx: the 2 bits of level x
///     -  S: the sentinel bit coding the depth
///     - 00: if (depth != depth_max) those bits are unused bits
///     -  F: the flag bit (0: partial, 1: full)
#[inline]
fn build_raw_value(depth: u8, hash: u64, is_full: bool, depth_max: u8) -> u64 {
  // Append the sentinel bit after the hash bits.
  let with_sentinel = (hash << 1) | 1_u64;
  // Align on depth_max (2 bits per level) and leave room for the flag bit.
  let twice_delta_depth = (depth_max - depth) << 1;
  let aligned = with_sentinel << (1 + twice_delta_depth);
  // Finally set the flag bit (1 if full, 0 if partial).
  aligned | u64::from(is_full)
}
1797
1798/// Fill with all cells from `start_hash` at `start_depth` to `start_hash_at_target_depth + 1`.
1799/// with `target_depth` = `start_depth - delta_depth`.
1800/// - `flag`: value of the is_full flag to be set in cells while going up
1801///
1802/// The output depth is the input depth minus delta_depth
1803/// The output hash value is the input hash at the output depth, plus one
1804fn go_up(
1805  start_depth: &mut u8,
1806  start_hash: &mut u64,
1807  delta_depth: u8,
1808  flag: bool,
1809  builder: &mut BMOCBuilderUnsafe,
1810) {
1811  // let output_depth = *start_depth - delta_depth;       // For debug only
1812  // let output_hash = (*start_hash >> (delta_depth << 1)) + 1; // For debug only
1813  for _ in 0_u8..delta_depth {
1814    let target_hash = *start_hash | 3_u64;
1815    for h in (*start_hash + 1)..=target_hash {
1816      builder.push(*start_depth, h, flag);
1817    }
1818    *start_hash >>= 2;
1819    *start_depth -= 1;
1820  }
1821  *start_hash += 1;
1822  // debug_assert_eq!(*start_depth, output_depth);
1823  // debug_assert_eq!(*start_hash, output_hash);
1824}
1825
1826fn go_down(
1827  start_depth: &mut u8,
1828  start_hash: &mut u64,
1829  target_depth: u8,
1830  target_hash: u64,
1831  flag: bool,
1832  builder: &mut BMOCBuilderUnsafe,
1833) {
1834  debug_assert!(target_depth >= *start_depth);
1835  let mut twice_dd = (target_depth - *start_depth) << 1;
1836  for d in *start_depth..=target_depth {
1837    //range(0, target_depth - start_depth).rev() {
1838    let target_h_at_d = target_hash >> twice_dd;
1839    for h in *start_hash..target_h_at_d {
1840      builder.push(d, h, flag);
1841    }
1842    if d != target_depth {
1843      *start_hash = target_h_at_d << 2;
1844      twice_dd -= 2;
1845    }
1846  }
1847  *start_depth = target_depth;
1848  *start_hash = target_hash;
1849}
1850
/// Streaming builder of a [CompressedMOC]: bits are appended one at a time
/// into the `moc` byte buffer.
pub struct CompressedMOCBuilder {
  // Output buffer; its first byte stores the maximum depth.
  moc: Vec<u8>,
  // Maximum depth of the MOC being compressed.
  depth_max: u8,
  // Index of the byte currently being written (starts at 1, after the depth byte).
  ibyte: usize,
  // Index, in [0, 7], of the next bit to write in the current byte.
  ibit: u8,
}
1857
1858impl CompressedMOCBuilder {
1859  /// Capacity = number of bytes.
1860  fn new(depth_max: u8, capacity: usize) -> CompressedMOCBuilder {
1861    let mut moc = vec![0_u8; capacity + 1];
1862    moc[0] = depth_max;
1863    CompressedMOCBuilder {
1864      moc,
1865      depth_max,
1866      ibyte: 1,
1867      ibit: 0,
1868    }
1869  }
1870
1871  #[allow(clippy::wrong_self_convention)]
1872  fn to_compressed_moc(mut self) -> CompressedMOC {
1873    self.moc.resize(
1874      if self.ibit == 0 {
1875        self.ibyte
1876      } else {
1877        self.ibyte + 1
1878      },
1879      0,
1880    );
1881    CompressedMOC {
1882      moc: self.moc.into_boxed_slice(),
1883      depth_max: self.depth_max,
1884    }
1885  }
1886
1887  fn push_0(&mut self) {
1888    self.ibyte += (self.ibit == 7) as usize;
1889    self.ibit += 1;
1890    self.ibit &= 7;
1891  }
1892  fn push_1(&mut self) {
1893    self.moc[self.ibyte] |= 1_u8 << self.ibit;
1894    self.push_0();
1895  }
1896  fn push_node_empty(&mut self) {
1897    self.push_1();
1898    self.push_0();
1899  }
1900  fn push_node_full(&mut self) {
1901    self.push_1();
1902    self.push_1();
1903  }
1904  fn push_node_partial(&mut self) {
1905    self.push_0();
1906  }
1907  fn push_leaf_empty(&mut self) {
1908    self.push_0();
1909  }
1910  fn push_leaf_full(&mut self) {
1911    self.push_1();
1912  }
1913}
1914
/// Bit-level reader used to decompress a [CompressedMOC] byte stream.
pub struct CompressedMOCDecompHelper<'a> {
  // Compressed bytes; byte 0 stores the maximum depth and is skipped.
  moc: &'a [u8],
  // Index of the byte currently being read.
  ibyte: usize,
  // Index, in [0, 7], of the next bit to read in the current byte.
  ibit: u8,
}
1920
1921impl<'a> CompressedMOCDecompHelper<'a> {
1922  fn new(moc: &'a [u8]) -> CompressedMOCDecompHelper<'a> {
1923    CompressedMOCDecompHelper {
1924      moc,
1925      ibyte: 1,
1926      ibit: 0,
1927    }
1928  }
1929
1930  fn get(&mut self) -> bool {
1931    let r = self.moc[self.ibyte] & (1_u8 << self.ibit) != 0;
1932    self.ibyte += (self.ibit == 7) as usize;
1933    self.ibit += 1;
1934    self.ibit &= 7;
1935    r
1936  }
1937}
1938
/// Compressed (bit-packed) representation of a BMOC.
/// The first element of `moc` contains the maximum depth.
pub struct CompressedMOC {
  // Compressed bit stream; byte 0 stores the maximum depth.
  moc: Box<[u8]>,
  // Maximum depth, redundant with `moc[0]`, kept for direct access.
  depth_max: u8,
}
1944
impl CompressedMOC {
  /// Returns the maximum depth of this compressed MOC.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Returns the size, in bytes, of the compressed representation
  /// (including the leading depth byte).
  pub fn byte_size(&self) -> usize {
    self.moc.len()
  }

  /// Encodes the compressed bytes in standard base64.
  pub fn to_b64(&self) -> String {
    STANDARD.encode(&self.moc)
  }

  /// Decodes a [CompressedMOC] from its standard base64 representation.
  ///
  /// # Errors
  /// Returns a [DecodeError] if the input is not valid base64.
  ///
  /// # Panics
  /// If the decoded byte array is empty: the first byte, storing the
  /// maximum depth, is read unconditionally.
  pub fn from_b64(b64_encoded: String) -> Result<CompressedMOC, DecodeError> {
    let decoded = STANDARD.decode(b64_encoded)?;
    let depth_max = decoded[0];
    Ok(CompressedMOC {
      moc: decoded.into_boxed_slice(),
      depth_max,
    })
  }

  /// Decompresses this compressed MOC into a full [BMOC].
  pub fn self_decompress(&self) -> BMOC {
    CompressedMOC::decompress(&self.moc)
  }

  // TODO: create an iterator (to iterate on cells while decompressing)
  /// Decompresses the given byte stream (first byte = maximum depth) into a [BMOC],
  /// walking the implicit cell tree depth-first while reading the bit stream.
  pub fn decompress(cmoc: &[u8]) -> BMOC {
    let depth_max = cmoc[0];
    // Capacity: at most 8 cells per stream byte (1 bit per cell at best).
    let mut moc_builder = BMOCBuilderUnsafe::new(depth_max, 8 * (cmoc.len() - 1));
    let mut bits = CompressedMOCDecompHelper::new(cmoc);
    // Current position in the tree: cell `hash` at `depth`.
    let mut depth = 0_u8;
    let mut hash = 0_u64;
    // Stop once the 12 base cells (depth 0) have all been visited.
    while depth != 0 || hash != 12 {
      if bits.get() {
        // bit = 1
        // At depth_max, a single 1 bit means a full leaf; above it, a second
        // bit distinguishes a full node (1, pushed) from an empty one (0, skipped).
        if depth == depth_max || bits.get() {
          moc_builder.push(depth, hash, true);
        }
        // go up if needed (while the current cell is the last of its 4 siblings)
        while hash & 3 == 3 && depth > 0 {
          hash >>= 2;
          depth -= 1;
        }
        // take next hash
        hash += 1;
      } else {
        // bit = 0
        if depth == depth_max {
          // Empty leaf: go up if needed
          while hash & 3 == 3 && depth > 0 {
            hash >>= 2;
            depth -= 1;
          }
          // take next hash
          hash += 1;
        } else {
          debug_assert!(depth < depth_max);
          // Partial node: go down of 1 level (to the first child)
          hash <<= 2;
          depth += 1;
        }
      }
    }
    moc_builder.to_bmoc()
  }
}
2012
#[cfg(test)]
mod tests {
  use super::*;
  use crate::nested::{cone_coverage_approx_custom, polygon_coverage};

  /// Builds the compressed form of an empty BMOC at the given depth.
  fn build_compressed_moc_empty(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    builder.to_bmoc().unwrap().compress_lossy()
  }

  /// Builds the compressed form of a BMOC covering the full sky at the given depth.
  fn build_compressed_moc_full(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    // 12 base cells, each with 4^depth descendants at `depth`.
    for icell in 0..12 * (1 << (depth << 1)) {
      builder.push(icell)
    }
    let bmoc = builder.to_bmoc().unwrap();
    eprintln!("Entries: {}", bmoc.entries.len());
    bmoc.compress_lossy()
  }

  /// Prints the BMOC as an Aladin `draw moc` command (manual inspection helper).
  fn to_aladin_moc(bmoc: &BMOC) {
    print!("draw moc {}/", bmoc.get_depth_max());
    for cell in bmoc.flat_iter() {
      print!("{}, ", cell);
    }
  }

  // Empty MOC at depth 0: compression and b64 round-trip.
  #[test]
  fn testok_compressed_moc_empty_d0() {
    let compressed = build_compressed_moc_empty(0);
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 0_u8, 0_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AAAA");
    // Decompress then re-compress: the b64 must be unchanged.
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }

  // Empty MOC at depth 1: compression and b64 round-trip.
  #[test]
  fn testok_compressed_moc_empty_d1() {
    let compressed = build_compressed_moc_empty(1);
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    assert_eq!(
      compressed.moc,
      vec![1_u8, 85_u8, 85_u8, 85_u8].into_boxed_slice()
    );
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AVVVVQ==");
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }

  // Full-sky MOC at depth 0: compression and b64 round-trip.
  #[test]
  fn testok_compressed_moc_full_d0() {
    let compressed = build_compressed_moc_full(0);
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 255_u8, 15_u8].into_boxed_slice());
    // eprintln!("{}", compressed.to_b64());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AP8P");
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }

  // Full-sky MOC at depth 1: compression and b64 round-trip.
  #[test]
  fn testok_compressed_moc_full_d1() {
    let compressed = build_compressed_moc_full(1);
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    eprintln!("{:?}", compressed.moc);
    eprintln!("{}", compressed.to_b64());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "Af///w==");
    assert_eq!(
      CompressedMOC::decompress(&compressed.moc)
        .compress_lossy()
        .to_b64(),
      b64
    );
  }

  // `not` maps the all-sky BMOC to the empty one, and vice versa.
  #[test]
  fn test_ok_allsky_and_empty_bmoc() {
    let bmoc_allsky = BMOC::new_allsky(18);
    assert_eq!(bmoc_allsky.entries.len(), 12);
    let bmoc_empty = BMOC::new_empty(18);
    assert_eq!(bmoc_empty.entries.len(), 0);
    assert_eq!(bmoc_allsky.not(), bmoc_empty);
    assert_eq!(bmoc_allsky, bmoc_empty.not());
  }

  // `not` is an involution: not(not(moc)) == moc on a polygon coverage.
  #[test]
  fn test_ok_bmoc_not_round_trip() {
    // Polygon vertices as (lon, lat) pairs, in degrees.
    let mut poly_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    // Pair up even (lon) and odd (lat) elements.
    let vertices: Vec<(f64, f64)> = poly_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc_org = polygon_coverage(14, vertices.as_slice(), true);
    let moc_not = moc_org.not();
    let moc_out = moc_not.not();
    println!("len: {}", moc_org.size());
    println!("len: {}", moc_not.size());
    println!("len: {}", moc_out.size());
    assert_eq!(moc_org, moc_out);
  }

  // De Morgan: or(a, b) == not(and(not(a), not(b))).
  #[test]
  fn test_ok_bmoc_union_and_not() {
    let mut poly1_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly1_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly2_vertices_deg: [f64; 8] = [
      272.630446, -19.234210, 272.637274, -19.248542, 272.638942, -19.231476, 272.630868,
      -19.226364,
    ];
    for v in poly2_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let v1: Vec<(f64, f64)> = poly1_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly1_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v2: Vec<(f64, f64)> = poly2_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly2_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc1 = polygon_coverage(14, v1.as_slice(), true);
    let moc2 = polygon_coverage(14, v2.as_slice(), true);
    let not_moc1 = moc1.not();
    let not_moc2 = moc2.not();
    let union = moc1.or(&moc2);
    let not_inter = not_moc1.and(&not_moc2);
    let union_bis = not_inter.not();
    //to_aladin_moc(&moc1);
    //println!("\n");
    //to_aladin_moc(&moc2);
    //println!("\n");
    // to_aladin_moc(&union);
    //println!("\n");
    // to_aladin_moc(&union_bis);
    //println!("\n");
    assert_eq!(union, union_bis);
  }

  // Coherency of xor, minus and and-with-not on disjoint coverages.
  #[test]
  fn test_ok_bmoc_xor_minus_coherency() {
    // No overlapping parts, so we do no test thoroughly XOR and MINUS!!
    let mut poly1_vertices_deg: [f64; 14] = [
      272.511081, -19.487278, 272.515300, -19.486595, 272.517029, -19.471442, 272.511714,
      -19.458837, 272.506430, -19.459001, 272.496401, -19.474322, 272.504821, -19.484924,
    ];
    for v in poly1_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly2_vertices_deg: [f64; 8] = [
      272.630446, -19.234210, 272.637274, -19.248542, 272.638942, -19.231476, 272.630868,
      -19.226364,
    ];
    for v in poly2_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let mut poly3_vertices_deg: [f64; 178] = [
      272.536719, -19.461249, 272.542612, -19.476380, 272.537389, -19.491509, 272.540192,
      -19.499823, 272.535455, -19.505218, 272.528024, -19.505216, 272.523437, -19.500298,
      272.514082, -19.503376, 272.502271, -19.500966, 272.488647, -19.490390, 272.481932,
      -19.490913, 272.476737, -19.486589, 272.487633, -19.455645, 272.500386, -19.444996,
      272.503003, -19.437557, 272.512303, -19.432436, 272.514132, -19.423973, 272.522103,
      -19.421523, 272.524511, -19.413250, 272.541021, -19.400024, 272.566264, -19.397500,
      272.564202, -19.389111, 272.569055, -19.383210, 272.588186, -19.386539, 272.593376,
      -19.381832, 272.596327, -19.370541, 272.624911, -19.358915, 272.629256, -19.347842,
      272.642277, -19.341020, 272.651322, -19.330424, 272.653174, -19.325079, 272.648903,
      -19.313708, 272.639616, -19.311098, 272.638128, -19.303083, 272.632705, -19.299839,
      272.627971, -19.289408, 272.628226, -19.276293, 272.633750, -19.270590, 272.615109,
      -19.241810, 272.614704, -19.221196, 272.618224, -19.215572, 272.630809, -19.209945,
      272.633540, -19.198681, 272.640711, -19.195292, 272.643028, -19.186751, 272.651477,
      -19.182729, 272.649821, -19.174859, 272.656782, -19.169272, 272.658933, -19.161883,
      272.678012, -19.159481, 272.689173, -19.176982, 272.689395, -19.183512, 272.678006,
      -19.204016, 272.671112, -19.206598, 272.664854, -19.203523, 272.662760, -19.211156,
      272.654435, -19.214434, 272.652969, -19.222085, 272.656724, -19.242136, 272.650071,
      -19.265092, 272.652868, -19.274296, 272.660871, -19.249462, 272.670041, -19.247807,
      272.675533, -19.254935, 272.673291, -19.273917, 272.668710, -19.279245, 272.671460,
      -19.287043, 272.667507, -19.293933, 272.669261, -19.300601, 272.663969, -19.307130,
      272.672626, -19.308954, 272.675225, -19.316490, 272.657188, -19.349105, 272.657638,
      -19.367455, 272.662447, -19.372035, 272.662232, -19.378566, 272.652479, -19.386871,
      272.645819, -19.387933, 272.642279, -19.398277, 272.629282, -19.402739, 272.621487,
      -19.398197, 272.611782, -19.405716, 272.603367, -19.404667, 272.586162, -19.422703,
      272.561792, -19.420008, 272.555815, -19.413012, 272.546500, -19.415611, 272.537427,
      -19.424213, 272.533081, -19.441402,
    ];
    for v in poly3_vertices_deg.iter_mut() {
      *v = (*v).to_radians();
    }
    let v1: Vec<(f64, f64)> = poly1_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly1_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v2: Vec<(f64, f64)> = poly2_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly2_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let v3: Vec<(f64, f64)> = poly3_vertices_deg
      .iter()
      .copied()
      .step_by(2)
      .zip(poly3_vertices_deg.iter().copied().skip(1).step_by(2))
      .collect();
    let moc1 = polygon_coverage(18, v1.as_slice(), true);
    let moc2 = polygon_coverage(18, v2.as_slice(), true);
    let moc3 = polygon_coverage(18, v3.as_slice(), true);

    let union12 = moc1.or(&moc2);
    // Since moc3 and union12 are disjoint-ish, all three must agree.
    let res1 = moc3.xor(&union12);
    let res2 = moc3.minus(&union12);
    let res3 = moc3.and(&union12.not());

    assert_eq!(res1, res2);
    assert_eq!(res1, res3);
  }

  // `not` of a cone coverage: check the complement's flagged ranges.
  #[test]
  fn test_ok_bmoc_not() {
    let lon = 13.158329_f64.to_radians();
    let lat = -72.80028_f64.to_radians();
    let radius = 5.64323_f64.to_radians();
    let depth = 6;
    let delta_depth = 5;

    let bmoc = cone_coverage_approx_custom(depth, delta_depth, lon, lat, radius);

    let not_bmoc = bmoc.not();

    println!("BMOC");
    for (range, flag) in bmoc.to_flagged_ranges() {
      println!("flag: {}; range: {:?}", flag, range);
    }
    println!("NOT BMOC");
    for (range, flag) in not_bmoc.to_flagged_ranges() {
      println!("flag: {}; range: {:?}", flag, range);
    }
    // Assert that the first range has the flag 'true'.
    assert!(not_bmoc.to_flagged_ranges().first().unwrap().1)
  }
}