use base64::{encode, decode, DecodeError};
use std::vec::IntoIter;
use std::slice::Iter;
use std::cmp::{max, Ordering};
use super::{to_range, hash};
use super::super::{nside_square_unsafe};
/// Builder of [BMOC]s which does NOT check the validity of the pushed cells
/// (correct ordering and depth consistency are the caller's responsibility).
#[derive(Debug)]
pub struct BMOCBuilderUnsafe { depth_max: u8, // maximum depth (order) of the BMOC being built
// `Some` while building; taken (left to `None`) once the BMOC is produced
entries: Option<Vec<u64>>,
}
impl BMOCBuilderUnsafe {
/// Creates a builder for a BMOC of maximum depth `depth_max`,
/// pre-allocating room for `capacity` cells.
pub fn new(depth_max: u8, capacity: usize) -> BMOCBuilderUnsafe {
  let entries = Some(Vec::with_capacity(capacity));
  BMOCBuilderUnsafe { depth_max, entries }
}
pub fn push(&mut self, depth: u8, hash: u64, is_full: bool) -> &mut BMOCBuilderUnsafe {
if let Some(ref mut v) = self.entries {
v.push(build_raw_value(depth, hash, is_full, self.depth_max));
} else {
panic!("Empty builder, you have to re-init it before re-using it!");
}
self
}
fn push_raw_unsafe(&mut self, raw_value: u64) -> &mut BMOCBuilderUnsafe {
if let Some(ref mut v) = self.entries {
v.push(raw_value);
} else {
panic!("Empty builder, you have to re-init it before re-using it!");
}
self
}
pub fn push_all(&mut self, depth: u8, from_hash: u64, to_hash: u64, are_full: bool) -> &mut BMOCBuilderUnsafe {
if let Some(ref mut v) = self.entries {
for h in from_hash..to_hash {
v.push(build_raw_value(depth, h, are_full, self.depth_max));
}
} else {
panic!("Empty builder, you have to re-init it before re-using it!");
}
self
}
/// Consumes the builder and returns the resulting BMOC, entries taken as-is.
#[allow(clippy::wrong_self_convention)]
pub fn to_bmoc(mut self) -> BMOC {
  let entries = self.entries.take().expect("Empty builder!");
  BMOC::create_unsafe(self.depth_max, entries.into_boxed_slice())
}
/// Consumes the builder, sorts the entries, and returns the resulting BMOC.
#[allow(clippy::wrong_self_convention)]
pub fn to_bmoc_from_unordered(mut self) -> BMOC {
  let mut entries = self.entries.take().expect("Empty builder!");
  entries.sort_unstable();
  BMOC::create_unsafe(self.depth_max, entries.into_boxed_slice())
}
/// Packs the entries in place: whenever the four children of a parent cell are
/// all present and flagged full, they are replaced by the parent cell.
/// Passes are repeated until a fixed point is reached (no more merges).
fn pack(&mut self) -> Vec<u64> {
let mut entries = self.entries.take().expect("Empty builder!");
// `prev_to_index`/`curr_to_index`: number of valid entries before/after a pass.
let mut prev_to_index = 0_usize;
let mut curr_to_index = entries.len();
while prev_to_index != curr_to_index { prev_to_index = curr_to_index;
// Read cursor in the previous pass, write cursor of the current pass.
let mut i_prev_moc = 0_usize;
let mut i_curr_moc = 0_usize;
while i_prev_moc < prev_to_index {
let mut curr_cell = entries[i_prev_moc];
i_prev_moc += 1;
let mut curr_cell_depth = get_depth(curr_cell, self.depth_max);
let mut curr_cell_hash = get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
// Copy forward every cell that cannot start a merge: depth-0 cells,
// partial cells, and cells that are not the first child of their parent.
while i_prev_moc < prev_to_index &&
(curr_cell_depth == 0 || is_partial(curr_cell) || is_not_first_cell_of_larger_cell(curr_cell_hash)) {
if i_curr_moc != i_prev_moc {
entries[i_curr_moc] = curr_cell;
i_curr_moc += 1;
}
curr_cell = entries[i_prev_moc];
i_prev_moc += 1;
curr_cell_depth = get_depth(curr_cell, self.depth_max);
curr_cell_hash = get_hash_from_delta_depth(curr_cell, self.depth_max - curr_cell_depth);
}
// Merge if the three siblings (children 1, 2 and 3) follow, all full.
if i_prev_moc + 2 < prev_to_index
&& entries[i_prev_moc] == build_raw_value(curr_cell_depth, curr_cell_hash | 1, true, self.depth_max)
&& entries[i_prev_moc + 1] == build_raw_value(curr_cell_depth, curr_cell_hash | 2, true, self.depth_max)
&& entries[i_prev_moc + 2] == build_raw_value(curr_cell_depth, curr_cell_hash | 3, true, self.depth_max) {
entries[i_curr_moc] = build_raw_value(curr_cell_depth - 1, curr_cell_hash >> 2, true, self.depth_max);
i_curr_moc += 1;
i_prev_moc += 3;
} else if i_curr_moc != i_prev_moc {
entries[i_curr_moc] = curr_cell;
i_curr_moc += 1;
}
}
curr_to_index = i_curr_moc;
}
// Drop the now-unused tail left over by the in-place compaction.
entries.truncate(curr_to_index);
entries
}
/// Re-encodes a raw value whose depth is <= `new_depth` for a BMOC of maximum
/// depth `new_depth`: shifts out the now-useless padding bit-pairs while
/// keeping the `is_full` flag (LSB) in place.
fn low_depth_raw_val_at_lower_depth(&self, raw_value: u64, new_depth: u8) -> u64 {
debug_assert!(self.get_depth(raw_value) <= new_depth);
debug_assert!(new_depth <= self.depth_max);
let twice_delta_depth = (self.depth_max - new_depth) << 1;
(raw_value >> twice_delta_depth) | (raw_value & 1_u64)
}
/// Degrades the (ordered) entries to `new_depth`: cells already at a depth
/// <= `new_depth` are re-encoded in place; deeper cells are merged into their
/// ancestor at `new_depth`, pushed as a partial cell (`(hash << 2) | 2`:
/// sentinel bit set, full flag cleared).
/// Panics if `new_depth >= self.depth_max`.
fn to_lower_depth(&self, new_depth: u8, mut entries: Vec<u64>) -> Vec<u64> {
if new_depth >= self.depth_max {
panic!("The given depth must be lower than the depth max of the BMOC");
}
let mut i_new = 0_usize;
// Copy the leading low-depth cells in place until the first deeper cell,
// remembering that cell's ancestor hash at `new_depth`.
let mut prev_hash_at_new_depth = loop {
if i_new == entries.len() {
break None;
}
let raw_value = entries[i_new];
let depth = self.get_depth(raw_value);
if depth <= new_depth {
entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
i_new += 1;
} else {
break Some(get_hash_from_delta_depth(raw_value, self.depth_max - new_depth));
}
};
for i in (i_new + 1)..entries.len() {
let raw_value = entries[i];
let depth = self.get_depth(raw_value);
if depth <= new_depth {
// Flush the pending merged (partial) ancestor before copying this cell.
if prev_hash_at_new_depth.is_some() {
entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
i_new += 1;
}
entries[i_new] = self.low_depth_raw_val_at_lower_depth(raw_value, new_depth);
i_new += 1;
} else {
// Deep cell: merge into the pending ancestor, flushing it on change.
let curr_hash_at_new_depth = get_hash_from_delta_depth(raw_value, self.depth_max - new_depth);
if let Some(prev_val_at_new_depth) = prev_hash_at_new_depth {
if prev_val_at_new_depth != curr_hash_at_new_depth {
entries[i_new] = (prev_val_at_new_depth << 2) | 2_u64; i_new += 1;
prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
}
} else {
prev_hash_at_new_depth.replace(curr_hash_at_new_depth);
}
}
}
// Flush the last pending merged ancestor, if any.
if prev_hash_at_new_depth.is_some() {
entries[i_new] = (prev_hash_at_new_depth.take().unwrap() << 2) | 2_u64;
i_new += 1;
}
entries.truncate(i_new);
entries
}
/// Packs the entries (see `pack`) and returns the resulting BMOC.
#[allow(clippy::wrong_self_convention)]
pub fn to_bmoc_packing(&mut self) -> BMOC {
  let packed = self.pack();
  BMOC::create_unsafe(self.depth_max, packed.into_boxed_slice())
}
/// Degrades the entries to `new_depth` and returns the resulting BMOC.
#[allow(clippy::wrong_self_convention)]
pub fn to_lower_depth_bmoc(&mut self, new_depth: u8) -> BMOC {
  let taken = self.entries.take().expect("Empty builder!");
  let lowered = self.to_lower_depth(new_depth, taken);
  BMOC::create_unsafe(new_depth, lowered.into_boxed_slice())
}
/// Packs, then degrades the entries to `new_depth`, and returns the resulting BMOC.
#[allow(clippy::wrong_self_convention)]
pub fn to_lower_depth_bmoc_packing(&mut self, new_depth: u8) -> BMOC {
  let packed = self.pack();
  let lowered = self.to_lower_depth(new_depth, packed);
  BMOC::create_unsafe(new_depth, lowered.into_boxed_slice())
}
#[inline]
/// Depth of the cell encoded in `raw_value` (the full/partial flag is removed first).
fn get_depth(&self, raw_value: u64) -> u8 {
self.get_depth_no_flag(rm_flag(raw_value))
}
#[inline]
/// Depth from a raw value whose flag bit has already been removed: the number
/// of trailing zero bit-pairs (position of the sentinel bit) is the delta
/// from `depth_max`.
fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
}
}
/// Result of testing a position against a BMOC (see `BMOC::test_coo`).
pub enum Status {
/// The position falls in a fully-covered cell.
IN,
/// The position falls outside the BMOC coverage.
OUT,
/// The position falls in a partially-covered cell: cannot decide.
UNKNOWN,
}
/// Builder accepting cells of a single, fixed depth; pushed hashes are
/// buffered and periodically merged (logical OR) into an accumulated BMOC.
pub struct BMOCBuilderFixedDepth {
// Depth of every pushed cell
depth: u8,
// BMOC accumulated so far (merged with the buffer content on each drain)
bmoc: Option<BMOC>,
// Flag applied to every pushed cell
is_full: bool,
buffer: Vec<u64>,
// `true` while the buffer content is known to be sorted
sorted: bool,
}
impl BMOCBuilderFixedDepth {
pub fn new(depth: u8, is_full: bool) -> BMOCBuilderFixedDepth {
BMOCBuilderFixedDepth::with_capacity(depth, is_full, 10_000_000)
}
pub fn with_capacity(depth: u8, is_full: bool, buff_capacity: usize) -> BMOCBuilderFixedDepth {
BMOCBuilderFixedDepth {
depth,
bmoc: None,
is_full,
buffer: Vec::with_capacity(buff_capacity),
sorted: true,
}
}
/// Appends a hash at the builder's depth. Consecutive duplicates are ignored,
/// and the `sorted` flag is cleared on the first out-of-order value.
/// Drains the buffer into the accumulated BMOC when it is full.
pub fn push(&mut self, hash: u64) {
  match self.buffer.last() {
    Some(&last) if last == hash => return, // consecutive duplicate: skip
    Some(&last) => {
      if self.sorted && last > hash {
        self.sorted = false;
      }
    }
    None => {}
  }
  self.buffer.push(hash);
  if self.buffer.len() == self.buffer.capacity() {
    self.drain_buffer();
  }
}
/// Drains the remaining buffered hashes and returns the accumulated BMOC,
/// leaving the builder empty.
#[allow(clippy::wrong_self_convention)]
pub fn to_bmoc(&mut self) -> Option<BMOC> {
  self.drain_buffer();
  self.bmoc.take()
}
/// Turns the buffered hashes into a BMOC and merges (OR) it into the
/// already-accumulated one.
fn drain_buffer(&mut self) {
  if !self.sorted {
    // Sort and remove duplicates before building the BMOC.
    self.buffer.sort_unstable();
    self.buffer.dedup();
  }
  let new_bmoc = self.buff_to_bmoc();
  self.clear_buff();
  let merged = match self.bmoc.take() {
    Some(prev_bmoc) => prev_bmoc.or(&new_bmoc),
    None => new_bmoc,
  };
  self.bmoc = Some(merged);
}
/// Builds a BMOC from the (sorted, deduplicated) buffer, merging each maximal
/// aligned run of consecutive hashes into a single lower-depth cell.
fn buff_to_bmoc(&mut self) -> BMOC {
  let mut i = 0_usize;
  let mut k = 0_usize;
  while i < self.buffer.len() {
    let h = self.buffer[i];
    let sequence_len = self.largest_lower_cell_sequence_len(h, &self.buffer[i..]);
    // We need the largest power of 4 <= sequence_len, i.e. floor(log2)/2.
    // BUGFIX: when next_power_of_two() overshoots, the previous power of two
    // has `trailing_zeros() - 1` zeros; the original used `trailing_zeros() >> 2`,
    // which under-merges (e.g. a run of 5..=7 cells gave delta_depth 0 instead of 1).
    let npt = sequence_len.next_power_of_two();
    let delta_depth = if npt > sequence_len {
      (npt.trailing_zeros() - 1) >> 1
    } else {
      debug_assert_eq!(npt, sequence_len);
      npt.trailing_zeros() >> 1
    } as u8;
    let twice_dd = delta_depth << 1;
    // Number of depth-max cells actually merged (a power of 4 <= sequence_len).
    let sequence_len = 1_usize << twice_dd;
    self.buffer[k] = build_raw_value(self.depth - delta_depth, h >> twice_dd, self.is_full, self.depth);
    k += 1;
    i += sequence_len;
  }
  BMOC::create_unsafe_copying(self.depth, &self.buffer[0..k])
}
#[inline]
/// Length of the longest run of consecutive hashes starting at `h`
/// (with `entries[0] == h`) that could be merged into one larger cell.
/// The run is bounded by the alignment of `h` (its trailing zero bit-pairs,
/// capped at `self.depth`) and by the number of remaining entries.
fn largest_lower_cell_sequence_len(&self, mut h: u64, entries: &[u64]) -> usize {
let dd = ((h.trailing_zeros() >> 1) as u8).min(self.depth); let n = 1_usize << (dd << 1); let n = n.min(entries.len());
// Stop at the first non-consecutive value.
for (i, e) in entries.iter().enumerate().take(n).skip(1) {
h += 1;
if *e != h {
return i;
}
}
n
}
/// Resets the buffer: empties it and marks it as sorted again.
fn clear_buff(&mut self) {
self.sorted = true;
self.buffer.clear();
}
}
/// Breadth-first Multi-Order Coverage map: an ordered list of cells, each
/// encoded in a `u64` together with its depth and a full/partial flag
/// (see `build_raw_value` for the encoding).
pub struct BMOC {
// Maximum depth (order) of the cells stored in this BMOC
depth_max: u8,
// Ordered raw values
pub entries: Box<[u64]>,
}
#[derive(Debug)]
/// Decoded view of a BMOC raw value.
pub struct Cell {
pub raw_value: u64,
pub depth: u8,
pub hash: u64,
pub is_full: bool,
}
impl Cell {
/// Decodes `raw_value`: bit 0 is the `is_full` flag; the number of trailing
/// zero bit-pairs of `raw_value >> 1` (position of the sentinel bit) is the
/// delta depth from `depth_max`; the remaining high bits hold the hash.
fn new(raw_value: u64, depth_max: u8) -> Cell {
let is_full = (raw_value & 1_u64) == 1_u64;
let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
let hash = raw_value >> (2 + (delta_depth << 1));
let depth = depth_max - delta_depth;
Cell { raw_value, depth, hash, is_full }
}
}
impl BMOC {
/// Number of entries (cells) in this BMOC.
pub fn size(&self) -> usize {
self.entries.len()
}
/// Builds a BMOC from raw values assumed to be already correctly encoded and ordered.
pub(super) fn create_unsafe(depth_max: u8, entries: Box<[u64]>) -> BMOC {
BMOC { depth_max, entries }
}
/// Builds a BMOC by copying the given raw values, assumed to be already
/// correctly encoded and ordered.
pub(super) fn create_unsafe_copying(depth_max: u8, entries: &[u64]) -> BMOC {
  // Idiomatic slice copy instead of the manual push loop.
  BMOC { depth_max, entries: entries.to_vec().into_boxed_slice() }
}
/// Maximum depth (order) of this BMOC.
pub fn get_depth_max(&self) -> u8 {
  self.depth_max
}
/// Tests the equality of two BMOCs: same maximum depth, same number of
/// entries, and same raw values in the same order.
pub fn equals(&self, other: &BMOC) -> bool {
  self.depth_max == other.depth_max
    && self.entries.len() == other.entries.len()
    && self.iter().zip(other.iter()).all(|(r1, r2)| r1 == r2)
}
/// Panics (with a diagnostic message) if the two BMOCs differ in depth,
/// content or length.
pub fn assert_equals(&self, other: &BMOC) {
  // Guard clause: depths first.
  if self.depth_max != other.depth_max {
    panic!("Depths are different");
  }
  for (r1, r2) in self.iter().zip(other.iter()) {
    if *r1 != *r2 {
      panic!("Left: {:?}; Right: {:?}", self.from_raw_value(*r1), other.from_raw_value(*r2));
    }
  }
  if self.entries.len() != other.entries.len() {
    panic!("Lengths are different");
  }
}
/// Status of the given position with regard to this BMOC: `IN` if it falls
/// in a fully-covered cell, `UNKNOWN` if it falls in a partially-covered
/// cell, `OUT` otherwise.
pub fn test_coo(&self, lon: f64, lat: f64) -> Status {
let h_raw = build_raw_value(self.depth_max, hash(self.depth_max, lon, lat), true, self.depth_max);
match self.entries.binary_search(&h_raw) {
Ok(i) =>
if is_partial(self.entries[i]) {
Status::UNKNOWN
} else {
Status::IN
},
Err(i) => {
// No exact match: the position may still be inside a larger (or partial)
// cell located just before, or at, the insertion point.
let cell = Cell::new(h_raw, self.depth_max);
if i > 0 && is_in(&self.from_raw_value(self.entries[i - 1]), &cell) {
if is_partial(self.entries[i - 1]) {
Status::UNKNOWN
} else {
Status::IN
}
} else if i < self.entries.len() && is_in(&self.from_raw_value(self.entries[i]), &cell) {
if is_partial(self.entries[i]) {
Status::UNKNOWN
} else {
Status::IN
}
} else {
Status::OUT
}
},
}
}
/// Returns the complement (logical NOT) of this BMOC: the cells not covered
/// by it. Partial cells are kept (as partial) in the result.
pub fn not(&self) -> BMOC {
  // Worst case: 3 sibling cells per entry per level, plus the 12 base cells.
  let mut builder = BMOCBuilderUnsafe::new(self.depth_max, 3 * self.entries.len() + 12);
  // Complement of an empty BMOC: the 12 base cells, fully covered.
  // (idiom fix: `is_empty()` instead of `len() == 0`, clippy::len_zero)
  if self.entries.is_empty() {
    for h in 0..12_u64 {
      builder.push(0_u8, h, true);
    }
    return builder.to_bmoc();
  }
  let mut d = 0_u8;
  let mut h = 0_u64;
  // Go down to the first cell, pushing all the cells preceding it.
  let mut cell = self.from_raw_value(self.entries[0]);
  go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
  if !cell.is_full {
    builder.push_raw_unsafe(cell.raw_value);
  }
  // For each following cell: go up to the common ancestor, then down again,
  // pushing the in-between cells along the way.
  for i in 1..self.entries.len() {
    cell = self.from_raw_value(self.entries[i]);
    let dd = dd_4_go_up(d, h, cell.depth, cell.hash);
    go_up(&mut d, &mut h, dd, true, &mut builder);
    go_down(&mut d, &mut h, cell.depth, cell.hash, true, &mut builder);
    if !cell.is_full {
      builder.push_raw_unsafe(cell.raw_value);
    }
  }
  // Close: go back up to depth 0 and push the remaining base cells.
  let delta_depth = d;
  go_up(&mut d, &mut h, delta_depth, true, &mut builder);
  for h in h..12 {
    builder.push(0_u8, h, true);
  }
  builder.to_bmoc()
}
/// Returns the intersection (logical AND) of this BMOC with the given one:
/// overlapping parts are kept at the deeper of the two depths, flagged full
/// only when both sides are full.
pub fn and(&self, other: &BMOC) -> BMOC {
  let mut builder = BMOCBuilderUnsafe::new(
    max(self.depth_max, other.depth_max),
    max(self.entries.len(), other.entries.len())
  );
  // Merge the two ordered cell streams.
  let mut it_left = self.into_iter();
  let mut it_right = other.into_iter();
  let mut left = it_left.next();
  let mut right = it_right.next();
  while let (Some(l), Some(r)) = (&left, &right) {
    match l.depth.cmp(&r.depth) {
      Ordering::Less => {
        // Left cell is larger: compare right's hash degraded at left's depth.
        let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
        match l.hash.cmp(&hr_at_dl) {
          Ordering::Less => left = it_left.next(),
          Ordering::Greater => right = it_right.next(),
          Ordering::Equal => {
            debug_assert_eq!(l.hash, hr_at_dl);
            builder.push(r.depth, r.hash, r.is_full && l.is_full);
            right = it_right.next()
          }
        }
      },
      Ordering::Greater => {
        let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
        match hl_at_dr.cmp(&r.hash) {
          Ordering::Less => left = it_left.next(),
          Ordering::Greater => right = it_right.next(),
          Ordering::Equal => {
            debug_assert_eq!(hl_at_dr, r.hash);
            builder.push(l.depth, l.hash, r.is_full && l.is_full);
            left = it_left.next()
          }
        }
      },
      Ordering::Equal => {
        debug_assert_eq!(l.depth, r.depth);
        // BUGFIX: compare the HASHES here, not the depths. The original
        // re-matched `l.depth.cmp(&r.depth)`, which is always `Equal` in this
        // arm, so equal-depth cells with different hashes were pushed as if
        // they were the same cell (cf. the `or`/`xor` implementations, which
        // correctly compare `l.hash.cmp(&r.hash)`).
        match l.hash.cmp(&r.hash) {
          Ordering::Less => left = it_left.next(),
          Ordering::Greater => right = it_right.next(),
          Ordering::Equal => {
            debug_assert_eq!(l.hash, r.hash);
            builder.push(l.depth, l.hash, r.is_full && l.is_full);
            left = it_left.next();
            right = it_right.next()
          }
        }
      }
    }
  }
  builder.to_bmoc()
}
/// Returns the union (logical OR) of this BMOC with the given one.
/// The result is packed before being returned.
pub fn or(&self, other: &BMOC) -> BMOC {
let mut builder = BMOCBuilderUnsafe::new(
max(self.depth_max, other.depth_max),
max(self.entries.len(), other.entries.len())
);
// Merge the two ordered cell streams.
let mut it_left = self.into_iter();
let mut it_right = other.into_iter();
let mut left = it_left.next();
let mut right = it_right.next();
while let (Some(l), Some(r)) = (&left, &right) {
match l.depth.cmp(&r.depth) {
Ordering::Less => {
// Left cell is larger: compare right's hash degraded at left's depth.
let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
if l.hash < hr_at_dl {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
} else if l.hash > hr_at_dl {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
} else if l.is_full {
// Left fully covers: skip all right cells inside it.
debug_assert_eq!(l.hash, hr_at_dl);
builder.push(l.depth, l.hash, l.is_full);
right = consume_while_overlapped(l, &mut it_right);
left = it_left.next();
} else {
// Left is partial: if a full right cell overlaps, the interior must
// be expanded cell by cell; otherwise keep left as partial.
debug_assert_eq!(l.hash, hr_at_dl);
debug_assert!(!l.is_full);
let mut is_overlapped = false;
right = consume_while_overlapped_and_partial(l, &mut it_right, &mut is_overlapped);
if is_overlapped {
right = self.not_in_cell_4_or(l, right.unwrap(), &mut it_right, &mut builder);
} else { builder.push(l.depth, l.hash, false);
}
left = it_left.next();
}
},
Ordering::Greater => {
// Mirror of the previous case, with left and right swapped.
let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
if hl_at_dr < r.hash {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
} else if hl_at_dr > r.hash {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
} else if r.is_full {
debug_assert_eq!(hl_at_dr, r.hash);
builder.push(r.depth, r.hash, r.is_full);
left = consume_while_overlapped(r, &mut it_left);
right = it_right.next();
} else {
debug_assert_eq!(hl_at_dr, r.hash);
debug_assert!(!r.is_full);
let mut is_overlapped = false;
left = consume_while_overlapped_and_partial(r, &mut it_left, &mut is_overlapped);
if is_overlapped {
left = self.not_in_cell_4_or(r, left.unwrap(), &mut it_left, &mut builder);
} else { builder.push(r.depth, r.hash, false);
}
right = it_right.next();
}
},
Ordering::Equal => {
// Same depth: push the smaller hash, or merge the flags when equal.
debug_assert_eq!(l.depth, r.depth);
match l.hash.cmp(&r.hash) {
Ordering::Less => {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
},
Ordering::Greater => {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
},
Ordering::Equal => {
debug_assert_eq!(l.hash, r.hash);
builder.push(l.depth, l.hash, r.is_full || l.is_full);
left = it_left.next();
right = it_right.next();
}
}
}
}
}
// Copy the remaining cells of the non-exhausted stream.
while let Some(l) = &left {
debug_assert!(right.is_none());
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
}
while let Some(r) = &right {
debug_assert!(left.is_none());
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
}
builder.to_bmoc_packing()
}
/// Handles, for `or`, the interior of the partial cell `low_resolution` when a
/// full higher-resolution cell `c` overlaps it: walks the sub-cells of
/// `low_resolution`, pushing each overlapped full cell as full and the rest of
/// the interior as partial (flag `false`). Returns the first cell of `iter`
/// not contained in `low_resolution`.
fn not_in_cell_4_or(&self, low_resolution: &Cell, mut c: Cell, iter: &mut BMOCIter, builder: &mut BMOCBuilderUnsafe) -> Option<Cell> {
let mut d = low_resolution.depth;
let mut h = low_resolution.hash;
debug_assert!(c.is_full);
go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
builder.push(c.depth, c.hash, true);
let mut is_overlapped = false;
let mut cell;
while {
cell = consume_while_overlapped_and_partial(low_resolution, iter, &mut is_overlapped);
is_overlapped
} {
// Next overlapping full cell: climb to the common ancestor, then descend.
c = cell.unwrap(); let dd = dd_4_go_up(d, h, c.depth, c.hash);
go_up(&mut d, &mut h, dd, false, builder);
go_down(&mut d, &mut h, c.depth, c.hash, false, builder);
builder.push(c.depth, c.hash, true);
}
// Close the interior of `low_resolution`.
let dd = d - low_resolution.depth;
go_up(&mut d, &mut h, dd, false, builder);
go_down(&mut d, &mut h, low_resolution.depth, low_resolution.hash + 1, false, builder);
cell
}
/// Returns the symmetric difference (logical XOR) of this BMOC with the given
/// one. The result is packed before being returned.
pub fn xor(&self, other: &BMOC) -> BMOC {
let mut builder = BMOCBuilderUnsafe::new(
max(self.depth_max, other.depth_max),
max(self.entries.len(), other.entries.len())
);
// Merge the two ordered cell streams.
let mut it_left = self.into_iter();
let mut it_right = other.into_iter();
let mut left = it_left.next();
let mut right = it_right.next();
while let (Some(l), Some(r)) = (&left, &right) {
match l.depth.cmp(&r.depth) {
Ordering::Less => {
// Left cell is larger: compare right's hash degraded at left's depth.
let hr_at_dl = r.hash >> ((r.depth - l.depth) << 1);
if l.hash < hr_at_dl {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
} else if l.hash > hr_at_dl {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
} else if l.is_full {
// Left is full: push the parts of it NOT covered by right's cells.
debug_assert_eq!(l.hash, hr_at_dl);
right = self.not_in_cell_4_xor(l, r, &mut it_right, &mut builder);
left = it_left.next();
} else {
// Left is partial: keep it as-is and skip right's cells inside it.
debug_assert_eq!(l.hash, hr_at_dl);
debug_assert!(!l.is_full);
builder.push(l.depth, l.hash, l.is_full);
right = consume_while_overlapped(l, &mut it_right);
left = it_left.next();
}
},
Ordering::Greater => {
// Mirror of the previous case, with left and right swapped.
let hl_at_dr = l.hash >> ((l.depth - r.depth) << 1);
if hl_at_dr < r.hash {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
} else if hl_at_dr > r.hash {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
} else if r.is_full {
debug_assert_eq!(hl_at_dr, r.hash);
left = self.not_in_cell_4_xor(r, l, &mut it_left, &mut builder);
right = it_right.next();
} else {
debug_assert_eq!(hl_at_dr, r.hash);
debug_assert!(!r.is_full);
builder.push(r.depth, r.hash, r.is_full);
left = consume_while_overlapped(r, &mut it_left);
right = it_right.next();
}
},
Ordering::Equal => {
debug_assert_eq!(l.depth, r.depth);
match l.hash.cmp(&r.hash) {
Ordering::Less => {
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
},
Ordering::Greater => {
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
},
Ordering::Equal => {
// Same cell on both sides: dropped only when fully covered on both.
debug_assert_eq!(l.hash, r.hash);
let both_fully_covered = r.is_full && l.is_full;
if !both_fully_covered {
builder.push(l.depth, l.hash, both_fully_covered);
}
left = it_left.next();
right = it_right.next();
}
}
}
}
}
// Copy the remaining cells of the non-exhausted stream.
while let Some(l) = &left {
debug_assert!(right.is_none());
builder.push(l.depth, l.hash, l.is_full);
left = it_left.next();
}
while let Some(r) = &right {
debug_assert!(left.is_none());
builder.push(r.depth, r.hash, r.is_full);
right = it_right.next();
}
builder.to_bmoc_packing()
}
/// Handles, for `xor`, the interior of the full cell `low_resolution` when
/// higher-resolution cells overlap it: pushes (flag `true`) the sub-cells of
/// `low_resolution` NOT covered by the overlapping cells (partial overlapped
/// cells are re-pushed with flag `false`). Returns the first cell of `iter`
/// not contained in `low_resolution`.
fn not_in_cell_4_xor(&self, low_resolution: &Cell, c: &Cell, iter: &mut BMOCIter, builder: &mut BMOCBuilderUnsafe) -> Option<Cell> {
let mut d = low_resolution.depth;
let mut h = low_resolution.hash;
go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
if !c.is_full {
builder.push(c.depth, c.hash, false);
}
let mut cell = iter.next();
while let Some(c) = &cell {
if !is_in(low_resolution, c) {
break;
}
// Climb to the common ancestor, then descend to the next overlapped cell.
let dd = dd_4_go_up(d, h, c.depth, c.hash);
go_up(&mut d, &mut h, dd, true, builder);
go_down(&mut d, &mut h, c.depth, c.hash, true, builder);
if !c.is_full {
builder.push(c.depth, c.hash, false);
}
cell = iter.next()
}
// Close the interior of `low_resolution`.
let dd = d - low_resolution.depth;
go_up(&mut d, &mut h, dd, true, builder);
go_down(&mut d, &mut h, low_resolution.depth, low_resolution.hash + 1, true, builder);
cell
}
/// Decodes `raw_value` into a [Cell] using this BMOC's `depth_max`.
pub fn from_raw_value(&self, raw_value: u64) -> Cell {
Cell::new(raw_value, self.depth_max)
}
/// Total number of cells at `depth_max` this BMOC covers once flattened.
pub fn deep_size(&self) -> usize {
let mut sum = 0_usize;
for &raw_value in self.entries.iter() {
let depth = self.get_depth(raw_value);
// A cell at `depth` covers nside^2 = 4^(depth_max - depth) cells at depth_max.
sum += nside_square_unsafe(self.depth_max - depth) as usize;
}
sum
}
/// Iterator over the raw `u64` entries.
pub fn iter(&self) -> Iter<u64> {
self.entries.iter()
}
/// Iterator over the hashes of all the cells at `depth_max` (flattened view).
pub fn flat_iter(&self) -> BMOCFlatIter {
BMOCFlatIter::new(self.depth_max, self.deep_size(), self.entries.iter())
}
/// Consuming version of `flat_iter`.
pub fn into_flat_iter(self) -> BMOCIntoFlatIter {
BMOCIntoFlatIter::new(self.depth_max, self.deep_size(), self.entries.into_vec().into_iter())
}
/// Like `flat_iter`, but yields full [Cell]s (keeping the originating
/// raw value and full/partial flag).
pub fn flat_iter_cell(&self) -> BMOCFlatIterCell {
BMOCFlatIterCell::new(self.depth_max, self.deep_size(), self.entries.iter())
}
/// Collects `flat_iter` into a boxed slice of `deep_size()` hashes.
pub fn to_flat_array(&self) -> Box<[u64]> {
let mut res: Vec<u64> = Vec::with_capacity(self.deep_size());
for cell in self.flat_iter() {
res.push(cell);
}
res.into_boxed_slice()
}
/// Depth of the cell encoded in `raw_value` (flag bit removed first).
fn get_depth(&self, raw_value: u64) -> u8 {
self.get_depth_no_flag(rm_flag(raw_value))
}
/// Depth from a flag-less raw value (trailing zero bit-pairs = delta depth).
fn get_depth_no_flag(&self, raw_value_no_flag: u64) -> u8 {
self.depth_max - (raw_value_no_flag.trailing_zeros() >> 1) as u8
}
/// Decodes both the depth and the hash of `raw_value` (ignoring the flag).
fn get_depth_icell(&self, raw_value: u64) -> (u8, u64) {
let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
let hash = raw_value >> (2 + (delta_depth << 1));
let depth = self.depth_max - delta_depth;
(depth, hash)
}
/// Transforms this BMOC into a list of ranges of hashes at `depth_max`,
/// ignoring the full/partial flags (contiguous cells are merged).
pub fn to_ranges(&self) -> Box<[std::ops::Range<u64>]> {
let mut ranges: Vec<std::ops::Range<u64>> = Vec::with_capacity(self.entries.len());
let mut prev_min = 0_u64;
let mut prev_max = 0_u64;
for cell in self.into_iter() {
if cell.depth < self.depth_max {
let range = to_range(cell.hash, self.depth_max - cell.depth);
if range.start != prev_max {
// Not contiguous: flush the previous (non-empty) range.
if prev_min != prev_max { ranges.push(prev_min..prev_max);
}
prev_min = range.start;
}
prev_max = range.end;
} else if cell.hash == prev_max {
// Contiguous depth_max cell: simply extend the current range.
prev_max += 1;
} else {
if prev_min != prev_max { ranges.push(prev_min..prev_max);
}
prev_min = cell.hash;
prev_max = cell.hash + 1;
}
}
// Flush the last range.
if prev_min != prev_max { ranges.push(prev_min..prev_max);
}
ranges.into_boxed_slice()
}
/// Transforms this BMOC into a list of ranges of hashes at `depth_max`, each
/// range carrying the `is_full` flag of the cells it was built from
/// (contiguous cells are merged only when their flags agree).
pub fn to_flagged_ranges(&self) -> Vec<(std::ops::Range<u64>, bool)> {
  let mut ranges: Vec<(std::ops::Range<u64>, bool)> = Vec::with_capacity(self.entries.len());
  let mut prev_min = 0_u64;
  let mut prev_max = 0_u64;
  let mut prev_flag = false;
  for cell in self.into_iter() {
    if cell.depth < self.depth_max {
      let range = to_range(cell.hash, self.depth_max - cell.depth);
      if range.start == prev_max && (prev_max == 0 || cell.is_full == prev_flag) {
        // BUGFIX: adopt the cell flag when extending. This is a no-op when the
        // flags already match, but it is required in the `prev_max == 0` case
        // (the very first cell starting at hash 0): the original kept the
        // initial `false`, mislabelling a leading run of full cells.
        prev_flag = cell.is_full;
        prev_max = range.end;
      } else {
        // Flush the previous (non-empty) range and start a new one.
        if prev_min != prev_max {
          ranges.push((prev_min..prev_max, prev_flag));
        }
        prev_min = range.start;
        prev_max = range.end;
        prev_flag = cell.is_full;
      }
    } else if cell.hash == prev_max && cell.is_full == prev_flag {
      // Contiguous depth_max cell with the same flag: extend the range.
      prev_max += 1;
    } else {
      if prev_min != prev_max {
        ranges.push((prev_min..prev_max, prev_flag));
      }
      prev_min = cell.hash;
      prev_max = cell.hash + 1;
      prev_flag = cell.is_full;
    }
  }
  // Flush the last range.
  if prev_min != prev_max {
    ranges.push((prev_min..prev_max, prev_flag));
  }
  ranges.shrink_to_fit();
  ranges
}
#[allow(clippy::many_single_char_names)]
/// Serializes this BMOC into a compact bit stream, walking an implicit
/// depth-first tree. Bit codes (see `CompressedMOCBuilder`): node empty = `1,0`,
/// node full = `1,1`, node partial (descend) = `0`; leaf (cell at `depth_max`)
/// empty = `0`, full = `1`. Lossy: only the tree structure is kept, not the
/// raw flag of every entry.
pub fn compress_lossy(&self) -> CompressedMOC {
let n = self.entries.len();
let dm = self.depth_max;
let mut b = CompressedMOCBuilder::new(dm, 4 + 3 * n);
// Special case: empty MOC — 12 empty base cells.
if n == 0 { if dm == 0 {
for _ in 0..12 {
b.push_leaf_empty();
}
} else {
for _ in 0..12 {
b.push_node_empty();
}
}
return b.to_compressed_moc();
} else if dm == 0 { let (curr_d, _) = self.get_depth_icell(self.entries[0]);
// Special case: depth-0 MOC — only the 12 base cells, written as leaves.
assert_eq!(curr_d, 0);
let mut h = 0_u64;
for (_, curr_h) in self.entries.iter().map(|e| self.get_depth_icell(*e)) {
for _ in h..curr_h {
b.push_leaf_empty();
}
b.push_leaf_full();
h = curr_h + 1;
}
for _ in h..12 {
b.push_leaf_empty();
}
return b.to_compressed_moc();
}
// General case. First, go down to the first cell, writing the empty
// cells/nodes preceding it at each level.
let mut d;
let mut h = 0;
let (curr_d, curr_h) = self.get_depth_icell(self.entries[0]);
for dd in (0..=curr_d).rev() {
let target_h = curr_h >> (dd << 1);
if curr_d == dm && dd == 0 {
for _ in h..target_h {
b.push_leaf_empty();
}
b.push_leaf_full();
} else {
for _ in h..target_h {
b.push_node_empty();
}
if dd == 0 { b.push_node_full() } else { b.push_node_partial() };
}
h = target_h << 2;
}
d = curr_d;
h = curr_h;
// Then, for each following cell: go up to the deepest common ancestor
// (closing the remaining siblings at each level), and go down again.
let mut i = 1_usize;
while i < n {
let (curr_d, curr_h) = self.get_depth_icell(self.entries[i]);
// Express the next cell's hash at the current depth `d`.
let target_h = if d > curr_d { let dd = d - curr_d;
curr_h << (dd << 1)
} else { let dd = curr_d - d;
curr_h >> (dd << 1)
};
// Number of levels to climb: highest differing bit-pair, capped at d.
let mut dd = ((63 - (h ^ target_h).leading_zeros()) >> 1) as u8;
if dd > d {
dd = d;
}
// Close the remaining siblings (leaves first if at depth_max).
if dd > 0 && d == dm {
for _ in h & 3..3 { b.push_leaf_empty();
}
h >>= 2;
dd -= 1;
d -= 1;
}
for _ in 0..dd {
for _ in h & 3..3 { b.push_node_empty();
}
h >>= 2;
}
d -= dd;
h += 1;
// Go down to the next cell, as for the first one.
let dd = curr_d - d;
for rdd in (0..=dd).rev() {
let target_h = curr_h >> (rdd << 1);
if curr_d == dm && rdd == 0 {
for _ in h..target_h {
b.push_leaf_empty();
}
b.push_leaf_full();
} else {
for _ in h..target_h {
b.push_node_empty();
}
if rdd == 0 { b.push_node_full() } else { b.push_node_partial() };
}
h = target_h << 2;
}
d = curr_d;
h = curr_h;
i += 1;
}
// Finally, close the tree: climb back to depth 0 and write the remaining
// empty base cells.
if d == dm {
for _ in h & 3..3 {
b.push_leaf_empty();
}
h >>= 2;
d -= 1;
}
for _ in 0..d {
for _ in h & 3..3 {
b.push_node_empty();
}
h >>= 2;
}
if dm == 0 {
for _ in h + 1..12 {
b.push_leaf_empty();
}
} else {
for _ in h + 1..12 {
b.push_node_empty();
}
}
b.to_compressed_moc()
}
}
/// Advances `iter` past every cell contained in `low_resolution`, returning
/// the first cell that is not contained (or `None` if the iterator ends).
#[inline]
fn consume_while_overlapped(low_resolution: &Cell, iter: &mut BMOCIter) -> Option<Cell> {
  loop {
    match iter.next() {
      Some(c) if is_in(low_resolution, &c) => continue,
      other => return other,
    }
  }
}
/// Advances `iter` past the cells contained in `low_resolution` as long as
/// they are partial. Stops either on the first cell NOT contained (returning
/// it, leaving `res_is_overlapped` untouched), or on the first contained FULL
/// cell (returning it after setting `res_is_overlapped` to `true`).
#[inline]
fn consume_while_overlapped_and_partial(low_resolution: &Cell, iter: &mut BMOCIter, res_is_overlapped: &mut bool) -> Option<Cell> {
  loop {
    let cell = iter.next();
    match &cell {
      Some(c) if is_in(low_resolution, c) => {
        if c.is_full {
          *res_is_overlapped = true;
          return cell;
        }
        // Contained and partial: keep consuming.
      }
      _ => return cell,
    }
  }
}
/// Number of levels to climb from (`d`, `h`) so that, after moving to the
/// next cell at the reached level, one can descend to (`next_d`, `next_h`).
/// Returns 0 when the two hashes coincide at depth `d`; capped at `d`.
#[inline]
fn dd_4_go_up(d: u8, h: u64, next_d: u8, next_h: u64) -> u8 {
  // Express the target hash at the current depth `d`.
  let target_h_at_d = if next_d < d {
    next_h << ((d - next_d) << 1)
  } else {
    next_h >> ((next_d - d) << 1)
  };
  let xor = h ^ target_h_at_d;
  if xor == 0 {
    return 0;
  }
  // Index of the highest differing bit-pair, capped at d.
  let highest_bit = 63_u8 - xor.leading_zeros() as u8;
  (highest_bit >> 1).min(d)
}
/// `true` if `high_resolution` is inside (or equal to) `low_resolution`:
/// same or deeper depth, and same hash once degraded to the lower depth.
#[inline]
fn is_in(low_resolution: &Cell, high_resolution: &Cell) -> bool {
  if low_resolution.depth > high_resolution.depth {
    return false;
  }
  let twice_dd = (high_resolution.depth - low_resolution.depth) << 1;
  low_resolution.hash == high_resolution.hash >> twice_dd
}
/// Removes the full/partial flag bit (LSB) from a raw value.
#[inline]
fn rm_flag(raw_value: u64) -> u64 {
  raw_value >> 1
}
/// `true` if the raw value is flagged as partially covered (flag bit unset).
#[inline]
fn is_partial(raw_value: u64) -> bool {
  raw_value & 1_u64 != 1_u64
}
/// `true` if the hash is not the first (index 0) child of its parent cell.
#[inline]
fn is_not_first_cell_of_larger_cell(hash: u64) -> bool {
  hash & 3_u64 != 0_u64
}
/// Depth of the cell encoded in `raw_value` (the flag bit is removed first).
#[inline]
fn get_depth(raw_value: u64, depth_max: u8) -> u8 {
  let no_flag = rm_flag(raw_value);
  get_depth_no_flag(no_flag, depth_max)
}
/// Depth from a flag-less raw value: the number of trailing zero bit-pairs
/// (position of the sentinel bit) is the delta from `depth_max`.
#[inline]
fn get_depth_no_flag(raw_value_no_flag: u64, depth_max: u8) -> u8 {
  let delta_depth = (raw_value_no_flag.trailing_zeros() >> 1) as u8;
  depth_max - delta_depth
}
/// Extracts the hash from a raw value, skipping the flag bit, the sentinel
/// bit, and the `delta_depth` padding bit-pairs.
#[inline]
fn get_hash_from_delta_depth(raw_value: u64, delta_depth: u8) -> u64 {
  let shift = 2 + (delta_depth << 1);
  raw_value >> shift
}
/// Owned iterator over all the cells at `depth_max` of a BMOC (flattened view).
pub struct BMOCIntoFlatIter {
depth_max: u8,
// Total number of depth_max cells to be returned (kept for size_hint)
deep_size: usize,
raw_val_iter: IntoIter<u64>,
// Next hash to return; `None` once exhausted
curr_val: Option<u64>,
// Last depth_max hash covered by the current entry
curr_val_max: u64,
n_returned: usize,
}
impl BMOCIntoFlatIter {
fn new(depth_max: u8, deep_size: usize, raw_val_iter: IntoIter<u64>) -> BMOCIntoFlatIter {
let mut flat_iter = BMOCIntoFlatIter {
depth_max, deep_size, raw_val_iter,
curr_val: None, curr_val_max: 0_u64, n_returned: 0_usize
};
// Load the first entry so that `next()` has a value ready.
flat_iter.next_cell();
flat_iter
}
pub fn deep_size(&self) -> usize {
self.deep_size
}
pub fn depth(&self) -> u8 {
self.depth_max
}
/// Decodes the next raw entry into its range of depth_max hashes
/// (`curr_val..=curr_val_max`); returns the previously current value.
fn next_cell(&mut self) -> Option<u64> {
match self.raw_val_iter.next() {
None => self.curr_val.take(),
Some(raw_value) => {
let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
let twice_delta_depth = delta_depth << 1;
// Hash at the entry's depth, then its first sub-hash at depth_max.
let hash = raw_value >> (2 + twice_delta_depth);
let val = hash << twice_delta_depth;
self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
self.curr_val.replace(val)
},
}
}
}
impl Iterator for BMOCIntoFlatIter {
type Item = u64;
fn next(&mut self) -> Option<u64> {
if let Some(val) = self.curr_val {
self.n_returned += 1;
if val < self.curr_val_max {
// Still inside the current entry's range.
self.curr_val.replace(val + 1)
} else {
self.next_cell()
}
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: total flattened size minus the cells already returned.
let n = self.deep_size - self.n_returned;
(n, Some(n))
}
}
/// Borrowing iterator over all the cells at `depth_max` of a BMOC (flattened view).
pub struct BMOCFlatIter<'a> {
depth_max: u8,
// Total number of depth_max cells to be returned (kept for size_hint)
deep_size: usize,
raw_val_iter: Iter<'a, u64>,
// Next hash to return; `None` once exhausted
curr_val: Option<u64>,
// Last depth_max hash covered by the current entry
curr_val_max: u64,
n_returned: usize,
}
impl<'a> BMOCFlatIter<'a> {
fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIter<'a> {
let mut flat_iter = BMOCFlatIter {
depth_max, deep_size, raw_val_iter,
curr_val: None, curr_val_max: 0_u64, n_returned: 0_usize
};
// Load the first entry so that `next()` has a value ready.
flat_iter.next_cell();
flat_iter
}
pub fn deep_size(&self) -> usize {
self.deep_size
}
pub fn depth(&self) -> u8 {
self.depth_max
}
/// Decodes the next raw entry into its range of depth_max hashes
/// (`curr_val..=curr_val_max`); returns the previously current value.
fn next_cell(&mut self) -> Option<u64> {
match self.raw_val_iter.next() {
None => self.curr_val.take(),
Some(&raw_value) => {
let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
let twice_delta_depth = delta_depth << 1;
// Hash at the entry's depth, then its first sub-hash at depth_max.
let hash = raw_value >> (2 + twice_delta_depth);
let val = hash << twice_delta_depth;
self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
self.curr_val.replace(val)
},
}
}
}
impl<'a> Iterator for BMOCFlatIter<'a> {
type Item = u64;
fn next(&mut self) -> Option<u64> {
if let Some(val) = self.curr_val {
self.n_returned += 1;
if val < self.curr_val_max {
// Still inside the current entry's range.
self.curr_val.replace(val + 1)
} else {
self.next_cell()
}
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: total flattened size minus the cells already returned.
let n = self.deep_size - self.n_returned;
(n, Some(n))
}
}
/// Borrowing iterator over all the cells at `depth_max` of a BMOC, yielding
/// full [Cell]s (keeping the originating raw value and full/partial flag).
pub struct BMOCFlatIterCell<'a> {
depth_max: u8,
// Total number of depth_max cells to be returned (kept for size_hint)
deep_size: usize,
raw_val_iter: Iter<'a, u64>,
// Next cell to return; `None` once exhausted
curr_val: Option<Cell>,
// Last depth_max hash covered by the current entry
curr_val_max: u64,
n_returned: usize,
}
impl<'a> BMOCFlatIterCell<'a> {
fn new(depth_max: u8, deep_size: usize, raw_val_iter: Iter<'a, u64>) -> BMOCFlatIterCell<'a> {
let mut flat_iter = BMOCFlatIterCell {
depth_max, deep_size, raw_val_iter,
curr_val: None, curr_val_max: 0_u64, n_returned: 0_usize
};
// Load the first entry so that `next()` has a value ready.
flat_iter.next_cell();
flat_iter
}
pub fn deep_size(&self) -> usize {
self.deep_size
}
pub fn depth(&self) -> u8 {
self.depth_max
}
/// Decodes the next raw entry into its first depth_max sub-cell and the
/// upper bound of its hash range; returns the previously current cell.
fn next_cell(&mut self) -> Option<Cell> {
match self.raw_val_iter.next() {
None => self.curr_val.take(),
Some(&raw_value) => {
let delta_depth = ((raw_value >> 1).trailing_zeros() >> 1) as u8;
let twice_delta_depth = delta_depth << 1;
// Hash at the entry's depth, then its first sub-hash at depth_max.
let hash = raw_value >> (2 + twice_delta_depth);
let val = hash << twice_delta_depth;
self.curr_val_max = val | ((1_u64 << twice_delta_depth) - 1_u64);
self.curr_val.replace(Cell {
raw_value,
depth: self.depth_max,
hash: val,
is_full: (raw_value & 1_u64) == 1_u64,
})
},
}
}
}
impl<'a> Iterator for BMOCFlatIterCell<'a> {
type Item = Cell;
fn next(&mut self) -> Option<Cell> {
if let Some(cell) = &self.curr_val {
self.n_returned += 1;
if cell.hash < self.curr_val_max {
// Still inside the current entry's range: advance the sub-hash.
let new_cell = Cell {
raw_value: cell.raw_value,
depth: self.depth_max,
hash: cell.hash + 1,
is_full: cell.is_full,
};
self.curr_val.replace(new_cell)
} else {
self.next_cell()
}
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
// Exact count: total flattened size minus the cells already returned.
let n = self.deep_size - self.n_returned;
(n, Some(n))
}
}
/// Iterator over the entries of a BMOC, decoded as [Cell]s.
pub struct BMOCIter<'a> {
depth_max: u8,
iter: Iter<'a, u64>,
}
impl<'a> Iterator for BMOCIter<'a> {
  type Item = Cell;
  /// Decodes and returns the next entry as a [Cell].
  fn next(&mut self) -> Option<Cell> {
    // Idiom: `Option::map` instead of a manual `match` returning
    // `Some`/`None` (clippy::manual_map).
    self.iter.next().map(|&raw_value| Cell::new(raw_value, self.depth_max))
  }
  fn size_hint(&self) -> (usize, Option<usize>) {
    // Exact: one Cell per remaining raw entry.
    self.iter.size_hint()
  }
}
/// Allows `for cell in &bmoc { ... }`: iterates over the entries decoded as [Cell]s.
impl<'a> IntoIterator for &'a BMOC {
type Item = Cell;
type IntoIter = BMOCIter<'a>;
fn into_iter(self) -> Self::IntoIter {
BMOCIter { depth_max: self.depth_max, iter: self.entries.iter() }
}
}
/// Encodes a cell into a raw value: the `hash`, followed by a sentinel bit
/// (set to 1, marking the depth), `depth_max - depth` zero bit-pairs, and the
/// `is_full` flag in the least significant bit.
#[inline]
fn build_raw_value(depth: u8, hash: u64, is_full: bool, depth_max: u8) -> u64 {
  let twice_delta_depth = (depth_max - depth) << 1;
  let hash_and_sentinel = (hash << 1) | 1_u64;
  (hash_and_sentinel << (1 + twice_delta_depth)) | (is_full as u64)
}
/// Climbs `delta_depth` levels from (`start_depth`, `start_hash`), pushing at
/// each level the siblings following the current cell (with flag `flag`),
/// then advances to the next cell at the reached depth (`start_hash + 1`).
fn go_up(start_depth: &mut u8, start_hash: &mut u64, delta_depth: u8, flag: bool, builder: &mut BMOCBuilderUnsafe) {
for _ in 0_u8..delta_depth {
// Push the remaining siblings (up to child index 3) at this level.
let target_hash = *start_hash | 3_u64;
for h in (*start_hash + 1)..=target_hash {
builder.push(*start_depth, h, flag);
}
*start_hash >>= 2;
*start_depth -= 1;
}
*start_hash += 1;
}
/// Descends from (`start_depth`, `start_hash`) to (`target_depth`, `target_hash`),
/// pushing at each level the cells preceding the target's ancestor (with flag
/// `flag`). On exit, `start_depth`/`start_hash` equal the target depth/hash.
fn go_down(start_depth: &mut u8, start_hash: &mut u64,
target_depth: u8, target_hash: u64, flag: bool, builder: &mut BMOCBuilderUnsafe) {
debug_assert!(target_depth >= *start_depth);
let mut twice_dd = (target_depth - *start_depth) << 1;
for d in *start_depth..=target_depth { let target_h_at_d = target_hash >> twice_dd;
// Push the cells located before the target's ancestor at this level.
for h in *start_hash..target_h_at_d {
builder.push(d, h, flag);
}
if d != target_depth {
// Move to the first child of the ancestor for the next level.
*start_hash = target_h_at_d << 2;
twice_dd -= 2;
}
}
*start_depth = target_depth;
*start_hash = target_hash;
}
/// Incremental writer of a compressed MOC bit stream.
/// The first byte of `moc` stores the maximum depth; the following bytes hold
/// the stream, bits being written from the least significant bit upward.
pub struct CompressedMOCBuilder {
// Output buffer; index 0 holds the depth, bits are appended from index 1 on.
moc: Vec<u8>,
// Maximum depth of the MOC being compressed.
depth_max: u8,
// Index of the byte currently being written.
ibyte: usize,
// Index, in [0, 8), of the next bit to write in `moc[ibyte]`.
ibit: u8,
}
impl CompressedMOCBuilder {
  /// Creates a builder with room for `capacity` bytes of bit stream,
  /// plus one leading byte storing the maximum depth.
  fn new(depth_max: u8, capacity: usize) -> CompressedMOCBuilder {
    let mut moc = vec![0_u8; capacity + 1];
    moc[0] = depth_max;
    CompressedMOCBuilder {
      moc,
      depth_max,
      ibyte: 1,
      ibit: 0,
    }
  }

  /// Shrinks the buffer to the bytes actually written and freezes the result.
  #[allow(clippy::wrong_self_convention)]
  fn to_compressed_moc(mut self) -> CompressedMOC {
    // A partially-filled last byte must be kept.
    let n_bytes = if self.ibit == 0 { self.ibyte } else { self.ibyte + 1 };
    self.moc.resize(n_bytes, 0);
    CompressedMOC {
      moc: self.moc.into_boxed_slice(),
      depth_max: self.depth_max,
    }
  }

  /// Appends a '0' bit: the buffer is zero-initialized, so only the cursor moves.
  fn push_0(&mut self) {
    if self.ibit == 7 {
      self.ibit = 0;
      self.ibyte += 1;
    } else {
      self.ibit += 1;
    }
  }

  /// Appends a '1' bit at the current cursor position.
  fn push_1(&mut self) {
    self.moc[self.ibyte] |= 1_u8 << self.ibit;
    self.push_0();
  }

  /// Encodes an empty internal node as the two bits '10'.
  fn push_node_empty(&mut self) {
    self.push_1();
    self.push_0();
  }

  /// Encodes a fully-covered internal node as the two bits '11'.
  fn push_node_full(&mut self) {
    self.push_1();
    self.push_1();
  }

  /// Encodes a partially-covered internal node as the single bit '0'.
  fn push_node_partial(&mut self) {
    self.push_0();
  }

  /// Encodes an empty leaf (cell at the maximum depth) as the single bit '0'.
  fn push_leaf_empty(&mut self) {
    self.push_0();
  }

  /// Encodes a full leaf (cell at the maximum depth) as the single bit '1'.
  fn push_leaf_full(&mut self) {
    self.push_1();
  }
}
/// Bit-stream reader over a compressed MOC byte slice.
/// Bits are read least-significant first, starting at byte 1
/// (byte 0 holds the depth).
pub struct CompressedMOCDecompHelper<'a> {
  // Compressed stream (first byte is the depth, skipped by the reader).
  moc: &'a [u8],
  // Index of the byte currently being read.
  ibyte: usize,
  // Index, in [0, 8), of the next bit to read in `moc[ibyte]`.
  ibit: u8,
}

impl<'a> CompressedMOCDecompHelper<'a> {
  /// Starts reading right after the leading depth byte.
  fn new(moc: &'a [u8]) -> CompressedMOCDecompHelper<'a> {
    CompressedMOCDecompHelper {
      moc,
      ibyte: 1,
      ibit: 0,
    }
  }

  /// Reads the next bit and advances the cursor.
  fn get(&mut self) -> bool {
    let bit_is_set = (self.moc[self.ibyte] >> self.ibit) & 1 == 1;
    if self.ibit == 7 {
      self.ibit = 0;
      self.ibyte += 1;
    } else {
      self.ibit += 1;
    }
    bit_is_set
  }
}
/// A MOC compressed into a bit stream (see `CompressedMOCBuilder` for the
/// per-node/leaf encoding).
pub struct CompressedMOC {
// Compressed stream; the first byte stores the maximum depth.
moc: Box<[u8]>,
// Maximum depth of the MOC (copy of `moc[0]`).
depth_max: u8,
}
impl CompressedMOC {
  /// Returns the maximum depth of the MOC.
  pub fn depth(&self) -> u8 {
    self.depth_max
  }

  /// Returns the size, in bytes, of the compressed stream (depth byte included).
  pub fn byte_size(&self) -> usize {
    self.moc.len()
  }

  /// Returns the base64 representation of the compressed stream.
  pub fn to_b64(&self) -> String {
    encode(&self.moc)
  }

  /// Decodes a compressed MOC from its base64 representation.
  ///
  /// # Errors
  /// Returns a `DecodeError` if the input is not valid base64.
  pub fn from_b64(b64_encoded: String) -> Result<CompressedMOC, DecodeError> {
    let decoded = decode(&b64_encoded)?;
    let depth_max = decoded[0];
    Ok(
      CompressedMOC {
        moc: decoded.into_boxed_slice(),
        depth_max,
      }
    )
  }

  /// Decompresses this compressed MOC into a BMOC.
  pub fn self_decompress(&self) -> BMOC {
    CompressedMOC::decompress(&self.moc)
  }

  /// Decompresses the given stream (first byte = depth) into a BMOC.
  pub fn decompress(cmoc: &[u8]) -> BMOC {
    let depth_max = cmoc[0];
    // Capacity upper bound: the stream contains at least one bit per cell.
    let mut moc_builder = BMOCBuilderUnsafe::new(depth_max, 8 * (cmoc.len() - 1));
    let mut bits = CompressedMOCDecompHelper::new(cmoc);
    let mut depth = 0_u8;
    let mut hash = 0_u64;
    // Depth-first traversal of the cell hierarchy; it ends once the cursor
    // comes back to depth 0 past the 12th (last) base cell.
    while depth != 0 || hash != 12 {
      if bits.get() {
        // '1' bit: a leaf at depth_max, or an internal node whose next bit
        // tells whether it is fully covered ('1') or empty ('0').
        if depth == depth_max || bits.get() {
          moc_builder.push(depth, hash, true);
        }
        // Climb up while the current cell is the last of its 4 siblings...
        while hash & 3 == 3 && depth > 0 {
          hash >>= 2;
          depth -= 1;
        }
        // ...then move to the next sibling.
        hash += 1;
      } else if depth == depth_max {
        // '0' bit at the maximum depth: empty leaf, move to the next cell.
        while hash & 3 == 3 && depth > 0 {
          hash >>= 2;
          depth -= 1;
        }
        hash += 1;
      } else {
        // '0' bit on an internal node: partially covered, descend to its
        // first child.
        debug_assert!(depth < depth_max);
        hash <<= 2;
        depth += 1;
      }
    }
    moc_builder.to_bmoc()
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  /// Lossy-compressed MOC of an empty coverage at the given depth.
  fn build_compressed_moc_empty(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    builder.to_bmoc().unwrap().compress_lossy()
  }

  /// Lossy-compressed MOC covering the whole sky (all 12 * 4^depth cells)
  /// at the given depth.
  fn build_compressed_moc_full(depth: u8) -> CompressedMOC {
    let mut builder = BMOCBuilderFixedDepth::new(depth, true);
    for icell in 0..12 * (1 << (depth << 1)) {
      builder.push(icell)
    }
    builder.to_bmoc().unwrap().compress_lossy()
  }

  #[test]
  fn testok_compressed_moc_empty_d0() {
    let compressed = build_compressed_moc_empty(0);
    // 1 depth byte + 12 bits (one '0' per empty base cell) stored on 2 bytes.
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 0_u8, 0_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AAAA");
    // Decompress/recompress round-trip must reproduce the same stream.
    assert_eq!(CompressedMOC::decompress(&compressed.moc).compress_lossy().to_b64(), b64);
  }

  #[test]
  fn testok_compressed_moc_empty_d1() {
    let compressed = build_compressed_moc_empty(1);
    // 1 depth byte + 24 bits: '10' (empty node) for each of the 12 base cells.
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    assert_eq!(compressed.moc, vec![1_u8, 85_u8, 85_u8, 85_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AVVVVQ==");
    assert_eq!(CompressedMOC::decompress(&compressed.moc).compress_lossy().to_b64(), b64);
  }

  #[test]
  fn testok_compressed_moc_full_d0() {
    let compressed = build_compressed_moc_full(0);
    // 1 depth byte + 12 bits (one '1' per full base cell) stored on 2 bytes.
    assert_eq!(compressed.byte_size(), 1 + 2);
    assert_eq!(compressed.moc, vec![0_u8, 255_u8, 15_u8].into_boxed_slice());
    let b64 = compressed.to_b64();
    assert_eq!(b64, "AP8P");
    assert_eq!(CompressedMOC::decompress(&compressed.moc).compress_lossy().to_b64(), b64);
  }

  #[test]
  fn testok_compressed_moc_full_d1() {
    let compressed = build_compressed_moc_full(1);
    // 1 depth byte + 24 bits: '11' (full node) for each of the 12 base cells.
    assert_eq!(compressed.byte_size(), 1 + 24 / 8);
    let b64 = compressed.to_b64();
    assert_eq!(b64, "Af///w==");
    assert_eq!(CompressedMOC::decompress(&compressed.moc).compress_lossy().to_b64(), b64);
  }
}