arrow-select 58.1.0

Selection kernels for arrow arrays
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

//! Dictionary utilities for Arrow arrays

use std::sync::Arc;

use crate::filter::filter;
use crate::interleave::interleave;
use ahash::RandomState;
use arrow_array::builder::BooleanBufferBuilder;
use arrow_array::types::{
    ArrowDictionaryKeyType, ArrowPrimitiveType, BinaryType, ByteArrayType, LargeBinaryType,
    LargeUtf8Type, Utf8Type,
};
use arrow_array::{
    AnyDictionaryArray, Array, ArrayRef, ArrowNativeTypeOp, BooleanArray, DictionaryArray,
    GenericByteArray, PrimitiveArray, downcast_dictionary_array,
};
use arrow_array::{cast::AsArray, downcast_primitive};
use arrow_buffer::{ArrowNativeType, BooleanBuffer, ScalarBuffer, ToByteSlice};
use arrow_schema::{ArrowError, DataType};

/// Garbage collects a [DictionaryArray] by removing unreferenced values.
///
/// Returns a new [DictionaryArray] in which every value is referenced by at
/// least one key. Duplicate values may remain; only *unreferenced* values are
/// dropped.
///
/// See also [`garbage_collect_any_dictionary`] if you need to handle multiple dictionary types
pub fn garbage_collect_dictionary<K: ArrowDictionaryKeyType>(
    dictionary: &DictionaryArray<K>,
) -> Result<DictionaryArray<K>, ArrowError> {
    let occupancy = dictionary.occupancy();
    let old_values = dictionary.values();

    // Fast path: every value slot is referenced, so there is nothing to collect
    if occupancy.count_set_bits() == old_values.len() {
        return Ok(dictionary.clone());
    }

    // Build an old-index -> new-index table. Unreferenced slots keep ZERO,
    // which is harmless because no live key points at them.
    let mut remap = vec![K::Native::ZERO; old_values.len()];
    for (new_idx, old_idx) in occupancy.set_indices().enumerate() {
        remap[old_idx] = K::Native::from_usize(new_idx)
            .expect("new index should fit in K::Native, as old index was in range");
    }

    // Rewrite every key through the table
    let new_keys = dictionary.keys().unary(|key| {
        // Null keys may hold arbitrary values; mapping them to ZERO is safe
        // because the null mask makes their payload irrelevant
        remap.get(key.as_usize()).copied().unwrap_or(K::Native::ZERO)
    });

    // Keep only the referenced values, in their original relative order
    let new_values = filter(old_values, &BooleanArray::new(occupancy, None))?;

    DictionaryArray::try_new(new_keys, new_values)
}

/// Equivalent to [`garbage_collect_dictionary`] but without requiring casting to a specific key type.
pub fn garbage_collect_any_dictionary(
    dictionary: &dyn AnyDictionaryArray,
) -> Result<ArrayRef, ArrowError> {
    // FIXME: this is a workaround for MSRV Rust versions below 1.86 where trait upcasting is not stable.
    // From 1.86 onward, `&dyn AnyDictionaryArray` can be directly passed to `downcast_dictionary_array!`.
    // A zero-copy full-length slice gives us an `ArrayRef` we can view as `&dyn Array`.
    let sliced = dictionary.slice(0, dictionary.len());
    let dictionary = sliced.as_ref();
    downcast_dictionary_array!(
        dictionary => garbage_collect_dictionary(dictionary).map(|gc| Arc::new(gc) as ArrayRef),
        _ => unreachable!("have a dictionary array")
    )
}

/// A best effort interner that maintains a fixed number of buckets
/// and interns keys based on their hash value
///
/// Hash collisions will result in replacement: the colliding entry simply
/// overwrites the previous occupant of the bucket, so interned indices are
/// best-effort unique rather than guaranteed unique.
struct Interner<'a, V> {
    // Hash state; seeded with fixed values in `new` for determinism
    state: RandomState,
    // Open-addressed table; `None` marks an unused bucket
    buckets: Vec<Option<InternerBucket<'a, V>>>,
    // Right-shift applied to a 64-bit hash to produce a bucket index
    shift: u32,
}

/// A single bucket in [`Interner`]: the interned key bytes (or `None` for a
/// null value) paired with the caller-supplied payload `V`.
type InternerBucket<'a, V> = (Option<&'a [u8]>, V);

impl<'a, V> Interner<'a, V> {
    /// Capacity controls the number of unique buckets allocated within the Interner
    ///
    /// A larger capacity reduces the probability of hash collisions, and should be set
    /// based on an approximation of the upper bound of unique values
    fn new(capacity: usize) -> Self {
        // Over-provision buckets (capacity + 128, rounded up to a power of two
        // via the shift) to help reduce collisions
        let shift = (capacity as u64 + 128).leading_zeros();
        let num_buckets = (u64::MAX >> shift) as usize;
        let mut buckets = Vec::new();
        buckets.resize_with(num_buckets.saturating_add(1), || None);
        Self {
            // A fixed seed to ensure deterministic behaviour
            state: RandomState::with_seeds(0, 0, 0, 0),
            buckets,
            shift,
        }
    }

    /// Look up `new`, invoking `f` to build the payload when the bucket is
    /// empty or currently holds a different key (collision replacement).
    fn intern<F: FnOnce() -> Result<V, E>, E>(
        &mut self,
        new: Option<&'a [u8]>,
        f: F,
    ) -> Result<&V, E> {
        let hash = self.state.hash_one(new);
        let slot = &mut self.buckets[(hash >> self.shift) as usize];
        match slot {
            Some((current, v)) => {
                // Collision or repeat: replace the payload only if the key differs
                if *current != new {
                    *v = f()?;
                    *current = new;
                }
                Ok(v)
            }
            None => {
                // First occupant of this bucket
                let (_, v) = slot.insert((new, f()?));
                Ok(v)
            }
        }
    }
}

/// The output of [`merge_dictionary_values`]: a combined values array plus,
/// for each input dictionary, a table translating its old keys to keys into
/// the combined values.
pub(crate) struct MergedDictionaries<K: ArrowDictionaryKeyType> {
    /// Provides `key_mappings[`array_idx`][`old_key`] -> new_key`
    pub key_mappings: Vec<Vec<K::Native>>,
    /// The new values
    pub values: ArrayRef,
}

/// Performs a cheap, pointer-based comparison of two byte arrays
///
/// Returns `true` only when both arrays downcast to `T` and their value
/// buffers, offset buffers, and null buffers (if any) are pointer-identical.
///
/// See [`ScalarBuffer::ptr_eq`]
fn bytes_ptr_eq<T: ByteArrayType>(a: &dyn Array, b: &dyn Array) -> bool {
    let (lhs, rhs) = match (a.as_bytes_opt::<T>(), b.as_bytes_opt::<T>()) {
        (Some(lhs), Some(rhs)) => (lhs, rhs),
        // Either side failed to downcast to T
        _ => return false,
    };
    let buffers_eq = lhs.values().ptr_eq(rhs.values()) && lhs.offsets().ptr_eq(rhs.offsets());
    match (lhs.nulls(), rhs.nulls()) {
        (Some(l), Some(r)) => buffers_eq && l.inner().ptr_eq(r.inner()),
        (None, None) => buffers_eq,
        // One side has a null buffer and the other does not
        _ => false,
    }
}

/// A type-erased function that compares two arrays for pointer equality,
/// allowing the concrete comparison to be selected once per data type
type PtrEq = fn(&dyn Array, &dyn Array) -> bool;

/// A weak heuristic of whether to merge dictionary values that aims to only
/// perform the expensive merge computation when it is likely to yield at least
/// some return over the naive approach used by MutableArrayData
///
/// `len` is the total length of the merged output
///
/// Returns `(should_merge, has_overflow)` where:
/// - `should_merge`: whether dictionary values should be merged
/// - `has_overflow`: whether the combined dictionary values would overflow the key type
pub(crate) fn should_merge_dictionary_values<K: ArrowDictionaryKeyType>(
    dictionaries: &[&DictionaryArray<K>],
    len: usize,
) -> (bool, bool) {
    use DataType::*;
    let first_values = dictionaries[0].values().as_ref();
    let ptr_eq: PtrEq = match first_values.data_type() {
        Utf8 => bytes_ptr_eq::<Utf8Type>,
        LargeUtf8 => bytes_ptr_eq::<LargeUtf8Type>,
        Binary => bytes_ptr_eq::<BinaryType>,
        LargeBinary => bytes_ptr_eq::<LargeBinaryType>,
        dt => {
            if !dt.is_primitive() {
                return (
                    false,
                    K::Native::from_usize(dictionaries.iter().map(|d| d.values().len()).sum())
                        .is_none(),
                );
            }
            |a, b| a.to_data().ptr_eq(&b.to_data())
        }
    };

    let mut single_dictionary = true;
    let mut total_values = first_values.len();
    for dict in dictionaries.iter().skip(1) {
        let values = dict.values().as_ref();
        total_values += values.len();
        if single_dictionary {
            single_dictionary = ptr_eq(first_values, values)
        }
    }

    let overflow = K::Native::from_usize(total_values).is_none();
    let values_exceed_length = total_values >= len;

    (
        !single_dictionary && (overflow || values_exceed_length),
        overflow,
    )
}

/// Given an array of dictionaries and an optional key mask compute a values array
/// containing referenced values, along with mappings from the [`DictionaryArray`]
/// keys to the new keys within this values array. Best-effort will be made to ensure
/// that the dictionary values are unique
///
/// When `masks` is provided it must contain one [`BooleanBuffer`] per dictionary;
/// only keys at set positions are treated as referenced.
///
/// This method is meant to be very fast and the output dictionary values
/// may not be unique, unlike `GenericByteDictionaryBuilder` which is slower
/// but produces unique values
///
/// Returns [`ArrowError::DictionaryKeyOverflowError`] if the merged values
/// cannot be addressed by `K`.
pub(crate) fn merge_dictionary_values<K: ArrowDictionaryKeyType>(
    dictionaries: &[&DictionaryArray<K>],
    masks: Option<&[BooleanBuffer]>,
) -> Result<MergedDictionaries<K>, ArrowError> {
    let mut num_values = 0;

    let mut values_arrays = Vec::with_capacity(dictionaries.len());
    let mut value_slices = Vec::with_capacity(dictionaries.len());

    // Pass 1: for each dictionary determine which values are actually
    // referenced (by a non-null, unmasked key) and extract their bytes
    for (idx, dictionary) in dictionaries.iter().enumerate() {
        let mask = masks.and_then(|m| m.get(idx));
        // Declared outside the match so `key_mask` can borrow from it in the
        // branch that must combine the null buffer with the selection mask
        let key_mask_owned;
        let key_mask = match (dictionary.nulls(), mask) {
            (Some(n), None) => Some(n.inner()),
            (None, Some(n)) => Some(n),
            // A key is live only if it is both non-null and selected
            (Some(n), Some(m)) => {
                key_mask_owned = n.inner() & m;
                Some(&key_mask_owned)
            }
            (None, None) => None,
        };
        let keys = dictionary.keys().values();
        let values = dictionary.values().as_ref();
        let values_mask = compute_values_mask(keys, key_mask, values.len());

        let masked_values = get_masked_values(values, &values_mask);
        num_values += masked_values.len();
        value_slices.push(masked_values);
        values_arrays.push(values)
    }

    // Map from value to new index
    let mut interner = Interner::new(num_values);
    // Interleave indices for new values array
    let mut indices = Vec::with_capacity(num_values);

    // Pass 2: intern each referenced value; a value seen for the first time is
    // assigned the next output slot and recorded in `indices`, and every
    // dictionary gets an old-key -> new-key table
    let key_mappings = dictionaries
        .iter()
        .enumerate()
        .zip(value_slices)
        .map(|((dictionary_idx, dictionary), values)| {
            let zero = K::Native::from_usize(0).unwrap();
            let mut mapping = vec![zero; dictionary.values().len()];

            for (value_idx, value) in values {
                mapping[value_idx] =
                    *interner.intern(value, || match K::Native::from_usize(indices.len()) {
                        Some(idx) => {
                            indices.push((dictionary_idx, value_idx));
                            Ok(idx)
                        }
                        // The new values array would exceed the key type's range
                        None => Err(ArrowError::DictionaryKeyOverflowError),
                    })?;
            }
            Ok(mapping)
        })
        .collect::<Result<Vec<_>, ArrowError>>()?;

    Ok(MergedDictionaries {
        key_mappings,
        // Gather the interned values from their source arrays
        values: interleave(&values_arrays, &indices)?,
    })
}

/// Return a mask identifying the values that are referenced by keys in `dictionary`
/// at the positions indicated by `selection`
///
/// `max_key` is the length of the values array; the result has one bit per value.
fn compute_values_mask<K: ArrowNativeType>(
    keys: &ScalarBuffer<K>,
    mask: Option<&BooleanBuffer>,
    max_key: usize,
) -> BooleanBuffer {
    // Start from an all-unset buffer of `max_key` bits
    let mut builder = BooleanBufferBuilder::new(max_key);
    builder.advance(max_key);

    if let Some(selection) = mask {
        // Only keys at selected positions mark their value as referenced
        for idx in selection.set_indices() {
            builder.set_bit(keys[idx].as_usize(), true);
        }
    } else {
        // No selection: every key marks its value as referenced
        for key in keys.iter() {
            builder.set_bit(key.as_usize(), true);
        }
    }
    builder.finish()
}

/// Process primitive array values to bytes
///
/// For each set index in `mask`, yields the index and the native value's byte
/// representation, or `None` when the slot is null.
fn masked_primitives_to_bytes<'a, T: ArrowPrimitiveType>(
    array: &'a PrimitiveArray<T>,
    mask: &BooleanBuffer,
) -> Vec<(usize, Option<&'a [u8]>)>
where
    T::Native: ToByteSlice,
{
    let values = array.values();
    let mut result = Vec::with_capacity(mask.count_set_bits());
    result.extend(
        mask.set_indices()
            .map(|idx| (idx, array.is_valid(idx).then_some(values[idx].to_byte_slice()))),
    );
    result
}

// Adapter macro used by `downcast_primitive!` in `get_masked_values`: downcasts
// the type-erased array to the concrete primitive type `$t` and forwards to
// `masked_primitives_to_bytes`
macro_rules! masked_primitive_to_bytes_helper {
    ($t:ty, $array:expr, $mask:expr) => {
        masked_primitives_to_bytes::<$t>($array.as_primitive(), $mask)
    };
}

/// Return a Vec containing for each set index in `mask`, the index and byte value of that index
///
/// Dispatches on the array's data type: primitives go through
/// `masked_primitives_to_bytes`, string/binary types through `masked_bytes`.
/// Panics via `unimplemented!` for any other type.
fn get_masked_values<'a>(
    array: &'a dyn Array,
    mask: &BooleanBuffer,
) -> Vec<(usize, Option<&'a [u8]>)> {
    downcast_primitive! {
        array.data_type() => (masked_primitive_to_bytes_helper, array, mask),
        DataType::Utf8 => masked_bytes(array.as_string::<i32>(), mask),
        DataType::LargeUtf8 => masked_bytes(array.as_string::<i64>(), mask),
        DataType::Binary => masked_bytes(array.as_binary::<i32>(), mask),
        DataType::LargeBinary => masked_bytes(array.as_binary::<i64>(), mask),
        _ => unimplemented!("Dictionary merging for type {} is not implemented", array.data_type()),
    }
}

/// Compute [`get_masked_values`] for a [`GenericByteArray`]
///
/// For each set index in `mask`, yields the index and the element's bytes;
/// null slots yield `None` (checked via `Array::is_valid`).
fn masked_bytes<'a, T: ByteArrayType>(
    array: &'a GenericByteArray<T>,
    mask: &BooleanBuffer,
) -> Vec<(usize, Option<&'a [u8]>)> {
    let mut result = Vec::with_capacity(mask.count_set_bits());
    result.extend(
        mask.set_indices()
            .map(|idx| (idx, array.is_valid(idx).then_some(array.value(idx).as_ref()))),
    );
    result
}

#[cfg(test)]
mod tests {
    use super::*;

    use arrow_array::cast::as_string_array;
    use arrow_array::types::Int8Type;
    use arrow_array::types::Int32Type;
    use arrow_array::{DictionaryArray, Int8Array, Int32Array, StringArray};
    use arrow_buffer::{BooleanBuffer, Buffer, NullBuffer, OffsetBuffer};
    use std::sync::Arc;

    // GC drops the unreferenced value "c" and remaps keys accordingly
    #[test]
    fn test_garbage_collect_i32_dictionary() {
        let values = StringArray::from_iter_values(["a", "b", "c", "d"]);
        let keys = Int32Array::from_iter_values([0, 1, 1, 3, 0, 0, 1]);
        let dict = DictionaryArray::<Int32Type>::new(keys, Arc::new(values));

        // Only "a", "b", "d" are referenced, "c" is not
        let gc = garbage_collect_dictionary(&dict).unwrap();

        let expected_values = StringArray::from_iter_values(["a", "b", "d"]);
        let expected_keys = Int32Array::from_iter_values([0, 1, 1, 2, 0, 0, 1]);
        let expected = DictionaryArray::<Int32Type>::new(expected_keys, Arc::new(expected_values));

        assert_eq!(gc, expected);
    }

    // Same scenario via the type-erased entry point
    #[test]
    fn test_garbage_collect_any_dictionary() {
        let values = StringArray::from_iter_values(["a", "b", "c", "d"]);
        let keys = Int32Array::from_iter_values([0, 1, 1, 3, 0, 0, 1]);
        let dict = DictionaryArray::<Int32Type>::new(keys, Arc::new(values));

        let gc = garbage_collect_any_dictionary(&dict).unwrap();

        let expected_values = StringArray::from_iter_values(["a", "b", "d"]);
        let expected_keys = Int32Array::from_iter_values([0, 1, 1, 2, 0, 0, 1]);
        let expected = DictionaryArray::<Int32Type>::new(expected_keys, Arc::new(expected_values));

        assert_eq!(gc.as_ref(), &expected);
    }

    // Null keys survive GC; "b" is unreferenced and dropped
    #[test]
    fn test_garbage_collect_with_nulls() {
        let values = StringArray::from_iter_values(["a", "b", "c"]);
        let keys = Int8Array::from(vec![Some(2), None, Some(0)]);
        let dict = DictionaryArray::<Int8Type>::new(keys, Arc::new(values));

        let gc = garbage_collect_dictionary(&dict).unwrap();

        let expected_values = StringArray::from_iter_values(["a", "c"]);
        let expected_keys = Int8Array::from(vec![Some(1), None, Some(0)]);
        let expected = DictionaryArray::<Int8Type>::new(expected_keys, Arc::new(expected_values));

        assert_eq!(gc, expected);
    }

    // Empty dictionary takes the fast path and is returned unchanged
    #[test]
    fn test_garbage_collect_empty_dictionary() {
        let values = StringArray::from_iter_values::<&str, _>([]);
        let keys = Int32Array::from_iter_values([]);
        let dict = DictionaryArray::<Int32Type>::new(keys, Arc::new(values));

        let gc = garbage_collect_dictionary(&dict).unwrap();

        assert_eq!(gc, dict);
    }

    #[test]
    fn test_garbage_collect_dictionary_all_unreferenced() {
        let values = StringArray::from_iter_values(["a", "b", "c"]);
        let keys = Int32Array::from(vec![None, None, None]);
        let dict = DictionaryArray::<Int32Type>::new(keys, Arc::new(values));

        let gc = garbage_collect_dictionary(&dict).unwrap();

        // All keys are null, so dictionary values can be empty
        let expected_values = StringArray::from_iter_values::<&str, _>([]);
        let expected_keys = Int32Array::from(vec![None, None, None]);
        let expected = DictionaryArray::<Int32Type>::new(expected_keys, Arc::new(expected_values));

        assert_eq!(gc, expected);
    }

    // Merging deduplicates values across dictionaries and produces per-input
    // old-key -> new-key mappings; also exercises slicing and key masks
    #[test]
    fn test_merge_strings() {
        let a = DictionaryArray::<Int32Type>::from_iter(["a", "b", "a", "b", "d", "c", "e"]);
        let b = DictionaryArray::<Int32Type>::from_iter(["c", "f", "c", "d", "a", "d"]);
        let merged = merge_dictionary_values(&[&a, &b], None).unwrap();

        let values = as_string_array(merged.values.as_ref());
        let actual: Vec<_> = values.iter().map(Option::unwrap).collect();
        assert_eq!(&actual, &["a", "b", "d", "c", "e", "f"]);

        assert_eq!(merged.key_mappings.len(), 2);
        assert_eq!(&merged.key_mappings[0], &[0, 1, 2, 3, 4]);
        assert_eq!(&merged.key_mappings[1], &[3, 5, 2, 0]);

        let a_slice = a.slice(1, 4);
        let merged = merge_dictionary_values(&[&a_slice, &b], None).unwrap();

        let values = as_string_array(merged.values.as_ref());
        let actual: Vec<_> = values.iter().map(Option::unwrap).collect();
        assert_eq!(&actual, &["a", "b", "d", "c", "f"]);

        assert_eq!(merged.key_mappings.len(), 2);
        assert_eq!(&merged.key_mappings[0], &[0, 1, 2, 0, 0]);
        assert_eq!(&merged.key_mappings[1], &[3, 4, 2, 0]);

        // Mask out only ["b", "b", "d"] from a
        let a_mask = BooleanBuffer::from_iter([false, true, false, true, true, false, false]);
        let b_mask = BooleanBuffer::new_set(b.len());
        let merged = merge_dictionary_values(&[&a, &b], Some(&[a_mask, b_mask])).unwrap();

        let values = as_string_array(merged.values.as_ref());
        let actual: Vec<_> = values.iter().map(Option::unwrap).collect();
        assert_eq!(&actual, &["b", "d", "c", "f", "a"]);

        assert_eq!(merged.key_mappings.len(), 2);
        assert_eq!(&merged.key_mappings[0], &[0, 0, 1, 0, 0]);
        assert_eq!(&merged.key_mappings[1], &[2, 3, 1, 4]);
    }

    // Null values, null keys, and an out-of-range key value (8) hidden behind
    // a null key must all be tolerated by the merge
    #[test]
    fn test_merge_nulls() {
        let buffer = Buffer::from(b"helloworldbingohelloworld");
        let offsets = OffsetBuffer::from_lengths([5, 5, 5, 5, 5]);
        let nulls = NullBuffer::from(vec![true, false, true, true, true]);
        let values = StringArray::new(offsets, buffer, Some(nulls));

        let key_values = vec![1, 2, 3, 1, 8, 2, 3];
        let key_nulls = NullBuffer::from(vec![true, true, false, true, false, true, true]);
        let keys = Int32Array::new(key_values.into(), Some(key_nulls));
        let a = DictionaryArray::new(keys, Arc::new(values));
        // [NULL, "bingo", NULL, NULL, NULL, "bingo", "hello"]

        let b = DictionaryArray::new(Int32Array::new_null(10), Arc::new(StringArray::new_null(0)));

        let merged = merge_dictionary_values(&[&a, &b], None).unwrap();
        let expected = StringArray::from(vec![None, Some("bingo"), Some("hello")]);
        assert_eq!(merged.values.as_ref(), &expected);
        assert_eq!(merged.key_mappings.len(), 2);
        assert_eq!(&merged.key_mappings[0], &[0, 0, 1, 2, 0]);
        assert_eq!(&merged.key_mappings[1], &[] as &[i32; 0]);
    }

    // Only referenced values ("b") make it into the merged output
    #[test]
    fn test_merge_keys_smaller() {
        let values = StringArray::from_iter_values(["a", "b"]);
        let keys = Int32Array::from_iter_values([1]);
        let a = DictionaryArray::new(keys, Arc::new(values));

        let merged = merge_dictionary_values(&[&a], None).unwrap();
        let expected = StringArray::from(vec!["b"]);
        assert_eq!(merged.values.as_ref(), &expected);
    }
}