vortex_sampling_compressor/compressors/struct_.rs

use itertools::Itertools;
use vortex_array::aliases::hash_set::HashSet;
use vortex_array::array::{Struct, StructArray};
use vortex_array::compress::compute_pruning_stats;
use vortex_array::encoding::EncodingRef;
use vortex_array::stats::ArrayStatistics as _;
use vortex_array::variants::StructArrayTrait;
use vortex_array::{Array, ArrayDef, IntoArray};
use vortex_error::VortexResult;

use crate::compressors::{CompressedArray, CompressionTree, EncodingCompressor};
use crate::SamplingCompressor;

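/// Compresses a [`StructArray`] by compressing its validity and each child
/// column independently, threading any per-field compression trees from a
/// previous sample through to the children.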
#[derive(Debug)]
pub struct StructCompressor;

impl EncodingCompressor for StructCompressor {
    fn id(&self) -> &str {
        Struct::ID.as_ref()
    }

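    // Struct compression is free: it is purely structural and only delegates
    // to its children's compressors.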
    fn cost(&self) -> u8 {
        0
    }

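    // Only applies to arrays that are already struct-encoded.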
    fn can_compress(&self, array: &Array) -> Option<&dyn EncodingCompressor> {
        StructArray::try_from(array)
            .ok()
            .map(|_| self as &dyn EncodingCompressor)
    }

    fn compress<'a>(
        &'a self,
        array: &Array,
        like: Option<CompressionTree<'a>>,
        ctx: SamplingCompressor<'a>,
    ) -> VortexResult<CompressedArray<'a>> {
        let array = StructArray::try_from(array)?;
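        // Compress the struct's own validity independently of its children.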
        let compressed_validity = ctx.compress_validity(array.validity())?;

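        // Reuse per-field compression trees from a previous sample when given
        // a `like` tree; otherwise start each field from scratch.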
        let children_trees = match like {
            Some(tree) => tree.children,
            None => vec![None; array.nfields()],
        };

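        // Compress each child column against its corresponding tree,
        // collecting the compressed children and their trees in a single pass.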
        let (arrays, trees) = array
            .children()
            .zip_eq(children_trees)
            .map(|(array, like)| {
                // Pruning stats are extremely valuable when reading/writing, but are potentially
                // much more expensive to compute post-compression: not all encodings implement
                // stats, so we would potentially have to canonicalize during writes just to get
                // stats, which would be silly. Also, we only really require them for column
                // chunks, not for every array.
                compute_pruning_stats(&array)?;
                ctx.compress(&array, like.as_ref())
            })
            .process_results(|iter| iter.map(|x| (x.array, x.path)).unzip())?;

        Ok(CompressedArray::compressed(
            StructArray::try_new(
                array.names().clone(),
                arrays,
                array.len(),
                compressed_validity,
            )?
            .into_array(),
            Some(CompressionTree::new(self, trees)),
            Some(array.statistics()),
        ))
    }

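    // Struct itself introduces no additional encodings; any encodings used by
    // the children are reported by their own compressors.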
    fn used_encodings(&self) -> HashSet<EncodingRef> {
        HashSet::from([])
    }
}
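
#[cfg(test)]
mod tests {
    // A minimal end-to-end sketch of this compressor in use. It assumes a few
    // things about the surrounding crates that may need adjusting for your
    // version: that `PrimitiveArray` implements `From<Vec<i64>>`, that
    // `Validity` lives at `vortex_array::validity`, and that
    // `SamplingCompressor` implements `Default` with the standard compressor
    // set.
    use std::sync::Arc;

    use vortex_array::array::{PrimitiveArray, StructArray};
    use vortex_array::validity::Validity;
    use vortex_array::IntoArray;
    use vortex_error::VortexResult;

    use crate::SamplingCompressor;

    #[test]
    fn compress_struct() -> VortexResult<()> {
        // One-field struct: { "a": [0, 1, 2, 3] }. The field name "a" and the
        // values are purely illustrative.
        let field = PrimitiveArray::from(vec![0i64, 1, 2, 3]).into_array();
        let array = StructArray::try_new(
            vec![Arc::from("a")].into(),
            vec![field],
            4,
            Validity::NonNullable,
        )?
        .into_array();

        // Compressing with no `like` tree compresses every field from scratch.
        let compressed = SamplingCompressor::default().compress(&array, None)?;
        assert_eq!(compressed.array.len(), 4);
        Ok(())
    }
}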