datafusion_spark/function/hash/crc32.rs

use std::any::Any;
use std::sync::Arc;

use arrow::array::{ArrayRef, Int64Array};
use arrow::datatypes::{DataType, Field, FieldRef};
use crc32fast::Hasher;
use datafusion_common::cast::{
    as_binary_array, as_binary_view_array, as_fixed_size_binary_array,
    as_large_binary_array,
};
use datafusion_common::types::{NativeType, logical_string};
use datafusion_common::utils::take_function_args;
use datafusion_common::{Result, internal_err};
use datafusion_expr::{
    Coercion, ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs, ScalarUDFImpl,
    Signature, TypeSignatureClass, Volatility,
};
use datafusion_functions::utils::make_scalar_function;

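/// Spark-compatible `crc32` scalar function.
///
/// Computes the CRC32 checksum of binary input and returns it as an
/// `Int64` (Spark's BIGINT); string inputs are coerced to binary.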
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct SparkCrc32 {
    signature: Signature,
}

impl Default for SparkCrc32 {
    fn default() -> Self {
        Self::new()
    }
}

impl SparkCrc32 {
    pub fn new() -> Self {
        Self {
            // Binary arguments are accepted directly; string arguments are
            // implicitly coerced to binary, mirroring Spark's coercion rules.
            signature: Signature::coercible(
                vec![Coercion::new_implicit(
                    TypeSignatureClass::Binary,
                    vec![TypeSignatureClass::Native(logical_string())],
                    NativeType::Binary,
                )],
                Volatility::Immutable,
            ),
        }
    }
}

impl ScalarUDFImpl for SparkCrc32 {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn name(&self) -> &str {
        "crc32"
    }

    fn signature(&self) -> &Signature {
        &self.signature
    }

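    // `return_type` is unreachable: `return_field_from_args` below supplies
    // the full return field, including nullability.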
    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
        internal_err!("return_field_from_args should be used instead")
    }

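    // crc32 returns null only for null input, so the output field is
    // nullable only if the argument field is nullable.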
    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
        let nullable = args.arg_fields.iter().any(|f| f.is_nullable());
        Ok(Arc::new(Field::new(self.name(), DataType::Int64, nullable)))
    }

    fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result<ColumnarValue> {
        make_scalar_function(spark_crc32, vec![])(&args.args)
    }
}

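/// Computes the CRC32 checksum of a byte slice. The `u32` checksum is
/// zero-extended to `i64`, matching Spark's non-negative BIGINT result.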
fn spark_crc32_digest(value: &[u8]) -> i64 {
    let mut hasher = Hasher::new();
    hasher.update(value);
    hasher.finalize() as i64
}

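/// Applies the digest to an iterator of optional byte slices, collecting an
/// `Int64Array` and preserving nulls.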
fn spark_crc32_impl<'a>(input: impl Iterator<Item = Option<&'a [u8]>>) -> ArrayRef {
    let result = input
        .map(|value| value.map(spark_crc32_digest))
        .collect::<Int64Array>();
    Arc::new(result)
}

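/// Kernel for `crc32`: dispatches on the concrete binary array type and
/// hashes each row. A `Null` input column yields an all-null `Int64Array`.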
fn spark_crc32(args: &[ArrayRef]) -> Result<ArrayRef> {
    let [input] = take_function_args("crc32", args)?;

    match input.data_type() {
        DataType::Null => Ok(Arc::new(Int64Array::new_null(input.len()))),
        DataType::Binary => {
            let input = as_binary_array(input)?;
            Ok(spark_crc32_impl(input.iter()))
        }
        DataType::LargeBinary => {
            let input = as_large_binary_array(input)?;
            Ok(spark_crc32_impl(input.iter()))
        }
        DataType::BinaryView => {
            let input = as_binary_view_array(input)?;
            Ok(spark_crc32_impl(input.iter()))
        }
        DataType::FixedSizeBinary(_) => {
            let input = as_fixed_size_binary_array(input)?;
            Ok(spark_crc32_impl(input.iter()))
        }
        dt => {
            internal_err!("Unsupported data type for crc32: {dt}")
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_crc32_nullability() -> Result<()> {
        let crc32_func = SparkCrc32::new();

        // A non-nullable input yields a non-nullable Int64 result.
        let field_not_null = Arc::new(Field::new("data", DataType::Binary, false));
        let result = crc32_func.return_field_from_args(ReturnFieldArgs {
            arg_fields: std::slice::from_ref(&field_not_null),
            scalar_arguments: &[None],
        })?;
        assert!(!result.is_nullable());
        assert_eq!(result.data_type(), &DataType::Int64);

        // A nullable input yields a nullable Int64 result.
        let field_nullable = Arc::new(Field::new("data", DataType::Binary, true));
        let result = crc32_func.return_field_from_args(ReturnFieldArgs {
            arg_fields: &[field_nullable],
            scalar_arguments: &[None],
        })?;
        assert!(result.is_nullable());
        assert_eq!(result.data_type(), &DataType::Int64);

        Ok(())
    }
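
    // Supplementary sanity check of the digest itself, assuming the example
    // from the Spark SQL function reference: `crc32('Spark')` = 1557323817.
    #[test]
    fn test_crc32_known_value() {
        assert_eq!(spark_crc32_digest(b"Spark"), 1557323817);
    }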
}