use std::any::Any;
use std::sync::Arc;
use arrow::array::{ArrayRef, StringArray};
use arrow::datatypes::{DataType, Field, FieldRef};
use datafusion_common::cast::{
as_binary_array, as_binary_view_array, as_fixed_size_binary_array,
as_large_binary_array,
};
use datafusion_common::types::{NativeType, logical_string};
use datafusion_common::utils::take_function_args;
use datafusion_common::{Result, internal_err};
use datafusion_expr::{
Coercion, ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs, ScalarUDFImpl,
Signature, TypeSignatureClass, Volatility,
};
use datafusion_functions::utils::make_scalar_function;
use sha1::{Digest, Sha1};
/// Spark-compatible `sha1` scalar function.
///
/// Computes the SHA-1 digest of a binary input and returns it as a
/// 40-character lowercase hexadecimal `Utf8` string. Also registered
/// under the alias `sha` (see [`SparkSha1::new`]).
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct SparkSha1 {
    // Accepts one binary argument; string inputs are implicitly coerced.
    signature: Signature,
    // Alternate names this UDF answers to ("sha").
    aliases: Vec<String>,
}
impl Default for SparkSha1 {
    /// Equivalent to [`SparkSha1::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl SparkSha1 {
    /// Builds the `sha1` UDF with its coercion rules and the `sha` alias.
    pub fn new() -> Self {
        // One binary argument; string-typed inputs are implicitly
        // coerced to binary before hashing.
        let binary_arg = Coercion::new_implicit(
            TypeSignatureClass::Binary,
            vec![TypeSignatureClass::Native(logical_string())],
            NativeType::Binary,
        );
        Self {
            signature: Signature::coercible(vec![binary_arg], Volatility::Immutable),
            aliases: vec!["sha".to_string()],
        }
    }
}
impl ScalarUDFImpl for SparkSha1 {
    fn as_any(&self) -> &dyn Any {
        self
    }
    /// Primary name of the function; `aliases` adds "sha".
    fn name(&self) -> &str {
        "sha1"
    }
    fn aliases(&self) -> &[String] {
        &self.aliases
    }
    fn signature(&self) -> &Signature {
        &self.signature
    }
    /// Unused: nullability-aware resolution happens in
    /// `return_field_from_args`, so reaching this is an internal error.
    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
        internal_err!("return_field_from_args should be used instead")
    }
    /// Returns a `Utf8` field that is nullable iff any input field is
    /// nullable (a null input row yields a null hash).
    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
        let nullable = args.arg_fields.iter().any(|f| f.is_nullable());
        Ok(Arc::new(Field::new(self.name(), DataType::Utf8, nullable)))
    }
    /// Delegates to the array-based kernel; `make_scalar_function` handles
    /// scalar-to-array expansion.
    fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result<ColumnarValue> {
        make_scalar_function(spark_sha1, vec![])(&args.args)
    }
}
/// Lowercase hexadecimal digits used to render digest bytes.
const HEX_CHARS_LOWER: &[u8; 16] = b"0123456789abcdef";

/// Hashes `value` with SHA-1 and renders the 20-byte digest as a
/// 40-character lowercase hex string.
#[inline]
fn spark_sha1_digest(value: &[u8]) -> String {
    let digest = Sha1::digest(value);
    // Each digest byte expands to two hex digits (high nibble first).
    let hex: Vec<u8> = digest
        .iter()
        .flat_map(|&byte| {
            [
                HEX_CHARS_LOWER[usize::from(byte >> 4)],
                HEX_CHARS_LOWER[usize::from(byte & 0x0f)],
            ]
        })
        .collect();
    // All bytes are ASCII hex digits, so this conversion cannot fail.
    String::from_utf8(hex).expect("hex digits are valid UTF-8")
}
/// Applies [`spark_sha1_digest`] to every element of `input`, preserving
/// nulls, and collects the hex strings into a `StringArray`.
fn spark_sha1_impl<'a>(input: impl Iterator<Item = Option<&'a [u8]>>) -> ArrayRef {
    let hashed: StringArray = input.map(|bytes| bytes.map(spark_sha1_digest)).collect();
    Arc::new(hashed)
}
/// Array kernel for `sha1`: dispatches on the physical binary layout of the
/// single argument and hashes each element.
///
/// A `Null` input produces an all-null `Utf8` array of the same length; any
/// non-binary type is an internal error (the signature's coercion should have
/// prevented it).
fn spark_sha1(args: &[ArrayRef]) -> Result<ArrayRef> {
    let [input] = take_function_args("sha1", args)?;
    let output: ArrayRef = match input.data_type() {
        DataType::Null => Arc::new(StringArray::new_null(input.len())),
        DataType::Binary => spark_sha1_impl(as_binary_array(input)?.iter()),
        DataType::LargeBinary => spark_sha1_impl(as_large_binary_array(input)?.iter()),
        DataType::BinaryView => spark_sha1_impl(as_binary_view_array(input)?.iter()),
        DataType::FixedSizeBinary(_) => {
            spark_sha1_impl(as_fixed_size_binary_array(input)?.iter())
        }
        dt => return internal_err!("Unsupported data type for sha1: {dt}"),
    };
    Ok(output)
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Pins the digest output against the standard FIPS 180-1 SHA-1 test
    /// vectors ("abc" and the empty message), which match Spark's
    /// `sha1('abc')` / `sha1('')` results.
    #[test]
    fn test_sha1_digest_known_vectors() {
        assert_eq!(
            spark_sha1_digest(b"abc"),
            "a9993e364706816aba3e25717850c26c9cd0d89d"
        );
        assert_eq!(
            spark_sha1_digest(b""),
            "da39a3ee5e6b4b0d3255bfef95601890afd80709"
        );
    }

    /// The returned field is always `Utf8` and mirrors the input field's
    /// nullability.
    #[test]
    fn test_sha1_nullability() -> Result<()> {
        let func = SparkSha1::new();

        // Non-nullable input => non-nullable output.
        let non_nullable: FieldRef = Arc::new(Field::new("col", DataType::Binary, false));
        let out = func.return_field_from_args(ReturnFieldArgs {
            arg_fields: &[Arc::clone(&non_nullable)],
            scalar_arguments: &[None],
        })?;
        assert!(!out.is_nullable());
        assert_eq!(out.data_type(), &DataType::Utf8);

        // Nullable input => nullable output.
        let nullable: FieldRef = Arc::new(Field::new("col", DataType::Binary, true));
        let out = func.return_field_from_args(ReturnFieldArgs {
            arg_fields: &[Arc::clone(&nullable)],
            scalar_arguments: &[None],
        })?;
        assert!(out.is_nullable());
        assert_eq!(out.data_type(), &DataType::Utf8);
        Ok(())
    }
}