use std::any::Any;
use std::sync::Arc;
use arrow::datatypes::{DataType, Field, FieldRef, TimeUnit};
use datafusion_common::types::logical_date;
use datafusion_common::utils::take_function_args;
use datafusion_common::{Result, internal_err};
use datafusion_expr::simplify::{ExprSimplifyResult, SimplifyContext};
use datafusion_expr::{
Coercion, ColumnarValue, Expr, ExprSchemable, ReturnFieldArgs, ScalarFunctionArgs,
ScalarUDFImpl, Signature, TypeSignatureClass, Volatility,
};
/// Spark-compatible `unix_date` scalar UDF.
///
/// Never executed directly: `simplify` rewrites each call into a
/// `Date32 -> Int32` cast chain (Arrow `Date32` stores days since the
/// Unix epoch), so `invoke_with_args` is unreachable by design.
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct SparkUnixDate {
    // One argument, coercible to the native logical date type.
    signature: Signature,
}
impl Default for SparkUnixDate {
fn default() -> Self {
Self::new()
}
}
impl SparkUnixDate {
    /// Creates the `unix_date` UDF.
    ///
    /// The signature accepts exactly one argument that coerces to the
    /// native logical date type; the function is `Immutable` (pure).
    pub fn new() -> Self {
        let date_arg =
            Coercion::new_exact(TypeSignatureClass::Native(logical_date()));
        let signature =
            Signature::coercible(vec![date_arg], Volatility::Immutable);
        Self { signature }
    }
}
impl ScalarUDFImpl for SparkUnixDate {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn name(&self) -> &str {
        "unix_date"
    }

    fn signature(&self) -> &Signature {
        &self.signature
    }

    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
        // Nullability depends on the input field, so the richer
        // `return_field_from_args` is the supported entry point.
        internal_err!("return_field_from_args should be used instead")
    }

    /// Returns an `Int32` field whose nullability mirrors the single input
    /// argument's field.
    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
        let nullable = args.arg_fields[0].is_nullable();
        Ok(Arc::new(Field::new(self.name(), DataType::Int32, nullable)))
    }

    fn invoke_with_args(&self, _args: ScalarFunctionArgs) -> Result<ColumnarValue> {
        // Unreachable by design: `simplify` rewrites every call site into a
        // cast chain before execution. Message now interpolates the function
        // name, consistent with the `SparkUnixTimestamp` impl below.
        internal_err!("invoke_with_args should not be called on `{}`", self.name())
    }

    /// Rewrites `unix_date(d)` into `CAST(CAST(d AS Date32) AS Int32)` —
    /// Arrow `Date32` is the day count since the Unix epoch, which is
    /// exactly Spark's `unix_date` result.
    fn simplify(
        &self,
        args: Vec<Expr>,
        info: &SimplifyContext,
    ) -> Result<ExprSimplifyResult> {
        let [date] = take_function_args(self.name(), args)?;
        Ok(ExprSimplifyResult::Simplified(
            date.cast_to(&DataType::Date32, info.schema())?
                .cast_to(&DataType::Int32, info.schema())?,
        ))
    }
}
/// Spark-compatible `unix_micros` / `unix_millis` / `unix_seconds` scalar UDF.
///
/// One struct backs all three functions, differing only in `name` and
/// `time_unit`. Never executed directly: `simplify` rewrites each call
/// into a timestamp-to-`Int64` cast chain, so `invoke_with_args` is
/// unreachable by design.
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct SparkUnixTimestamp {
    // Resolution of the returned epoch offset (second/milli/micro).
    time_unit: TimeUnit,
    signature: Signature,
    // Registered function name, e.g. "unix_micros".
    name: &'static str,
}
impl SparkUnixTimestamp {
pub fn new(name: &'static str, time_unit: TimeUnit) -> Self {
Self {
signature: Signature::coercible(
vec![Coercion::new_exact(TypeSignatureClass::Timestamp)],
Volatility::Immutable,
),
time_unit,
name,
}
}
pub fn microseconds() -> Self {
Self::new("unix_micros", TimeUnit::Microsecond)
}
pub fn milliseconds() -> Self {
Self::new("unix_millis", TimeUnit::Millisecond)
}
pub fn seconds() -> Self {
Self::new("unix_seconds", TimeUnit::Second)
}
}
impl ScalarUDFImpl for SparkUnixTimestamp {
    fn as_any(&self) -> &dyn Any {
        self
    }

    fn name(&self) -> &str {
        self.name
    }

    fn signature(&self) -> &Signature {
        &self.signature
    }

    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
        // Nullability depends on the input field, so the richer
        // `return_field_from_args` is the supported entry point.
        internal_err!("return_field_from_args should be used instead")
    }

    /// Returns an `Int64` field whose nullability mirrors the single input
    /// argument's field.
    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
        let input_nullable = args.arg_fields[0].is_nullable();
        let field = Field::new(self.name(), DataType::Int64, input_nullable);
        Ok(Arc::new(field))
    }

    fn invoke_with_args(&self, _args: ScalarFunctionArgs) -> Result<ColumnarValue> {
        // Unreachable by design: `simplify` rewrites every call site into a
        // cast chain before execution.
        internal_err!("invoke_with_args should not be called on `{}`", self.name())
    }

    /// Rewrites the call into
    /// `CAST(CAST(ts AS Timestamp(time_unit, "UTC")) AS Int64)`, i.e. the
    /// raw timestamp value at the function's resolution.
    fn simplify(
        &self,
        args: Vec<Expr>,
        info: &SimplifyContext,
    ) -> Result<ExprSimplifyResult> {
        let [ts] = take_function_args(self.name(), args)?;
        let target = DataType::Timestamp(self.time_unit, Some("UTC".into()));
        let rewritten = ts
            .cast_to(&target, info.schema())?
            .cast_to(&DataType::Int64, info.schema())?;
        Ok(ExprSimplifyResult::Simplified(rewritten))
    }
}