use std::any::Any;
use std::fmt;
use std::sync::Arc;
use super::ColumnarValue;
use crate::error::{DataFusionError, Result};
use crate::physical_plan::PhysicalExpr;
use crate::scalar::ScalarValue;
use arrow::compute;
use arrow::compute::kernels;
use arrow::compute::CastOptions;
use arrow::datatypes::{DataType, Schema};
use arrow::record_batch::RecordBatch;
use compute::can_cast_types;
/// Cast options used by DataFusion by default: `safe: false` makes an
/// invalid cast (e.g. a string that does not parse as the target type)
/// return an error rather than silently producing a null.
pub const DEFAULT_DATAFUSION_CAST_OPTIONS: CastOptions = CastOptions { safe: false };
/// Physical expression that CASTs the result of a child expression to a
/// target [`DataType`] using Arrow's cast kernel.
#[derive(Debug)]
pub struct CastExpr {
    /// The expression whose result is cast.
    expr: Arc<dyn PhysicalExpr>,
    /// The target data type of the cast.
    cast_type: DataType,
    /// Options controlling cast behavior (see [`CastOptions`]).
    cast_options: CastOptions,
}
impl CastExpr {
pub fn new(
expr: Arc<dyn PhysicalExpr>,
cast_type: DataType,
cast_options: CastOptions,
) -> Self {
Self {
expr,
cast_type,
cast_options,
}
}
pub fn expr(&self) -> &Arc<dyn PhysicalExpr> {
&self.expr
}
pub fn cast_type(&self) -> &DataType {
&self.cast_type
}
}
impl fmt::Display for CastExpr {
    /// Render as `CAST(<expr> AS <type>)` — tests assert on this exact shape.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "CAST({} AS {:?})", self.expr, self.cast_type)
    }
}
impl PhysicalExpr for CastExpr {
    /// Return self as [`Any`] so callers can downcast to `CastExpr`.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// The output type is always the target type of the cast, regardless of
    /// the input schema.
    fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
        Ok(self.cast_type.clone())
    }

    /// Nullability is inherited from the child expression.
    fn nullable(&self, input_schema: &Schema) -> Result<bool> {
        self.expr.nullable(input_schema)
    }

    /// Evaluate the child expression against `batch`, then cast its result
    /// (array or scalar) to `self.cast_type` using `self.cast_options`.
    fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
        match self.expr.evaluate(batch)? {
            ColumnarValue::Array(array) => {
                let cast = kernels::cast::cast_with_options(
                    &array,
                    &self.cast_type,
                    &self.cast_options,
                )?;
                Ok(ColumnarValue::Array(cast))
            }
            ColumnarValue::Scalar(scalar) => {
                // Cast a scalar by materializing it as a one-element array,
                // running the array cast kernel, and reading the value back.
                let as_array = scalar.to_array();
                let cast = kernels::cast::cast_with_options(
                    &as_array,
                    &self.cast_type,
                    &self.cast_options,
                )?;
                let cast_scalar = ScalarValue::try_from_array(&cast, 0)?;
                Ok(ColumnarValue::Scalar(cast_scalar))
            }
        }
    }
}
/// Return a physical expression that casts `expr` (resolved against
/// `input_schema`) to `cast_type`, honoring `cast_options`.
///
/// If the expression already has type `cast_type` it is returned unchanged —
/// no wrapper expression is added.
///
/// # Errors
///
/// Returns `DataFusionError::Internal` when Arrow reports that the source
/// type cannot be cast to `cast_type` (`can_cast_types` is false).
pub fn cast_with_options(
    expr: Arc<dyn PhysicalExpr>,
    input_schema: &Schema,
    cast_type: DataType,
    cast_options: CastOptions,
) -> Result<Arc<dyn PhysicalExpr>> {
    let expr_type = expr.data_type(input_schema)?;
    if expr_type == cast_type {
        // No-op cast: `expr` is already owned here, so return it directly
        // instead of the redundant `expr.clone()` (clippy: redundant_clone).
        Ok(expr)
    } else if can_cast_types(&expr_type, &cast_type) {
        Ok(Arc::new(CastExpr::new(expr, cast_type, cast_options)))
    } else {
        Err(DataFusionError::Internal(format!(
            "Unsupported CAST from {:?} to {:?}",
            expr_type, cast_type
        )))
    }
}
/// Return a physical expression that casts `expr` to `cast_type`, using the
/// default DataFusion cast options (`safe: false`, i.e. invalid casts error).
///
/// Convenience wrapper around [`cast_with_options`]; see it for details and
/// error conditions.
pub fn cast(
    expr: Arc<dyn PhysicalExpr>,
    input_schema: &Schema,
    cast_type: DataType,
) -> Result<Arc<dyn PhysicalExpr>> {
    cast_with_options(expr, input_schema, cast_type, DEFAULT_DATAFUSION_CAST_OPTIONS)
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::error::Result;
    use crate::physical_plan::expressions::col;
    use arrow::array::{StringArray, Time64NanosecondArray};
    use arrow::{
        array::{Array, Int32Array, Int64Array, TimestampNanosecondArray, UInt32Array},
        datatypes::*,
    };

    // Builds a one-column batch from `$A_VEC` (element type `$A_TYPE`, array
    // type `$A_ARRAY`), casts column "a" to `$TYPE`, and compares the result
    // against `$VEC`: a `Some(x)` entry asserts the value, a `None` entry
    // asserts the slot is null. Also checks the expression's Display output
    // and reported data type.
    macro_rules! generic_test_cast {
        ($A_ARRAY:ident, $A_TYPE:expr, $A_VEC:expr, $TYPEARRAY:ident, $TYPE:expr, $VEC:expr, $CAST_OPTIONS:expr) => {{
            let schema = Schema::new(vec![Field::new("a", $A_TYPE, false)]);
            let a = $A_ARRAY::from($A_VEC);
            let batch =
                RecordBatch::try_new(Arc::new(schema.clone()), vec![Arc::new(a)])?;

            // The planned expression should render as a CAST and report the
            // target type before evaluation.
            let expression = cast_with_options(col("a"), &schema, $TYPE, $CAST_OPTIONS)?;
            assert_eq!(format!("CAST(a AS {:?})", $TYPE), format!("{}", expression));
            assert_eq!(expression.data_type(&schema)?, $TYPE);

            let result = expression.evaluate(&batch)?.into_array(batch.num_rows());
            assert_eq!(*result.data_type(), $TYPE);
            assert_eq!(result.len(), $A_VEC.len());
            let result = result
                .as_any()
                .downcast_ref::<$TYPEARRAY>()
                .expect("failed to downcast");
            for (i, x) in $VEC.iter().enumerate() {
                match x {
                    Some(x) => assert_eq!(result.value(i), *x),
                    None => assert!(!result.is_valid(i)),
                }
            }
        }};
    }

    // Int32 -> UInt32: all values representable, so every slot is Some.
    #[test]
    fn test_cast_i32_u32() -> Result<()> {
        generic_test_cast!(
            Int32Array,
            DataType::Int32,
            vec![1, 2, 3, 4, 5],
            UInt32Array,
            DataType::UInt32,
            vec![
                Some(1_u32),
                Some(2_u32),
                Some(3_u32),
                Some(4_u32),
                Some(5_u32)
            ],
            DEFAULT_DATAFUSION_CAST_OPTIONS
        );
        Ok(())
    }

    // Int32 -> Utf8: numbers are rendered as their decimal strings.
    #[test]
    fn test_cast_i32_utf8() -> Result<()> {
        generic_test_cast!(
            Int32Array,
            DataType::Int32,
            vec![1, 2, 3, 4, 5],
            StringArray,
            DataType::Utf8,
            vec![Some("1"), Some("2"), Some("3"), Some("4"), Some("5")],
            DEFAULT_DATAFUSION_CAST_OPTIONS
        );
        Ok(())
    }

    // Int64 -> Timestamp(ns): expected values are derived by round-tripping
    // each i64 through a Time64NanosecondArray.
    #[allow(clippy::redundant_clone)]
    #[test]
    fn test_cast_i64_t64() -> Result<()> {
        let original = vec![1, 2, 3, 4, 5];
        let expected: Vec<Option<i64>> = original
            .iter()
            .map(|i| Some(Time64NanosecondArray::from(vec![*i]).value(0)))
            .collect();
        generic_test_cast!(
            Int64Array,
            DataType::Int64,
            original.clone(),
            TimestampNanosecondArray,
            DataType::Timestamp(TimeUnit::Nanosecond, None),
            expected,
            DEFAULT_DATAFUSION_CAST_OPTIONS
        );
        Ok(())
    }

    // Int32 -> LargeBinary is not a supported Arrow cast, so planning (not
    // evaluation) must fail.
    #[test]
    fn invalid_cast() {
        let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
        let result = cast(col("a"), &schema, DataType::LargeBinary);
        result.expect_err("expected Invalid CAST");
    }

    // With `safe: false` (the DataFusion default), a string that does not
    // parse as the target type makes evaluation error rather than yield null.
    #[test]
    fn invalid_cast_with_options_error() -> Result<()> {
        let schema = Schema::new(vec![Field::new("a", DataType::Utf8, false)]);
        let a = StringArray::from(vec!["9.1"]);
        let batch = RecordBatch::try_new(Arc::new(schema.clone()), vec![Arc::new(a)])?;
        let expression = cast_with_options(
            col("a"),
            &schema,
            DataType::Int32,
            DEFAULT_DATAFUSION_CAST_OPTIONS,
        )?;
        let result = expression.evaluate(&batch);
        match result {
            Ok(_) => panic!("expected error"),
            Err(e) => {
                assert!(e.to_string().contains(
                    "Cast error: Cannot cast string '9.1' to value of arrow::datatypes::types::Int32Type type"
                ))
            }
        }
        Ok(())
    }
}