// datafusion_spark/function/datetime/date_sub.rs
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

use std::any::Any;
use std::sync::Arc;

use arrow::array::{ArrayRef, Date32Array, PrimitiveArray};
use arrow::compute;
use arrow::datatypes::{ArrowPrimitiveType, DataType, Date32Type, Field, FieldRef};
use datafusion_common::cast::{
    as_date32_array, as_int8_array, as_int16_array, as_int32_array,
};
use datafusion_common::{Result, internal_err};
use datafusion_expr::{
    ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs, ScalarUDFImpl, Signature,
    TypeSignature, Volatility,
};
use datafusion_functions::utils::make_scalar_function;

/// Spark-compatible `date_sub` scalar UDF.
///
/// Subtracts an integer number of days from a `Date32` value, mirroring
/// Spark's `date_sub(start_date, num_days)` function.
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct SparkDateSub {
    // Accepted argument types: (Date32, Int8 | Int16 | Int32).
    signature: Signature,
}

39impl Default for SparkDateSub {
40    fn default() -> Self {
41        Self::new()
42    }
43}
44
45impl SparkDateSub {
46    pub fn new() -> Self {
47        Self {
48            signature: Signature::one_of(
49                vec![
50                    TypeSignature::Exact(vec![DataType::Date32, DataType::Int8]),
51                    TypeSignature::Exact(vec![DataType::Date32, DataType::Int16]),
52                    TypeSignature::Exact(vec![DataType::Date32, DataType::Int32]),
53                ],
54                Volatility::Immutable,
55            ),
56        }
57    }
58}
59
60impl ScalarUDFImpl for SparkDateSub {
61    fn as_any(&self) -> &dyn Any {
62        self
63    }
64
65    fn name(&self) -> &str {
66        "date_sub"
67    }
68
69    fn signature(&self) -> &Signature {
70        &self.signature
71    }
72
73    fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
74        internal_err!("return_field_from_args should be used instead")
75    }
76
77    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
78        let nullable = args.arg_fields.iter().any(|f| f.is_nullable())
79            || args
80                .scalar_arguments
81                .iter()
82                .any(|arg| matches!(arg, Some(sv) if sv.is_null()));
83
84        Ok(Arc::new(Field::new(
85            self.name(),
86            DataType::Date32,
87            nullable,
88        )))
89    }
90
91    fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result<ColumnarValue> {
92        make_scalar_function(spark_date_sub, vec![])(&args.args)
93    }
94}
95
96fn spark_date_sub(args: &[ArrayRef]) -> Result<ArrayRef> {
97    let [date_arg, days_arg] = args else {
98        return internal_err!(
99            "Spark `date_sub` function requires 2 arguments, got {}",
100            args.len()
101        );
102    };
103    let date_array = as_date32_array(date_arg)?;
104    let result = match days_arg.data_type() {
105        DataType::Int8 => {
106            let days_array = as_int8_array(days_arg)?;
107            compute::binary::<_, _, _, Date32Type>(
108                date_array,
109                days_array,
110                |date, days| date.wrapping_sub(days as i32),
111            )?
112        }
113        DataType::Int16 => {
114            let days_array = as_int16_array(days_arg)?;
115            compute::binary::<_, _, _, Date32Type>(
116                date_array,
117                days_array,
118                |date, days| date.wrapping_sub(days as i32),
119            )?
120        }
121        DataType::Int32 => {
122            let days_array = as_int32_array(days_arg)?;
123            compute::binary::<_, _, _, Date32Type>(
124                date_array,
125                days_array,
126                |date, days| date.wrapping_sub(days),
127            )?
128        }
129        _ => {
130            return internal_err!(
131                "Spark `date_sub` function: argument must be int8, int16, int32, got {:?}",
132                days_arg.data_type()
133            );
134        }
135    };
136    Ok(Arc::new(result))
137}
138
#[cfg(test)]
mod tests {
    use super::*;
    use datafusion_common::ScalarValue;

    /// Runs `return_field_from_args` for a fresh `date_sub` UDF with the
    /// given argument fields and scalar arguments, returning the inferred
    /// output field.
    fn output_field(
        fields: [FieldRef; 2],
        scalars: [Option<&ScalarValue>; 2],
    ) -> FieldRef {
        SparkDateSub::new()
            .return_field_from_args(ReturnFieldArgs {
                arg_fields: &fields,
                scalar_arguments: &scalars,
            })
            .unwrap()
    }

    /// Date32 argument field with the requested nullability.
    fn date_field(nullable: bool) -> FieldRef {
        Arc::new(Field::new("d", DataType::Date32, nullable))
    }

    /// Int32 day-count argument field with the requested nullability.
    fn days_field(nullable: bool) -> FieldRef {
        Arc::new(Field::new("n", DataType::Int32, nullable))
    }

    #[test]
    fn test_date_sub_nullability_non_nullable_args() {
        let field = output_field([date_field(false), days_field(false)], [None, None]);

        assert!(!field.is_nullable());
        assert_eq!(field.data_type(), &DataType::Date32);
    }

    #[test]
    fn test_date_sub_nullability_nullable_arg() {
        let field = output_field([date_field(false), days_field(true)], [None, None]);

        assert!(field.is_nullable());
        assert_eq!(field.data_type(), &DataType::Date32);
    }

    #[test]
    fn test_date_sub_nullability_scalar_null_argument() {
        let null_days = ScalarValue::Int32(None);

        let field = output_field(
            [date_field(false), days_field(false)],
            [None, Some(&null_days)],
        );

        assert!(field.is_nullable());
        assert_eq!(field.data_type(), &DataType::Date32);
    }
}