datafusion_comet_spark_expr/lib.rs

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

// Clippy raises an error if a clone of a reference-counted pointer is not wrapped
// in `Arc::clone`. This lint makes it easier for readers and reviewers to separate
// cheap reference-count clones from more heavyweight ones.
#![deny(clippy::clone_on_ref_ptr)]
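// For example, `Arc::clone(&expr)` is preferred over `expr.clone()` when `expr`
// is an `Arc`, making the cheap reference-count increment explicit (`expr` here
// is purely illustrative).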

mod error;

mod kernels;
mod static_invoke;
pub use static_invoke::*;

mod struct_funcs;
pub use struct_funcs::{CreateNamedStruct, GetStructField};

mod json_funcs;
pub mod test_common;
pub mod timezone;
mod unbound;
pub use unbound::UnboundColumn;
mod predicate_funcs;
pub mod utils;
pub use predicate_funcs::{spark_isnan, RLike};

mod agg_funcs;
mod array_funcs;
mod bitwise_funcs;
mod comet_scalar_funcs;
pub mod hash_funcs;

mod string_funcs;

mod datetime_funcs;
pub use agg_funcs::*;

pub use cast::{spark_cast, Cast, SparkCastOptions};
mod conditional_funcs;
mod conversion_funcs;
mod math_funcs;

pub use array_funcs::*;
pub use bitwise_funcs::*;
pub use conditional_funcs::*;
pub use conversion_funcs::*;

pub use comet_scalar_funcs::create_comet_physical_fun;
pub use datetime_funcs::{
    spark_date_add, spark_date_sub, DateTruncExpr, HourExpr, MinuteExpr, SecondExpr,
    TimestampTruncExpr,
};
pub use error::{SparkError, SparkResult};
pub use hash_funcs::*;
pub use json_funcs::ToJson;
pub use math_funcs::{
    create_negate_expr, spark_ceil, spark_decimal_div, spark_floor, spark_hex, spark_make_decimal,
    spark_round, spark_unhex, spark_unscaled_value, CheckOverflow, NegativeExpr,
    NormalizeNaNAndZero,
};
pub use string_funcs::*;

/// Spark supports three evaluation modes when evaluating expressions, which affect
/// the behavior when processing input values that are invalid or would result in an
/// error (such as division by zero), as well as the behavior when converting between
/// types.
#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
pub enum EvalMode {
    /// Legacy is the default behavior in Spark prior to Spark 4.0. This mode silently ignores
    /// or replaces errors during SQL operations. Operations resulting in errors (like
    /// division by zero) will produce NULL values instead of failing. Legacy mode also
    /// enables implicit type conversions.
    Legacy,
    /// Adheres to the ANSI SQL standard for error handling by throwing exceptions for
    /// operations that result in errors. Does not perform implicit type conversions.
    Ansi,
    /// Same as Ansi mode, except that it converts errors to NULL values without
    /// failing the entire query.
    Try,
}
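
// A minimal, illustrative sketch (not part of this crate's public API) of how an
// operator might branch on `EvalMode`. The helper name `div_with_mode` and the
// reuse of `arithmetic_overflow_error` for the failure case are assumptions made
// for this example only; real operators report more specific errors.
#[cfg(test)]
mod eval_mode_example {
    use super::*;

    // Integer division that follows the three Spark evaluation modes: errors
    // become NULL (`None`) in Legacy and Try modes, and surface as a
    // `SparkError` in Ansi mode.
    fn div_with_mode(a: i64, b: i64, mode: EvalMode) -> SparkResult<Option<i64>> {
        // `checked_div` returns `None` on division by zero and on the
        // overflowing `i64::MIN / -1` case.
        match a.checked_div(b) {
            Some(v) => Ok(Some(v)),
            None => match mode {
                // Legacy silently replaces the error with NULL.
                EvalMode::Legacy => Ok(None),
                // Ansi propagates an error, failing the query.
                EvalMode::Ansi => Err(arithmetic_overflow_error("integer")),
                // Try converts the error to NULL instead of failing.
                EvalMode::Try => Ok(None),
            },
        }
    }

    #[test]
    fn eval_modes_differ_on_division_by_zero() {
        assert_eq!(div_with_mode(1, 0, EvalMode::Legacy).unwrap(), None);
        assert!(div_with_mode(1, 0, EvalMode::Ansi).is_err());
        assert_eq!(div_with_mode(1, 0, EvalMode::Try).unwrap(), None);
    }
}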

/// Creates a [`SparkError::ArithmeticOverflow`] carrying the given source type
/// name, e.g. `arithmetic_overflow_error("integer")`.
pub(crate) fn arithmetic_overflow_error(from_type: &str) -> SparkError {
    SparkError::ArithmeticOverflow {
        from_type: from_type.to_string(),
    }
}