datafusion_comet_spark_expr/lib.rs
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Clippy raises an error if a clone of a reference-counted pointer is not wrapped in
// `Arc::clone`. The lint makes it easier for readers and reviewers to distinguish cheap
// reference-count clones from more heavyweight ones.
#![deny(clippy::clone_on_ref_ptr)]
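//
// For example (illustrative only, not code from this crate):
//
//     use std::sync::Arc;
//
//     let a: Arc<str> = Arc::from("x");
//     let b = Arc::clone(&a); // allowed: explicitly a cheap reference-count bump
//     let c = a.clone();      // rejected by `clippy::clone_on_ref_ptr`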

mod error;

mod kernels;
mod static_invoke;
pub use static_invoke::*;

mod struct_funcs;
pub use struct_funcs::{CreateNamedStruct, GetStructField};

mod json_funcs;
pub mod test_common;
pub mod timezone;
mod unbound;
pub use unbound::UnboundColumn;
mod predicate_funcs;
pub mod utils;
pub use predicate_funcs::{spark_isnan, RLike};

mod agg_funcs;
mod array_funcs;
mod bitwise_funcs;
mod comet_scalar_funcs;
pub mod hash_funcs;

mod string_funcs;

mod datetime_funcs;
pub use agg_funcs::*;

pub use cast::{spark_cast, Cast, SparkCastOptions};
mod conditional_funcs;
mod conversion_funcs;
mod math_funcs;
mod nondetermenistic_funcs;

pub use array_funcs::*;
pub use bitwise_funcs::*;
pub use conditional_funcs::*;
pub use conversion_funcs::*;
pub use nondetermenistic_funcs::*;

pub use comet_scalar_funcs::{create_comet_physical_fun, register_all_comet_functions};
pub use datetime_funcs::{
    spark_date_add, spark_date_sub, SparkDateTrunc, SparkHour, SparkMinute, SparkSecond,
    TimestampTruncExpr,
};
pub use error::{SparkError, SparkResult};
pub use hash_funcs::*;
pub use json_funcs::ToJson;
pub use math_funcs::{
    create_negate_expr, spark_ceil, spark_decimal_div, spark_decimal_integral_div, spark_floor,
    spark_hex, spark_make_decimal, spark_round, spark_unhex, spark_unscaled_value, CheckOverflow,
    NegativeExpr, NormalizeNaNAndZero,
};
pub use string_funcs::*;

/// Spark supports three evaluation modes when evaluating expressions. The mode affects
/// the behavior when processing input values that are invalid or would result in an
/// error, such as division by zero, and also affects behavior when converting
/// between types.
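///
/// For example, a kernel might branch on the mode like this (an illustrative
/// sketch only; `checked_div` here is a hypothetical helper, not an API of this
/// crate):
///
/// ```ignore
/// fn checked_div(a: i64, b: i64, mode: EvalMode) -> Result<Option<i64>, SparkError> {
///     match a.checked_div(b) {
///         Some(v) => Ok(Some(v)),
///         // `i64::checked_div` returns `None` on division by zero or on
///         // `i64::MIN / -1` overflow; a real kernel would distinguish the
///         // two error kinds. `Ansi` surfaces the error ...
///         None if mode == EvalMode::Ansi => Err(SparkError::ArithmeticOverflow {
///             from_type: "BIGINT".to_string(),
///         }),
///         // ... while `Legacy` and `Try` replace it with NULL.
///         None => Ok(None),
///     }
/// }
/// ```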
#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
pub enum EvalMode {
    /// Legacy is the default behavior in Spark prior to Spark 4.0. This mode silently ignores
    /// or replaces errors during SQL operations. Operations resulting in errors (like
    /// division by zero) will produce NULL values instead of failing. Legacy mode also
    /// enables implicit type conversions.
    Legacy,
    /// Adheres to the ANSI SQL standard for error handling by throwing exceptions for
    /// operations that result in errors. Does not perform implicit type conversions.
    Ansi,
    /// Same as Ansi mode, except that it converts errors to NULL values without
    /// failing the entire query.
    Try,
}

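/// Creates a [`SparkError::ArithmeticOverflow`] carrying the name of the type
/// that overflowed.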
pub(crate) fn arithmetic_overflow_error(from_type: &str) -> SparkError {
    SparkError::ArithmeticOverflow {
        from_type: from_type.to_string(),
    }
}