// datafusion_spark/lib.rs

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
17
18#![doc(
19    html_logo_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg",
20    html_favicon_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg"
21)]
22#![cfg_attr(docsrs, feature(doc_cfg))]
23// Make cheap clones clear: https://github.com/apache/datafusion/issues/11143
24#![deny(clippy::clone_on_ref_ptr)]
25#![deny(clippy::allow_attributes)]
26#![cfg_attr(test, allow(clippy::needless_pass_by_value))]
27
//! Spark Expression packages for [DataFusion].
//!
//! This crate contains a collection of various Spark function packages for DataFusion,
//! implemented using the extension API.
//!
//! [DataFusion]: https://crates.io/crates/datafusion
//!
//! # Available Function Packages
//! See the list of [modules](#modules) in this crate for available packages.
//!
//! # Example: using all function packages
//!
//! You can register all the functions in all packages using the [`register_all`]
//! function as shown below. Any existing functions will be overwritten, with these
//! Spark functions taking priority.
//!
//! ```
//! # use datafusion_execution::FunctionRegistry;
//! # use datafusion_expr::{ScalarUDF, AggregateUDF, WindowUDF};
//! # use datafusion_expr::planner::ExprPlanner;
//! # use datafusion_common::Result;
//! # use std::collections::HashSet;
//! # use std::sync::Arc;
//! # // Note: We can't use a real SessionContext here because the
//! # // `datafusion_spark` crate has no dependence on the DataFusion crate
//! # // thus use a dummy SessionContext that has enough of the implementation
//! # struct SessionContext {}
//! # impl FunctionRegistry for SessionContext {
//! #    fn register_udf(&mut self, _udf: Arc<ScalarUDF>) -> Result<Option<Arc<ScalarUDF>>> { Ok (None) }
//! #    fn udfs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udafs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udwfs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udf(&self, _name: &str) -> Result<Arc<ScalarUDF>> { unimplemented!() }
//! #    fn udaf(&self, name: &str) -> Result<Arc<AggregateUDF>> {unimplemented!() }
//! #    fn udwf(&self, name: &str) -> Result<Arc<WindowUDF>> { unimplemented!() }
//! #    fn expr_planners(&self) -> Vec<Arc<dyn ExprPlanner>> { unimplemented!() }
//! # }
//! # impl SessionContext {
//! #   fn new() -> Self { SessionContext {} }
//! #   async fn sql(&mut self, _query: &str) -> Result<()> { Ok(()) }
//! #  }
//! #
//! # async fn stub() -> Result<()> {
//! // Create a new session context
//! let mut ctx = SessionContext::new();
//! // Register all Spark functions with the context
//! datafusion_spark::register_all(&mut ctx)?;
//! // Run a query using the `sha2` function which is now available and has Spark semantics
//! let df = ctx.sql("SELECT sha2('The input String', 256)").await?;
//! # Ok(())
//! # }
//! ```
//!
//! # Example: calling a specific function in Rust
//!
//! Each package also exports an `expr_fn` submodule that create [`Expr`]s for
//! invoking functions via rust using a fluent style. For example, to invoke the
//! `sha2` function, you can use the following code:
//!
//! ```rust
//! # use datafusion_expr::{col, lit};
//! use datafusion_spark::expr_fn::sha2;
//! // Create the expression `sha2(my_data, 256)`
//! let expr = sha2(col("my_data"), lit(256));
//! ```
//!
//! [`Expr`]: datafusion_expr::Expr
96
97pub mod function;
98
99use datafusion_catalog::TableFunction;
100use datafusion_common::Result;
101use datafusion_execution::FunctionRegistry;
102use datafusion_expr::{AggregateUDF, ScalarUDF, WindowUDF};
103use log::debug;
104use std::sync::Arc;
105
/// Fluent-style API for creating `Expr`s
///
/// Re-exports the `expr_fn` submodule of every function package in this
/// crate so callers can write `use datafusion_spark::expr_fn::sha2;` (or a
/// glob import) instead of reaching into the individual packages.
// NOTE(review): `expect(unused_imports)` implies at least one package's
// `expr_fn` module currently exports nothing, making its glob re-export
// unused — confirm and drop the attribute once all packages export items.
#[expect(unused_imports)]
pub mod expr_fn {
    pub use super::function::aggregate::expr_fn::*;
    pub use super::function::array::expr_fn::*;
    pub use super::function::bitmap::expr_fn::*;
    pub use super::function::bitwise::expr_fn::*;
    pub use super::function::collection::expr_fn::*;
    pub use super::function::conditional::expr_fn::*;
    pub use super::function::conversion::expr_fn::*;
    pub use super::function::csv::expr_fn::*;
    pub use super::function::datetime::expr_fn::*;
    pub use super::function::generator::expr_fn::*;
    pub use super::function::hash::expr_fn::*;
    pub use super::function::json::expr_fn::*;
    pub use super::function::lambda::expr_fn::*;
    pub use super::function::map::expr_fn::*;
    pub use super::function::math::expr_fn::*;
    pub use super::function::misc::expr_fn::*;
    pub use super::function::predicate::expr_fn::*;
    pub use super::function::string::expr_fn::*;
    pub use super::function::r#struct::expr_fn::*;
    pub use super::function::table::expr_fn::*;
    pub use super::function::url::expr_fn::*;
    pub use super::function::window::expr_fn::*;
    pub use super::function::xml::expr_fn::*;
}
133
134/// Returns all default scalar functions
135pub fn all_default_scalar_functions() -> Vec<Arc<ScalarUDF>> {
136    function::array::functions()
137        .into_iter()
138        .chain(function::bitmap::functions())
139        .chain(function::bitwise::functions())
140        .chain(function::collection::functions())
141        .chain(function::conditional::functions())
142        .chain(function::conversion::functions())
143        .chain(function::csv::functions())
144        .chain(function::datetime::functions())
145        .chain(function::generator::functions())
146        .chain(function::hash::functions())
147        .chain(function::json::functions())
148        .chain(function::lambda::functions())
149        .chain(function::map::functions())
150        .chain(function::math::functions())
151        .chain(function::misc::functions())
152        .chain(function::predicate::functions())
153        .chain(function::string::functions())
154        .chain(function::r#struct::functions())
155        .chain(function::url::functions())
156        .chain(function::xml::functions())
157        .collect::<Vec<_>>()
158}
159
/// Returns all default aggregate functions
///
/// Thin delegation to [`function::aggregate::functions`]; exposed here so
/// all four `all_default_*_functions` entry points live side by side.
pub fn all_default_aggregate_functions() -> Vec<Arc<AggregateUDF>> {
    function::aggregate::functions()
}
164
/// Returns all default window functions
///
/// Thin delegation to [`function::window::functions`]; exposed here so
/// all four `all_default_*_functions` entry points live side by side.
pub fn all_default_window_functions() -> Vec<Arc<WindowUDF>> {
    function::window::functions()
}
169
/// Returns all default table functions
///
/// Thin delegation to [`function::table::functions`].
// NOTE(review): unlike the scalar/aggregate/window lists, these are NOT
// registered by `register_all` — presumably callers must register table
// functions separately; confirm against the intended registration path.
pub fn all_default_table_functions() -> Vec<Arc<TableFunction>> {
    function::table::functions()
}
174
175/// Registers all enabled packages with a [`FunctionRegistry`], overriding any existing
176/// functions if there is a name clash.
177pub fn register_all(registry: &mut dyn FunctionRegistry) -> Result<()> {
178    let scalar_functions: Vec<Arc<ScalarUDF>> = all_default_scalar_functions();
179    scalar_functions.into_iter().try_for_each(|udf| {
180        let existing_udf = registry.register_udf(udf)?;
181        if let Some(existing_udf) = existing_udf {
182            debug!("Overwrite existing UDF: {}", existing_udf.name());
183        }
184        Ok(()) as Result<()>
185    })?;
186
187    let aggregate_functions: Vec<Arc<AggregateUDF>> = all_default_aggregate_functions();
188    aggregate_functions.into_iter().try_for_each(|udf| {
189        let existing_udaf = registry.register_udaf(udf)?;
190        if let Some(existing_udaf) = existing_udaf {
191            debug!("Overwrite existing UDAF: {}", existing_udaf.name());
192        }
193        Ok(()) as Result<()>
194    })?;
195
196    let window_functions: Vec<Arc<WindowUDF>> = all_default_window_functions();
197    window_functions.into_iter().try_for_each(|udf| {
198        let existing_udwf = registry.register_udwf(udf)?;
199        if let Some(existing_udwf) = existing_udwf {
200            debug!("Overwrite existing UDWF: {}", existing_udwf.name());
201        }
202        Ok(()) as Result<()>
203    })?;
204
205    Ok(())
206}