
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#![doc(
    html_logo_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg",
    html_favicon_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg"
)]
#![cfg_attr(docsrs, feature(doc_cfg))]
// Make cheap clones clear: https://github.com/apache/datafusion/issues/11143
#![deny(clippy::clone_on_ref_ptr)]

//! Spark expression packages for [DataFusion].
//!
//! This crate contains a collection of Spark-compatible function packages for
//! DataFusion, implemented using DataFusion's extension APIs.
//!
//! [DataFusion]: https://crates.io/crates/datafusion
//!
//! # Available Function Packages
//! See the list of [modules](#modules) in this crate for available packages.
//!
//! # Example: using all function packages
//!
//! You can register all the functions in all packages using the [`register_all`]
//! function as shown below. Any existing functions with the same name are
//! overwritten, so the Spark implementations take priority.
//!
//! ```
//! # use datafusion_execution::FunctionRegistry;
//! # use datafusion_expr::{ScalarUDF, AggregateUDF, WindowUDF};
//! # use datafusion_expr::planner::ExprPlanner;
//! # use datafusion_common::Result;
//! # use std::collections::HashSet;
//! # use std::sync::Arc;
//! # // Note: we can't use a real SessionContext here because the
//! # // `datafusion_spark` crate does not depend on the main DataFusion crate,
//! # // so we use a dummy SessionContext that implements just enough of the API
//! # struct SessionContext {}
//! # impl FunctionRegistry for SessionContext {
//! #    fn register_udf(&mut self, _udf: Arc<ScalarUDF>) -> Result<Option<Arc<ScalarUDF>>> { Ok(None) }
//! #    fn udfs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udafs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udwfs(&self) -> HashSet<String> { unimplemented!() }
//! #    fn udf(&self, _name: &str) -> Result<Arc<ScalarUDF>> { unimplemented!() }
//! #    fn udaf(&self, _name: &str) -> Result<Arc<AggregateUDF>> { unimplemented!() }
//! #    fn udwf(&self, _name: &str) -> Result<Arc<WindowUDF>> { unimplemented!() }
//! #    fn expr_planners(&self) -> Vec<Arc<dyn ExprPlanner>> { unimplemented!() }
//! # }
//! # impl SessionContext {
//! #   fn new() -> Self { SessionContext {} }
//! #   async fn sql(&mut self, _query: &str) -> Result<()> { Ok(()) }
//! # }
//! #
//! # async fn stub() -> Result<()> {
//! // Create a new session context
//! let mut ctx = SessionContext::new();
//! // Register all Spark functions with the context
//! datafusion_spark::register_all(&mut ctx)?;
//! // Run a query using the `sha2` function which is now available and has Spark semantics
//! let df = ctx.sql("SELECT sha2('The input String', 256)").await?;
//! # Ok(())
//! # }
//! ```
//!
//! # Example: calling a specific function in Rust
//!
//! Each package also exports an `expr_fn` submodule with functions that create
//! [`Expr`]s for invoking functions from Rust using a fluent style. For example,
//! to invoke the `sha2` function, you can use the following code:
//!
//! ```rust
//! # use datafusion_expr::{col, lit};
//! use datafusion_spark::expr_fn::sha2;
//! // Create the expression `sha2(my_data, 256)`
//! let expr = sha2(col("my_data"), lit(256));
//! ```
//!
//! [`Expr`]: datafusion_expr::Expr

pub mod function;

use datafusion_catalog::TableFunction;
use datafusion_common::Result;
use datafusion_execution::FunctionRegistry;
use datafusion_expr::{AggregateUDF, ScalarUDF, WindowUDF};
use log::debug;
use std::sync::Arc;

/// Fluent-style API for creating `Expr`s
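///
/// A minimal sketch of the fluent style, mirroring the crate-level `sha2` example:
///
/// ```rust
/// # use datafusion_expr::{col, lit};
/// use datafusion_spark::expr_fn::sha2;
/// // Build the expression `sha2(my_data, 256)`
/// let expr = sha2(col("my_data"), lit(256));
/// ```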
#[allow(unused)]
pub mod expr_fn {
    pub use super::function::aggregate::expr_fn::*;
    pub use super::function::array::expr_fn::*;
    pub use super::function::bitmap::expr_fn::*;
    pub use super::function::bitwise::expr_fn::*;
    pub use super::function::collection::expr_fn::*;
    pub use super::function::conditional::expr_fn::*;
    pub use super::function::conversion::expr_fn::*;
    pub use super::function::csv::expr_fn::*;
    pub use super::function::datetime::expr_fn::*;
    pub use super::function::generator::expr_fn::*;
    pub use super::function::hash::expr_fn::*;
    pub use super::function::json::expr_fn::*;
    pub use super::function::lambda::expr_fn::*;
    pub use super::function::map::expr_fn::*;
    pub use super::function::math::expr_fn::*;
    pub use super::function::misc::expr_fn::*;
    pub use super::function::predicate::expr_fn::*;
    pub use super::function::r#struct::expr_fn::*;
    pub use super::function::string::expr_fn::*;
    pub use super::function::table::expr_fn::*;
    pub use super::function::url::expr_fn::*;
    pub use super::function::window::expr_fn::*;
    pub use super::function::xml::expr_fn::*;
}

/// Returns all default scalar functions
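///
/// A small sketch of inspecting the returned functions by name (the exact set
/// depends on the packages provided by this crate):
///
/// ```rust
/// let funcs = datafusion_spark::all_default_scalar_functions();
/// let names: Vec<&str> = funcs.iter().map(|udf| udf.name()).collect();
/// assert!(!names.is_empty());
/// ```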
pub fn all_default_scalar_functions() -> Vec<Arc<ScalarUDF>> {
    function::array::functions()
        .into_iter()
        .chain(function::bitmap::functions())
        .chain(function::bitwise::functions())
        .chain(function::collection::functions())
        .chain(function::conditional::functions())
        .chain(function::conversion::functions())
        .chain(function::csv::functions())
        .chain(function::datetime::functions())
        .chain(function::generator::functions())
        .chain(function::hash::functions())
        .chain(function::json::functions())
        .chain(function::lambda::functions())
        .chain(function::map::functions())
        .chain(function::math::functions())
        .chain(function::misc::functions())
        .chain(function::predicate::functions())
        .chain(function::string::functions())
        .chain(function::r#struct::functions())
        .chain(function::url::functions())
        .chain(function::xml::functions())
        .collect::<Vec<_>>()
}

/// Returns all default aggregate functions
pub fn all_default_aggregate_functions() -> Vec<Arc<AggregateUDF>> {
    function::aggregate::functions()
}

/// Returns all default window functions
pub fn all_default_window_functions() -> Vec<Arc<WindowUDF>> {
    function::window::functions()
}

/// Returns all default table functions
pub fn all_default_table_functions() -> Vec<Arc<TableFunction>> {
    function::table::functions()
}

/// Registers all enabled packages with a [`FunctionRegistry`], overriding any
/// existing functions if there is a name clash.
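///
/// See the [crate-level documentation](crate) for a complete example of
/// registering these functions and calling them through SQL.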
pub fn register_all(registry: &mut dyn FunctionRegistry) -> Result<()> {
    let scalar_functions: Vec<Arc<ScalarUDF>> = all_default_scalar_functions();
    scalar_functions.into_iter().try_for_each(|udf| {
        let existing_udf = registry.register_udf(udf)?;
        if let Some(existing_udf) = existing_udf {
            debug!("Overwriting existing UDF: {}", existing_udf.name());
        }
        Ok(()) as Result<()>
    })?;

    let aggregate_functions: Vec<Arc<AggregateUDF>> = all_default_aggregate_functions();
    aggregate_functions.into_iter().try_for_each(|udf| {
        let existing_udaf = registry.register_udaf(udf)?;
        if let Some(existing_udaf) = existing_udaf {
            debug!("Overwriting existing UDAF: {}", existing_udaf.name());
        }
        Ok(()) as Result<()>
    })?;

    let window_functions: Vec<Arc<WindowUDF>> = all_default_window_functions();
    window_functions.into_iter().try_for_each(|udf| {
        let existing_udwf = registry.register_udwf(udf)?;
        if let Some(existing_udwf) = existing_udwf {
            debug!("Overwriting existing UDWF: {}", existing_udwf.name());
        }
        Ok(()) as Result<()>
    })?;

    Ok(())
}