use crate::expr::InSubquery;
use crate::expr::{Exists, Placeholder};
use crate::logical_plan::display::{GraphvizVisitor, IndentVisitor};
use crate::logical_plan::extension::UserDefinedLogicalNode;
use crate::logical_plan::{DmlStatement, Statement};
use crate::utils::{
enumerate_grouping_sets, exprlist_to_fields, find_out_reference_exprs, from_plan,
grouping_set_expr_count, grouping_set_to_exprlist, inspect_expr_pre,
};
use crate::{
build_join_schema, Expr, ExprSchemable, TableProviderFilterPushDown, TableSource,
};
use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use datafusion_common::tree_node::{
Transformed, TreeNode, TreeNodeVisitor, VisitRecursion,
};
use datafusion_common::{
plan_err, Column, DFSchema, DFSchemaRef, DataFusionError, OwnedTableReference,
Result, ScalarValue,
};
use std::collections::{HashMap, HashSet};
use std::fmt::{self, Debug, Display, Formatter};
use std::hash::{Hash, Hasher};
use std::str::FromStr;
use std::sync::Arc;
use super::DdlStatement;
/// A `LogicalPlan` is a node in a tree of relational operators.
///
/// Each variant wraps the struct that holds that operator's state
/// (expressions, child plans, output schema). The tree is a dataflow
/// graph: data flows from the leaves (e.g. `TableScan`) towards the root.
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum LogicalPlan {
    /// Evaluates a list of expressions over its input.
    Projection(Projection),
    /// Filters rows by a boolean predicate.
    Filter(Filter),
    /// Evaluates window functions.
    Window(Window),
    /// Groups input and evaluates aggregate expressions.
    Aggregate(Aggregate),
    /// Sorts its input.
    Sort(Sort),
    /// Joins two inputs on equi-join keys and/or a filter.
    Join(Join),
    /// Cartesian product of two inputs.
    CrossJoin(CrossJoin),
    /// Repartitions its input according to a partitioning scheme.
    Repartition(Repartition),
    /// Concatenates multiple inputs with a common schema.
    Union(Union),
    /// Reads rows from a table provider.
    TableScan(TableScan),
    /// A relation with a schema but no (or one synthetic) row.
    EmptyRelation(EmptyRelation),
    /// A subquery plan, referenced from an expression.
    Subquery(Subquery),
    /// Renames (aliases) the output relation of its input.
    SubqueryAlias(SubqueryAlias),
    /// Skips/limits the number of rows produced.
    Limit(Limit),
    /// A transaction-control or other SQL statement.
    Statement(Statement),
    /// Literal rows (`VALUES ...`).
    Values(Values),
    /// `EXPLAIN` of a plan.
    Explain(Explain),
    /// `EXPLAIN ANALYZE`: runs the plan and reports metrics.
    Analyze(Analyze),
    /// A user-defined (extension) plan node.
    Extension(Extension),
    /// Removes duplicate rows.
    Distinct(Distinct),
    /// A prepared statement with typed `$n` placeholders.
    Prepare(Prepare),
    /// Data-manipulation statement (INSERT/UPDATE/DELETE).
    Dml(DmlStatement),
    /// Data-definition statement (CREATE/DROP ...).
    Ddl(DdlStatement),
    /// `DESCRIBE <table>`.
    DescribeTable(DescribeTable),
    /// Unnests a list-typed column into rows.
    Unnest(Unnest),
}
impl LogicalPlan {
    /// Returns the output schema of this plan node.
    ///
    /// Pass-through nodes (Filter, Sort, Limit, Distinct, Repartition,
    /// Prepare) report their input's schema; the rest return the schema
    /// stored on (or computed by) the variant itself.
    pub fn schema(&self) -> &DFSchemaRef {
        match self {
            LogicalPlan::EmptyRelation(EmptyRelation { schema, .. }) => schema,
            LogicalPlan::Values(Values { schema, .. }) => schema,
            LogicalPlan::TableScan(TableScan {
                projected_schema, ..
            }) => projected_schema,
            LogicalPlan::Projection(Projection { schema, .. }) => schema,
            LogicalPlan::Filter(Filter { input, .. }) => input.schema(),
            LogicalPlan::Distinct(Distinct { input }) => input.schema(),
            LogicalPlan::Window(Window { schema, .. }) => schema,
            LogicalPlan::Aggregate(Aggregate { schema, .. }) => schema,
            LogicalPlan::Sort(Sort { input, .. }) => input.schema(),
            LogicalPlan::Join(Join { schema, .. }) => schema,
            LogicalPlan::CrossJoin(CrossJoin { schema, .. }) => schema,
            LogicalPlan::Repartition(Repartition { input, .. }) => input.schema(),
            LogicalPlan::Limit(Limit { input, .. }) => input.schema(),
            LogicalPlan::Statement(statement) => statement.schema(),
            LogicalPlan::Subquery(Subquery { subquery, .. }) => subquery.schema(),
            LogicalPlan::SubqueryAlias(SubqueryAlias { schema, .. }) => schema,
            LogicalPlan::Prepare(Prepare { input, .. }) => input.schema(),
            LogicalPlan::Explain(explain) => &explain.schema,
            LogicalPlan::Analyze(analyze) => &analyze.schema,
            LogicalPlan::Extension(extension) => extension.node.schema(),
            LogicalPlan::Union(Union { schema, .. }) => schema,
            LogicalPlan::DescribeTable(DescribeTable { dummy_schema, .. }) => {
                dummy_schema
            }
            LogicalPlan::Dml(DmlStatement { table_schema, .. }) => table_schema,
            LogicalPlan::Ddl(ddl) => ddl.schema(),
            LogicalPlan::Unnest(Unnest { schema, .. }) => schema,
        }
    }

    /// Returns the input schemas to fall back to when normalizing columns,
    /// for the expression-bearing nodes whose own schema may differ from
    /// their inputs'. All other nodes return an empty list.
    pub fn fallback_normalize_schemas(&self) -> Vec<&DFSchema> {
        match self {
            LogicalPlan::Window(_)
            | LogicalPlan::Projection(_)
            | LogicalPlan::Aggregate(_)
            | LogicalPlan::Unnest(_)
            | LogicalPlan::Join(_)
            | LogicalPlan::CrossJoin(_) => self
                .inputs()
                .iter()
                .map(|input| input.schema().as_ref())
                .collect(),
            _ => vec![],
        }
    }

    /// Returns this node's schema and/or its inputs' schemas, depending on
    /// the variant. Deprecated: prefer `schema()` / `fallback_normalize_schemas()`.
    #[deprecated(since = "20.0.0")]
    pub fn all_schemas(&self) -> Vec<&DFSchemaRef> {
        match self {
            // Nodes whose output schema is derived from their inputs:
            // report own schema first, then each input's.
            LogicalPlan::Window(_)
            | LogicalPlan::Projection(_)
            | LogicalPlan::Aggregate(_)
            | LogicalPlan::Unnest(_)
            | LogicalPlan::Join(_)
            | LogicalPlan::CrossJoin(_) => {
                let mut schemas = vec![self.schema()];
                self.inputs().iter().for_each(|input| {
                    schemas.push(input.schema());
                });
                schemas
            }
            // Nodes with a self-contained output schema.
            LogicalPlan::Explain(_)
            | LogicalPlan::Analyze(_)
            | LogicalPlan::EmptyRelation(_)
            | LogicalPlan::Ddl(_)
            | LogicalPlan::Dml(_)
            | LogicalPlan::Values(_)
            | LogicalPlan::SubqueryAlias(_)
            | LogicalPlan::Union(_)
            | LogicalPlan::Extension(_)
            | LogicalPlan::TableScan(_) => {
                vec![self.schema()]
            }
            // Pass-through nodes: just the input schemas.
            LogicalPlan::Limit(_)
            | LogicalPlan::Subquery(_)
            | LogicalPlan::Repartition(_)
            | LogicalPlan::Sort(_)
            | LogicalPlan::Filter(_)
            | LogicalPlan::Distinct(_)
            | LogicalPlan::Prepare(_) => {
                self.inputs().iter().map(|p| p.schema()).collect()
            }
            LogicalPlan::Statement(_) | LogicalPlan::DescribeTable(_) => vec![],
        }
    }

    /// Returns the (Arrow) schema used for all `EXPLAIN` output:
    /// two non-nullable Utf8 columns, `plan_type` and `plan`.
    pub fn explain_schema() -> SchemaRef {
        SchemaRef::new(Schema::new(vec![
            Field::new("plan_type", DataType::Utf8, false),
            Field::new("plan", DataType::Utf8, false),
        ]))
    }

    /// Returns all expressions of this plan node (non-recursively:
    /// expressions in child nodes are not included).
    pub fn expressions(self: &LogicalPlan) -> Vec<Expr> {
        let mut exprs = vec![];
        self.inspect_expressions(|e| {
            exprs.push(e.clone());
            // the closure is infallible, so the unwrap below cannot fire
            Ok(()) as Result<()>
        })
        .unwrap();
        exprs
    }

    /// Returns all outer-reference expressions (correlated columns) used
    /// anywhere in this plan tree, deduplicated in first-seen order.
    pub fn all_out_ref_exprs(self: &LogicalPlan) -> Vec<Expr> {
        let mut exprs = vec![];
        self.inspect_expressions(|e| {
            find_out_reference_exprs(e).into_iter().for_each(|e| {
                if !exprs.contains(&e) {
                    exprs.push(e)
                }
            });
            Ok(()) as Result<(), DataFusionError>
        })
        .unwrap();
        // recurse into children and merge, preserving dedup
        self.inputs()
            .into_iter()
            .flat_map(|child| child.all_out_ref_exprs())
            .for_each(|e| {
                if !exprs.contains(&e) {
                    exprs.push(e)
                }
            });
        exprs
    }

    /// Calls `f` on each expression of this plan node (non-recursively),
    /// short-circuiting on the first error.
    pub fn inspect_expressions<F, E>(self: &LogicalPlan, mut f: F) -> Result<(), E>
    where
        F: FnMut(&Expr) -> Result<(), E>,
    {
        match self {
            LogicalPlan::Projection(Projection { expr, .. }) => {
                expr.iter().try_for_each(f)
            }
            LogicalPlan::Values(Values { values, .. }) => {
                values.iter().flatten().try_for_each(f)
            }
            LogicalPlan::Filter(Filter { predicate, .. }) => f(predicate),
            LogicalPlan::Repartition(Repartition {
                partitioning_scheme,
                ..
            }) => match partitioning_scheme {
                Partitioning::Hash(expr, _) => expr.iter().try_for_each(f),
                Partitioning::DistributeBy(expr) => expr.iter().try_for_each(f),
                Partitioning::RoundRobinBatch(_) => Ok(()),
            },
            LogicalPlan::Window(Window { window_expr, .. }) => {
                window_expr.iter().try_for_each(f)
            }
            LogicalPlan::Aggregate(Aggregate {
                group_expr,
                aggr_expr,
                ..
            }) => group_expr.iter().chain(aggr_expr.iter()).try_for_each(f),
            // Equi-join keys are surfaced as synthetic `l = r` expressions,
            // followed by the optional join filter.
            LogicalPlan::Join(Join { on, filter, .. }) => {
                on.iter()
                    .map(|(l, r)| Expr::eq(l.clone(), r.clone()))
                    .try_for_each(|e| f(&e))?;
                if let Some(filter) = filter.as_ref() {
                    f(filter)
                } else {
                    Ok(())
                }
            }
            LogicalPlan::Sort(Sort { expr, .. }) => expr.iter().try_for_each(f),
            LogicalPlan::Extension(extension) => {
                extension.node.expressions().iter().try_for_each(f)
            }
            LogicalPlan::TableScan(TableScan { filters, .. }) => {
                filters.iter().try_for_each(f)
            }
            LogicalPlan::Unnest(Unnest { column, .. }) => {
                f(&Expr::Column(column.clone()))
            }
            // Nodes that carry no expressions of their own.
            LogicalPlan::EmptyRelation(_)
            | LogicalPlan::Subquery(_)
            | LogicalPlan::SubqueryAlias(_)
            | LogicalPlan::Limit(_)
            | LogicalPlan::Statement(_)
            | LogicalPlan::CrossJoin(_)
            | LogicalPlan::Analyze(_)
            | LogicalPlan::Explain(_)
            | LogicalPlan::Union(_)
            | LogicalPlan::Distinct(_)
            | LogicalPlan::Dml(_)
            | LogicalPlan::Ddl(_)
            | LogicalPlan::DescribeTable(_)
            | LogicalPlan::Prepare(_) => Ok(()),
        }
    }

    /// Returns the direct child plans of this node (leaf nodes return an
    /// empty vector).
    pub fn inputs(&self) -> Vec<&LogicalPlan> {
        match self {
            LogicalPlan::Projection(Projection { input, .. }) => vec![input],
            LogicalPlan::Filter(Filter { input, .. }) => vec![input],
            LogicalPlan::Repartition(Repartition { input, .. }) => vec![input],
            LogicalPlan::Window(Window { input, .. }) => vec![input],
            LogicalPlan::Aggregate(Aggregate { input, .. }) => vec![input],
            LogicalPlan::Sort(Sort { input, .. }) => vec![input],
            LogicalPlan::Join(Join { left, right, .. }) => vec![left, right],
            LogicalPlan::CrossJoin(CrossJoin { left, right, .. }) => vec![left, right],
            LogicalPlan::Limit(Limit { input, .. }) => vec![input],
            LogicalPlan::Subquery(Subquery { subquery, .. }) => vec![subquery],
            LogicalPlan::SubqueryAlias(SubqueryAlias { input, .. }) => vec![input],
            LogicalPlan::Extension(extension) => extension.node.inputs(),
            LogicalPlan::Union(Union { inputs, .. }) => {
                inputs.iter().map(|arc| arc.as_ref()).collect()
            }
            LogicalPlan::Distinct(Distinct { input }) => vec![input],
            LogicalPlan::Explain(explain) => vec![&explain.plan],
            LogicalPlan::Analyze(analyze) => vec![&analyze.input],
            LogicalPlan::Dml(write) => vec![&write.input],
            LogicalPlan::Ddl(ddl) => ddl.inputs(),
            LogicalPlan::Unnest(Unnest { input, .. }) => vec![input],
            LogicalPlan::Prepare(Prepare { input, .. }) => vec![input],
            // Leaf nodes.
            LogicalPlan::TableScan { .. }
            | LogicalPlan::Statement { .. }
            | LogicalPlan::EmptyRelation { .. }
            | LogicalPlan::Values { .. }
            | LogicalPlan::DescribeTable(_) => vec![],
        }
    }

    /// Collects, for every `USING`-constrained join in the tree, the set of
    /// join columns (one `HashSet` per such join).
    pub fn using_columns(&self) -> Result<Vec<HashSet<Column>>, DataFusionError> {
        let mut using_columns: Vec<HashSet<Column>> = vec![];
        self.apply(&mut |plan| {
            if let LogicalPlan::Join(Join {
                join_constraint: JoinConstraint::Using,
                on,
                ..
            }) = plan
            {
                // both sides of each `on` pair contribute a column
                let columns =
                    on.iter().try_fold(HashSet::new(), |mut accumu, (l, r)| {
                        accumu.insert(l.try_into_col()?);
                        accumu.insert(r.try_into_col()?);
                        Result::<_, DataFusionError>::Ok(accumu)
                    })?;
                using_columns.push(columns);
            }
            Ok(VisitRecursion::Continue)
        })?;
        Ok(using_columns)
    }

    /// Rebuilds this node with the same expressions but new `inputs`.
    pub fn with_new_inputs(&self, inputs: &[LogicalPlan]) -> Result<LogicalPlan> {
        from_plan(self, &self.expressions(), inputs)
    }

    /// Converts a `Prepare` plan into its inner plan with `$n` placeholders
    /// replaced by `param_values`. Any other plan is returned unchanged.
    ///
    /// # Errors
    /// Returns `DataFusionError::Internal` when the parameter count or any
    /// parameter type does not match the `Prepare` node's declared types.
    pub fn with_param_values(
        self,
        param_values: Vec<ScalarValue>,
    ) -> Result<LogicalPlan> {
        match self {
            LogicalPlan::Prepare(prepare_lp) => {
                // arity check first ...
                if prepare_lp.data_types.len() != param_values.len() {
                    return Err(DataFusionError::Internal(format!(
                        "Expected {} parameters, got {}",
                        prepare_lp.data_types.len(),
                        param_values.len()
                    )));
                }
                // ... then a positional type check
                let iter = prepare_lp.data_types.iter().zip(param_values.iter());
                for (i, (param_type, value)) in iter.enumerate() {
                    if *param_type != value.get_datatype() {
                        return Err(DataFusionError::Internal(format!(
                            "Expected parameter of type {:?}, got {:?} at index {}",
                            param_type,
                            value.get_datatype(),
                            i
                        )));
                    }
                }
                let input_plan = prepare_lp.input;
                input_plan.replace_params_with_values(&param_values)
            }
            _ => Ok(self),
        }
    }

    /// Returns an upper bound on the number of rows this plan can emit,
    /// or `None` when no bound can be inferred.
    pub fn max_rows(self: &LogicalPlan) -> Option<usize> {
        match self {
            LogicalPlan::Projection(Projection { input, .. }) => input.max_rows(),
            LogicalPlan::Filter(Filter { input, .. }) => input.max_rows(),
            LogicalPlan::Window(Window { input, .. }) => input.max_rows(),
            LogicalPlan::Aggregate(Aggregate {
                input, group_expr, ..
            }) => {
                // grouping only on literals collapses to a single group
                if group_expr
                    .iter()
                    .all(|expr| matches!(expr, Expr::Literal(_)))
                {
                    Some(1)
                } else {
                    input.max_rows()
                }
            }
            LogicalPlan::Sort(Sort { input, fetch, .. }) => {
                match (fetch, input.max_rows()) {
                    (Some(fetch_limit), Some(input_max)) => {
                        Some(input_max.min(*fetch_limit))
                    }
                    (Some(fetch_limit), None) => Some(*fetch_limit),
                    (None, Some(input_max)) => Some(input_max),
                    (None, None) => None,
                }
            }
            LogicalPlan::Join(Join {
                left,
                right,
                join_type,
                ..
            }) => match join_type {
                JoinType::Inner | JoinType::Left | JoinType::Right | JoinType::Full => {
                    match (left.max_rows(), right.max_rows()) {
                        (Some(left_max), Some(right_max)) => {
                            // outer joins may also preserve unmatched rows,
                            // so the bound is at least the preserved side(s)
                            let min_rows = match join_type {
                                JoinType::Left => left_max,
                                JoinType::Right => right_max,
                                JoinType::Full => left_max + right_max,
                                _ => 0,
                            };
                            Some((left_max * right_max).max(min_rows))
                        }
                        _ => None,
                    }
                }
                // semi/anti joins never emit more rows than the probe side
                JoinType::LeftSemi | JoinType::LeftAnti => left.max_rows(),
                JoinType::RightSemi | JoinType::RightAnti => right.max_rows(),
            },
            LogicalPlan::CrossJoin(CrossJoin { left, right, .. }) => {
                match (left.max_rows(), right.max_rows()) {
                    (Some(left_max), Some(right_max)) => Some(left_max * right_max),
                    _ => None,
                }
            }
            LogicalPlan::Repartition(Repartition { input, .. }) => input.max_rows(),
            // a union is bounded only if every branch is
            LogicalPlan::Union(Union { inputs, .. }) => inputs
                .iter()
                .map(|plan| plan.max_rows())
                .try_fold(0usize, |mut acc, input_max| {
                    if let Some(i_max) = input_max {
                        acc += i_max;
                        Some(acc)
                    } else {
                        None
                    }
                }),
            LogicalPlan::TableScan(TableScan { fetch, .. }) => *fetch,
            LogicalPlan::EmptyRelation(_) => Some(0),
            LogicalPlan::Subquery(_) => None,
            LogicalPlan::SubqueryAlias(SubqueryAlias { input, .. }) => input.max_rows(),
            LogicalPlan::Limit(Limit { fetch, .. }) => *fetch,
            LogicalPlan::Distinct(Distinct { input }) => input.max_rows(),
            LogicalPlan::Values(v) => Some(v.values.len()),
            LogicalPlan::Unnest(_) => None,
            LogicalPlan::Ddl(_)
            | LogicalPlan::Explain(_)
            | LogicalPlan::Analyze(_)
            | LogicalPlan::Dml(_)
            | LogicalPlan::DescribeTable(_)
            | LogicalPlan::Prepare(_)
            | LogicalPlan::Statement(_)
            | LogicalPlan::Extension(_) => None,
        }
    }
}
impl LogicalPlan {
    /// Applies `op` to every subquery referenced from this node's
    /// expressions (EXISTS / IN / scalar subqueries). Each subquery is
    /// wrapped in a synthetic `LogicalPlan::Subquery` so `op` always sees
    /// a plan node.
    pub(crate) fn apply_subqueries<F>(&self, op: &mut F) -> datafusion_common::Result<()>
    where
        F: FnMut(&Self) -> datafusion_common::Result<VisitRecursion>,
    {
        self.inspect_expressions(|expr| {
            inspect_expr_pre(expr, |expr| {
                match expr {
                    Expr::Exists(Exists { subquery, .. })
                    | Expr::InSubquery(InSubquery { subquery, .. })
                    | Expr::ScalarSubquery(subquery) => {
                        // wrap in a synthetic plan node before recursing
                        let synthetic_plan = LogicalPlan::Subquery(subquery.clone());
                        synthetic_plan.apply(op)?;
                    }
                    _ => {}
                }
                Ok::<(), DataFusionError>(())
            })
        })?;
        Ok(())
    }

    /// Same as [`Self::apply_subqueries`], but drives a full
    /// `TreeNodeVisitor` (pre- and post-visit) instead of a closure.
    pub(crate) fn visit_subqueries<V>(&self, v: &mut V) -> datafusion_common::Result<()>
    where
        V: TreeNodeVisitor<N = LogicalPlan>,
    {
        self.inspect_expressions(|expr| {
            inspect_expr_pre(expr, |expr| {
                match expr {
                    Expr::Exists(Exists { subquery, .. })
                    | Expr::InSubquery(InSubquery { subquery, .. })
                    | Expr::ScalarSubquery(subquery) => {
                        // wrap in a synthetic plan node before recursing
                        let synthetic_plan = LogicalPlan::Subquery(subquery.clone());
                        synthetic_plan.visit(v)?;
                    }
                    _ => {}
                }
                Ok::<(), DataFusionError>(())
            })
        })?;
        Ok(())
    }

    /// Recursively replaces `$n` placeholder expressions in this plan tree
    /// with literals taken from `param_values` (1-based: `$1` is index 0).
    pub fn replace_params_with_values(
        &self,
        param_values: &[ScalarValue],
    ) -> Result<LogicalPlan> {
        // rewrite this node's expressions ...
        let new_exprs = self
            .expressions()
            .into_iter()
            .map(|e| Self::replace_placeholders_with_values(e, param_values))
            .collect::<Result<Vec<_>>>()?;
        // ... and recurse into each child
        let new_inputs_with_values = self
            .inputs()
            .into_iter()
            .map(|inp| inp.replace_params_with_values(param_values))
            .collect::<Result<Vec<_>>>()?;
        from_plan(self, &new_exprs, &new_inputs_with_values)
    }

    /// Walks the whole plan tree and returns a map from placeholder id
    /// (e.g. `"$1"`) to its inferred data type (`None` if never inferred).
    ///
    /// # Errors
    /// Returns a plan error when the same id appears with two different
    /// inferred types.
    pub fn get_parameter_types(
        &self,
    ) -> Result<HashMap<String, Option<DataType>>, DataFusionError> {
        let mut param_types: HashMap<String, Option<DataType>> = HashMap::new();
        self.apply(&mut |plan| {
            plan.inspect_expressions(|expr| {
                expr.apply(&mut |expr| {
                    if let Expr::Placeholder(Placeholder { id, data_type }) = expr {
                        let prev = param_types.get(id);
                        match (prev, data_type) {
                            // seen before with a concrete type: must agree
                            (Some(Some(prev)), Some(dt)) => {
                                if prev != dt {
                                    Err(DataFusionError::Plan(format!(
                                        "Conflicting types for {id}"
                                    )))?;
                                }
                            }
                            // first concrete type wins (also overwrites None)
                            (_, Some(dt)) => {
                                param_types.insert(id.clone(), Some(dt.clone()));
                            }
                            _ => {}
                        }
                    }
                    Ok(VisitRecursion::Continue)
                })?;
                Ok::<(), DataFusionError>(())
            })?;
            Ok(VisitRecursion::Continue)
        })?;
        Ok(param_types)
    }

    /// Rewrites one expression, replacing `Expr::Placeholder` with the
    /// matching literal from `param_values` and recursing into scalar
    /// subqueries. Placeholder ids are `$1`, `$2`, ... (1-based).
    fn replace_placeholders_with_values(
        expr: Expr,
        param_values: &[ScalarValue],
    ) -> Result<Expr> {
        expr.transform(&|expr| {
            match &expr {
                Expr::Placeholder(Placeholder { id, data_type }) => {
                    // `$0` is not a valid 1-based placeholder
                    if id.is_empty() || id == "$0" {
                        return Err(DataFusionError::Plan(
                            "Empty placeholder id".to_string(),
                        ));
                    }
                    // strip the leading '$' and convert to a 0-based index
                    let idx = id[1..].parse::<usize>().map_err(|e| {
                        DataFusionError::Internal(format!(
                            "Failed to parse placeholder id: {e}"
                        ))
                    })? - 1;
                    let value = param_values.get(idx).ok_or_else(|| {
                        DataFusionError::Internal(format!(
                            "No value found for placeholder with id {id}"
                        ))
                    })?;
                    // the supplied value must match the inferred type exactly
                    if Some(value.get_datatype()) != *data_type {
                        return Err(DataFusionError::Internal(format!(
                            "Placeholder value type mismatch: expected {:?}, got {:?}",
                            data_type,
                            value.get_datatype()
                        )));
                    }
                    // replace the placeholder with the literal value
                    Ok(Transformed::Yes(Expr::Literal(value.clone())))
                }
                Expr::ScalarSubquery(qry) => {
                    // placeholders may also occur inside a scalar subquery's plan
                    let subquery =
                        Arc::new(qry.subquery.replace_params_with_values(param_values)?);
                    Ok(Transformed::Yes(Expr::ScalarSubquery(Subquery {
                        subquery,
                        outer_ref_columns: qry.outer_ref_columns.clone(),
                    })))
                }
                _ => Ok(Transformed::No(expr)),
            }
        })
    }
}
impl LogicalPlan {
    /// Returns a `Display`able that renders the whole plan tree as an
    /// indented, one-node-per-line listing (without schemas).
    pub fn display_indent(&self) -> impl Display + '_ {
        // Wrapper ties the visitor-based rendering to the Display trait.
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> Display for Wrapper<'a> {
            fn fmt(&self, f: &mut Formatter) -> fmt::Result {
                let with_schema = false;
                let mut visitor = IndentVisitor::new(f, with_schema);
                match self.0.visit(&mut visitor) {
                    Ok(_) => Ok(()),
                    // fmt::Error carries no detail; the underlying error is lost
                    Err(_) => Err(fmt::Error),
                }
            }
        }
        Wrapper(self)
    }

    /// Same as [`Self::display_indent`], but each line also shows the
    /// node's output schema.
    pub fn display_indent_schema(&self) -> impl Display + '_ {
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> Display for Wrapper<'a> {
            fn fmt(&self, f: &mut Formatter) -> fmt::Result {
                let with_schema = true;
                let mut visitor = IndentVisitor::new(f, with_schema);
                match self.0.visit(&mut visitor) {
                    Ok(_) => Ok(()),
                    Err(_) => Err(fmt::Error),
                }
            }
        }
        Wrapper(self)
    }

    /// Returns a `Display`able that renders the plan as a GraphViz `digraph`,
    /// containing two renderings: a plain one and one annotated with schemas.
    pub fn display_graphviz(&self) -> impl Display + '_ {
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> Display for Wrapper<'a> {
            fn fmt(&self, f: &mut Formatter) -> fmt::Result {
                writeln!(
                    f,
                    "// Begin DataFusion GraphViz Plan (see https://graphviz.org)"
                )?;
                writeln!(f, "digraph {{")?;
                let mut visitor = GraphvizVisitor::new(f);
                // first pass: plan without schemas
                visitor.pre_visit_plan("LogicalPlan")?;
                self.0.visit(&mut visitor).map_err(|_| fmt::Error)?;
                visitor.post_visit_plan()?;
                // second pass: same plan with schema details
                visitor.set_with_schema(true);
                visitor.pre_visit_plan("Detailed LogicalPlan")?;
                self.0.visit(&mut visitor).map_err(|_| fmt::Error)?;
                visitor.post_visit_plan()?;
                writeln!(f, "}}")?;
                writeln!(f, "// End DataFusion GraphViz Plan")?;
                Ok(())
            }
        }
        Wrapper(self)
    }

    /// Returns a `Display`able that renders ONLY this node (no children),
    /// one line, as used by the indent/graphviz visitors.
    pub fn display(&self) -> impl Display + '_ {
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> Display for Wrapper<'a> {
            fn fmt(&self, f: &mut Formatter) -> fmt::Result {
                match self.0 {
                    LogicalPlan::EmptyRelation(_) => write!(f, "EmptyRelation"),
                    LogicalPlan::Values(Values { ref values, .. }) => {
                        // show at most 5 rows, then an ellipsis
                        let str_values: Vec<_> = values
                            .iter()
                            .take(5)
                            .map(|row| {
                                let item = row
                                    .iter()
                                    .map(|expr| expr.to_string())
                                    .collect::<Vec<_>>()
                                    .join(", ");
                                format!("({item})")
                            })
                            .collect();
                        let elipse = if values.len() > 5 { "..." } else { "" };
                        write!(f, "Values: {}{}", str_values.join(", "), elipse)
                    }
                    LogicalPlan::TableScan(TableScan {
                        ref source,
                        ref table_name,
                        ref projection,
                        ref filters,
                        ref fetch,
                        ..
                    }) => {
                        // projection indices are rendered as column names
                        let projected_fields = match projection {
                            Some(indices) => {
                                let schema = source.schema();
                                let names: Vec<&str> = indices
                                    .iter()
                                    .map(|i| schema.field(*i).name().as_str())
                                    .collect();
                                format!(" projection=[{}]", names.join(", "))
                            }
                            _ => "".to_string(),
                        };
                        write!(f, "TableScan: {table_name}{projected_fields}")?;
                        if !filters.is_empty() {
                            let mut full_filter = vec![];
                            let mut partial_filter = vec![];
                            let mut unsupported_filters = vec![];
                            let filters: Vec<&Expr> = filters.iter().collect();
                            // classify each filter by the source's pushdown support;
                            // classification is skipped entirely if the call errors
                            if let Ok(results) =
                                source.supports_filters_pushdown(&filters)
                            {
                                filters.iter().zip(results.iter()).for_each(
                                    |(x, res)| match res {
                                        TableProviderFilterPushDown::Exact => {
                                            full_filter.push(x)
                                        }
                                        TableProviderFilterPushDown::Inexact => {
                                            partial_filter.push(x)
                                        }
                                        TableProviderFilterPushDown::Unsupported => {
                                            unsupported_filters.push(x)
                                        }
                                    },
                                );
                            }
                            if !full_filter.is_empty() {
                                write!(f, ", full_filters={full_filter:?}")?;
                            };
                            if !partial_filter.is_empty() {
                                write!(f, ", partial_filters={partial_filter:?}")?;
                            }
                            if !unsupported_filters.is_empty() {
                                write!(
                                    f,
                                    ", unsupported_filters={unsupported_filters:?}"
                                )?;
                            }
                        }
                        if let Some(n) = fetch {
                            write!(f, ", fetch={n}")?;
                        }
                        Ok(())
                    }
                    LogicalPlan::Projection(Projection { ref expr, .. }) => {
                        write!(f, "Projection: ")?;
                        for (i, expr_item) in expr.iter().enumerate() {
                            if i > 0 {
                                write!(f, ", ")?;
                            }
                            write!(f, "{expr_item:?}")?;
                        }
                        Ok(())
                    }
                    LogicalPlan::Dml(DmlStatement { table_name, op, .. }) => {
                        write!(f, "Dml: op=[{op}] table=[{table_name}]")
                    }
                    LogicalPlan::Ddl(ddl) => {
                        write!(f, "{}", ddl.display())
                    }
                    LogicalPlan::Filter(Filter {
                        predicate: ref expr,
                        ..
                    }) => write!(f, "Filter: {expr:?}"),
                    LogicalPlan::Window(Window {
                        ref window_expr, ..
                    }) => {
                        write!(f, "WindowAggr: windowExpr=[{window_expr:?}]")
                    }
                    LogicalPlan::Aggregate(Aggregate {
                        ref group_expr,
                        ref aggr_expr,
                        ..
                    }) => write!(
                        f,
                        "Aggregate: groupBy=[{group_expr:?}], aggr=[{aggr_expr:?}]"
                    ),
                    LogicalPlan::Sort(Sort { expr, fetch, .. }) => {
                        write!(f, "Sort: ")?;
                        for (i, expr_item) in expr.iter().enumerate() {
                            if i > 0 {
                                write!(f, ", ")?;
                            }
                            write!(f, "{expr_item:?}")?;
                        }
                        if let Some(a) = fetch {
                            write!(f, ", fetch={a}")?;
                        }
                        Ok(())
                    }
                    LogicalPlan::Join(Join {
                        on: ref keys,
                        filter,
                        join_constraint,
                        join_type,
                        ..
                    }) => {
                        let join_expr: Vec<String> =
                            keys.iter().map(|(l, r)| format!("{l} = {r}")).collect();
                        let filter_expr = filter
                            .as_ref()
                            .map(|expr| format!(" Filter: {expr}"))
                            .unwrap_or_else(|| "".to_string());
                        match join_constraint {
                            JoinConstraint::On => {
                                write!(
                                    f,
                                    "{} Join: {}{}",
                                    join_type,
                                    join_expr.join(", "),
                                    filter_expr
                                )
                            }
                            JoinConstraint::Using => {
                                write!(
                                    f,
                                    "{} Join: Using {}{}",
                                    join_type,
                                    join_expr.join(", "),
                                    filter_expr,
                                )
                            }
                        }
                    }
                    LogicalPlan::CrossJoin(_) => {
                        write!(f, "CrossJoin:")
                    }
                    LogicalPlan::Repartition(Repartition {
                        partitioning_scheme,
                        ..
                    }) => match partitioning_scheme {
                        Partitioning::RoundRobinBatch(n) => {
                            write!(f, "Repartition: RoundRobinBatch partition_count={n}")
                        }
                        Partitioning::Hash(expr, n) => {
                            let hash_expr: Vec<String> =
                                expr.iter().map(|e| format!("{e:?}")).collect();
                            write!(
                                f,
                                "Repartition: Hash({}) partition_count={}",
                                hash_expr.join(", "),
                                n
                            )
                        }
                        Partitioning::DistributeBy(expr) => {
                            let dist_by_expr: Vec<String> =
                                expr.iter().map(|e| format!("{e:?}")).collect();
                            write!(
                                f,
                                "Repartition: DistributeBy({})",
                                dist_by_expr.join(", "),
                            )
                        }
                    },
                    LogicalPlan::Limit(Limit {
                        ref skip,
                        ref fetch,
                        ..
                    }) => {
                        write!(
                            f,
                            "Limit: skip={}, fetch={}",
                            skip,
                            fetch.map_or_else(|| "None".to_string(), |x| x.to_string())
                        )
                    }
                    LogicalPlan::Subquery(Subquery { .. }) => {
                        write!(f, "Subquery:")
                    }
                    LogicalPlan::SubqueryAlias(SubqueryAlias { ref alias, .. }) => {
                        write!(f, "SubqueryAlias: {alias}")
                    }
                    LogicalPlan::Statement(statement) => {
                        write!(f, "{}", statement.display())
                    }
                    LogicalPlan::Distinct(Distinct { .. }) => {
                        write!(f, "Distinct:")
                    }
                    LogicalPlan::Explain { .. } => write!(f, "Explain"),
                    LogicalPlan::Analyze { .. } => write!(f, "Analyze"),
                    LogicalPlan::Union(_) => write!(f, "Union"),
                    // extension nodes render themselves
                    LogicalPlan::Extension(e) => e.node.fmt_for_explain(f),
                    LogicalPlan::Prepare(Prepare {
                        name, data_types, ..
                    }) => {
                        write!(f, "Prepare: {name:?} {data_types:?} ")
                    }
                    LogicalPlan::DescribeTable(DescribeTable { .. }) => {
                        write!(f, "DescribeTable")
                    }
                    LogicalPlan::Unnest(Unnest { column, .. }) => {
                        write!(f, "Unnest: {column}")
                    }
                }
            }
        }
        Wrapper(self)
    }
}
/// `Debug` for a plan renders the indented tree form (the same output as
/// [`LogicalPlan::display_indent`]) rather than a derived struct dump.
impl Debug for LogicalPlan {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        Display::fmt(&self.display_indent(), f)
    }
}
impl ToStringifiedPlan for LogicalPlan {
    /// Captures the indented rendering of this plan, tagged with `plan_type`.
    fn to_stringified(&self, plan_type: PlanType) -> StringifiedPlan {
        let rendered = format!("{}", self.display_indent());
        StringifiedPlan::new(plan_type, rendered)
    }
}
/// Join type, as specified in the query plan.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum JoinType {
    /// Rows matching on both sides only.
    Inner,
    /// All left rows; unmatched right side padded with nulls.
    Left,
    /// All right rows; unmatched left side padded with nulls.
    Right,
    /// All rows from both sides, padded with nulls where unmatched.
    Full,
    /// Left rows that have at least one match on the right.
    LeftSemi,
    /// Right rows that have at least one match on the left.
    RightSemi,
    /// Left rows that have no match on the right.
    LeftAnti,
    /// Right rows that have no match on the left.
    RightAnti,
}
impl JoinType {
    /// Returns true for the join types that can emit null-padded rows for
    /// unmatched input (LEFT, RIGHT and FULL outer joins).
    pub fn is_outer(self) -> bool {
        matches!(self, JoinType::Left | JoinType::Right | JoinType::Full)
    }
}
impl Display for JoinType {
    /// Writes the variant's canonical CamelCase name (e.g. "LeftSemi").
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let name = match self {
            JoinType::Inner => "Inner",
            JoinType::Left => "Left",
            JoinType::Right => "Right",
            JoinType::Full => "Full",
            JoinType::LeftSemi => "LeftSemi",
            JoinType::RightSemi => "RightSemi",
            JoinType::LeftAnti => "LeftAnti",
            JoinType::RightAnti => "RightAnti",
        };
        write!(f, "{}", name)
    }
}
impl FromStr for JoinType {
    type Err = DataFusionError;

    /// Parses a case-insensitive join-type name ("inner", "LeftSemi", ...).
    /// Unknown names yield `DataFusionError::NotImplemented`.
    fn from_str(s: &str) -> Result<Self> {
        match s.to_uppercase().as_str() {
            "INNER" => Ok(JoinType::Inner),
            "LEFT" => Ok(JoinType::Left),
            "RIGHT" => Ok(JoinType::Right),
            "FULL" => Ok(JoinType::Full),
            "LEFTSEMI" => Ok(JoinType::LeftSemi),
            "RIGHTSEMI" => Ok(JoinType::RightSemi),
            "LEFTANTI" => Ok(JoinType::LeftAnti),
            "RIGHTANTI" => Ok(JoinType::RightAnti),
            // `other` is the uppercased input, matching the original message
            other => Err(DataFusionError::NotImplemented(format!(
                "The join type {other} does not exist or is not implemented"
            ))),
        }
    }
}
/// How the equi-join keys were specified in the query.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum JoinConstraint {
    /// `JOIN ... ON a = b`
    On,
    /// `JOIN ... USING (col, ...)`
    Using,
}
/// A relation with a known schema but no stored rows.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct EmptyRelation {
    /// When true, emit a single (empty) row instead of zero rows
    pub produce_one_row: bool,
    /// The output schema
    pub schema: DFSchemaRef,
}
/// Constant rows built from lists of literal expressions (`VALUES ...`).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Values {
    /// The schema shared by all rows
    pub schema: DFSchemaRef,
    /// One inner `Vec<Expr>` per row
    pub values: Vec<Vec<Expr>>,
}
/// Evaluates an arbitrary list of expressions on its input; one output
/// field per expression.
#[derive(Clone, PartialEq, Eq, Hash)]
// `#[non_exhaustive]`: construct via `try_new`/`try_new_with_schema`, which
// validate the expr/schema arity invariant.
#[non_exhaustive]
pub struct Projection {
    /// The list of expressions to evaluate
    pub expr: Vec<Expr>,
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// The schema description of the output (one field per expression)
    pub schema: DFSchemaRef,
}
impl Projection {
    /// Builds a projection, deriving the output schema from `expr`
    /// evaluated against `input`'s schema (input metadata is preserved).
    pub fn try_new(expr: Vec<Expr>, input: Arc<LogicalPlan>) -> Result<Self> {
        let fields = exprlist_to_fields(&expr, &input)?;
        let metadata = input.schema().metadata().clone();
        let schema = Arc::new(DFSchema::new_with_metadata(fields, metadata)?);
        Self::try_new_with_schema(expr, input, schema)
    }

    /// Builds a projection from a caller-supplied output schema, checking
    /// that it has exactly one field per expression.
    pub fn try_new_with_schema(
        expr: Vec<Expr>,
        input: Arc<LogicalPlan>,
        schema: DFSchemaRef,
    ) -> Result<Self> {
        if expr.len() == schema.fields().len() {
            Ok(Self {
                expr,
                input,
                schema,
            })
        } else {
            Err(DataFusionError::Plan(format!("Projection has mismatch between number of expressions ({}) and number of fields in schema ({})", expr.len(), schema.fields().len())))
        }
    }

    /// Builds an identity projection: one column expression per field of
    /// `schema`. Infallible because the arity invariant holds by construction.
    pub fn new_from_schema(input: Arc<LogicalPlan>, schema: DFSchemaRef) -> Self {
        let expr = schema
            .fields()
            .iter()
            .map(|field| Expr::Column(field.qualified_column()))
            .collect();
        Self {
            expr,
            input,
            schema,
        }
    }

    /// Downcasts a plan node to `&Projection`, erroring on any other variant.
    pub fn try_from_plan(plan: &LogicalPlan) -> Result<&Projection> {
        if let LogicalPlan::Projection(projection) = plan {
            Ok(projection)
        } else {
            plan_err!("Could not coerce into Projection!")
        }
    }
}
/// Aliases the output relation of its input (e.g. `FROM (...) AS t`).
#[derive(Clone, PartialEq, Eq, Hash)]
// `#[non_exhaustive]`: construct via `try_new`, which builds the
// requalified schema.
#[non_exhaustive]
pub struct SubqueryAlias {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// The alias applied to the relation
    pub alias: OwnedTableReference,
    /// The input schema with every field requalified by `alias`
    pub schema: DFSchemaRef,
}
impl SubqueryAlias {
    /// Wraps `plan` under `alias`, requalifying every output field with
    /// the alias as its table reference.
    pub fn try_new(
        plan: LogicalPlan,
        alias: impl Into<OwnedTableReference>,
    ) -> Result<Self> {
        let alias = alias.into();
        // Round-trip through an Arrow schema to drop the old qualifiers,
        // then requalify everything with the new alias.
        let arrow_schema: Schema = plan.schema().as_ref().clone().into();
        let qualified = DFSchema::try_from_qualified_schema(&alias, &arrow_schema)?;
        Ok(SubqueryAlias {
            input: Arc::new(plan),
            alias,
            schema: DFSchemaRef::new(qualified),
        })
    }
}
/// Filters rows from its input by a boolean predicate.
#[derive(Clone, PartialEq, Eq, Hash)]
// `#[non_exhaustive]`: construct via `try_new`, which validates the predicate.
#[non_exhaustive]
pub struct Filter {
    /// The boolean expression that selects rows
    pub predicate: Expr,
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
}
impl Filter {
    /// Builds a filter node, rejecting predicates that are known to be
    /// non-boolean or that carry an alias.
    pub fn try_new(predicate: Expr, input: Arc<LogicalPlan>) -> Result<Self> {
        // Best-effort type check: when the predicate's type can be resolved
        // it must be Boolean; unresolvable types are deliberately allowed.
        if let Ok(predicate_type) = predicate.get_type(input.schema()) {
            if predicate_type != DataType::Boolean {
                return Err(DataFusionError::Plan(format!(
                    "Cannot create filter with non-boolean predicate '{predicate}' returning {predicate_type}"
                )));
            }
        }
        // A filter predicate must not be aliased — the alias would leak into
        // downstream column resolution.
        match predicate {
            Expr::Alias(expr, alias) => Err(DataFusionError::Plan(format!(
                "Attempted to create Filter predicate with \
                expression `{expr}` aliased as '{alias}'. Filter predicates should not be \
                aliased."
            ))),
            predicate => Ok(Self { predicate, input }),
        }
    }

    /// Downcasts a plan node to `&Filter`, erroring on any other variant.
    pub fn try_from_plan(plan: &LogicalPlan) -> Result<&Filter> {
        if let LogicalPlan::Filter(filter) = plan {
            Ok(filter)
        } else {
            plan_err!("Could not coerce into Filter!")
        }
    }
}
/// Evaluates window (OVER) expressions over its input.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Window {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// The window function expressions
    pub window_expr: Vec<Expr>,
    /// The output schema: input fields plus one per window expression
    pub schema: DFSchemaRef,
}
/// Reads rows from a registered table/provider.
///
/// Only `Clone` is derived: `source` is a `dyn TableSource`, so
/// `PartialEq`/`Eq`/`Hash` are implemented manually below and skip it.
#[derive(Clone)]
pub struct TableScan {
    /// The name of the table to scan
    pub table_name: OwnedTableReference,
    /// The provider supplying the data
    pub source: Arc<dyn TableSource>,
    /// Optional column indices to project from the source schema
    pub projection: Option<Vec<usize>>,
    /// The schema after applying `projection`
    pub projected_schema: DFSchemaRef,
    /// Filters that may be pushed down to the provider
    pub filters: Vec<Expr>,
    /// Optional row-count cap pushed down to the provider
    pub fetch: Option<usize>,
}
impl PartialEq for TableScan {
    /// Equality deliberately ignores `source`: `dyn TableSource` cannot be
    /// compared, so two scans are equal when every other field matches.
    fn eq(&self, other: &Self) -> bool {
        (
            &self.table_name,
            &self.projection,
            &self.projected_schema,
            &self.filters,
            &self.fetch,
        ) == (
            &other.table_name,
            &other.projection,
            &other.projected_schema,
            &other.filters,
            &other.fetch,
        )
    }
}
// `Eq` is sound because the manual `PartialEq` compares only fields whose
// equality is already reflexive; the un-comparable `source` is excluded.
impl Eq for TableScan {}
impl Hash for TableScan {
fn hash<H: Hasher>(&self, state: &mut H) {
self.table_name.hash(state);
self.projection.hash(state);
self.projected_schema.hash(state);
self.filters.hash(state);
self.fetch.hash(state);
}
}
/// Cartesian product of two inputs.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct CrossJoin {
    /// The left input plan
    pub left: Arc<LogicalPlan>,
    /// The right input plan
    pub right: Arc<LogicalPlan>,
    /// The combined output schema
    pub schema: DFSchemaRef,
}
/// Repartitions its input according to `partitioning_scheme`
/// (round-robin, hash, or distribute-by).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Repartition {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// How rows are assigned to output partitions
    pub partitioning_scheme: Partitioning,
}
/// Concatenates the rows of multiple inputs sharing a compatible schema.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Union {
    /// The child plans to concatenate
    pub inputs: Vec<Arc<LogicalPlan>>,
    /// The common output schema
    pub schema: DFSchemaRef,
}
/// A prepared statement: a named plan containing `$n` placeholders with
/// declared parameter types.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Prepare {
    /// The statement name
    pub name: String,
    /// Declared types for `$1`, `$2`, ... in order
    pub data_types: Vec<DataType>,
    /// The plan containing the placeholders
    pub input: Arc<LogicalPlan>,
}
/// `DESCRIBE <table>`: reports the schema of a table.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct DescribeTable {
    /// The Arrow schema of the table being described
    pub schema: Arc<Schema>,
    /// The output schema of the DESCRIBE itself (column/type/nullable rows)
    pub dummy_schema: DFSchemaRef,
}
/// `EXPLAIN`: renders the plan(s) instead of executing them.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Explain {
    /// Whether to include all intermediate plan stages
    pub verbose: bool,
    /// The plan being explained
    pub plan: Arc<LogicalPlan>,
    /// Rendered plans collected so far, one per stage
    pub stringified_plans: Vec<StringifiedPlan>,
    /// The output schema (see `LogicalPlan::explain_schema`)
    pub schema: DFSchemaRef,
    /// Whether logical optimization completed without error
    pub logical_optimization_succeeded: bool,
}
/// `EXPLAIN ANALYZE`: executes `input` and reports execution metrics.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Analyze {
    /// Whether to include extra detail in the report
    pub verbose: bool,
    /// The plan to execute and measure
    pub input: Arc<LogicalPlan>,
    /// The output schema of the report
    pub schema: DFSchemaRef,
}
/// A user-defined logical plan node.
// `Hash` is derived while `PartialEq` is manual (delegating to the node's
// own equality), hence the clippy allowance.
#[allow(clippy::derived_hash_with_manual_eq)]
#[derive(Clone, Eq, Hash)]
pub struct Extension {
    /// The user-defined node implementation
    pub node: Arc<dyn UserDefinedLogicalNode>,
}
impl PartialEq for Extension {
    // Delegates to the user-defined node's own equality (dynamic dispatch
    // through the trait object).
    fn eq(&self, other: &Self) -> bool {
        self.node.eq(&other.node)
    }
}
/// Skips `skip` rows, then emits at most `fetch` rows (all remaining rows
/// when `fetch` is `None`).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Limit {
    /// Number of rows to skip before emitting
    pub skip: usize,
    /// Maximum number of rows to emit; `None` means no limit
    pub fetch: Option<usize>,
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
}
/// Removes duplicate rows from its input (`SELECT DISTINCT`).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Distinct {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
}
/// Groups its input by `group_expr` and evaluates `aggr_expr` per group.
#[derive(Clone, PartialEq, Eq, Hash)]
// `#[non_exhaustive]`: construct via `try_new`/`try_new_with_schema`,
// which validate the schema arity invariant.
#[non_exhaustive]
pub struct Aggregate {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// Grouping expressions (may contain grouping sets)
    pub group_expr: Vec<Expr>,
    /// Aggregate expressions
    pub aggr_expr: Vec<Expr>,
    /// Output schema: grouping fields followed by aggregate fields
    pub schema: DFSchemaRef,
}
impl Aggregate {
    /// Builds an aggregate, expanding grouping-set shorthands and deriving
    /// the output schema (grouping fields followed by aggregate fields,
    /// input metadata preserved).
    pub fn try_new(
        input: Arc<LogicalPlan>,
        group_expr: Vec<Expr>,
        aggr_expr: Vec<Expr>,
    ) -> Result<Self> {
        // Expand CUBE/ROLLUP-style shorthands into explicit grouping sets.
        let group_expr = enumerate_grouping_sets(group_expr)?;
        let output_group_exprs: Vec<Expr> =
            grouping_set_to_exprlist(group_expr.as_slice())?;
        let fields = exprlist_to_fields(
            output_group_exprs.iter().chain(aggr_expr.iter()),
            &input,
        )?;
        let metadata = input.schema().metadata().clone();
        let schema = DFSchema::new_with_metadata(fields, metadata)?;
        Self::try_new_with_schema(input, group_expr, aggr_expr, Arc::new(schema))
    }

    /// Builds an aggregate from a caller-supplied output schema, checking
    /// that at least one expression exists and that the schema's field
    /// count matches grouping fields + aggregate fields.
    pub fn try_new_with_schema(
        input: Arc<LogicalPlan>,
        group_expr: Vec<Expr>,
        aggr_expr: Vec<Expr>,
        schema: DFSchemaRef,
    ) -> Result<Self> {
        if group_expr.is_empty() && aggr_expr.is_empty() {
            return Err(DataFusionError::Plan(
                "Aggregate requires at least one grouping or aggregate expression"
                    .to_string(),
            ));
        }
        let expected_fields = grouping_set_expr_count(&group_expr)? + aggr_expr.len();
        if schema.fields().len() != expected_fields {
            return Err(DataFusionError::Plan(format!(
                "Aggregate schema has wrong number of fields. Expected {} got {}",
                expected_fields,
                schema.fields().len()
            )));
        }
        Ok(Self {
            input,
            group_expr,
            aggr_expr,
            schema,
        })
    }

    /// Downcasts a plan node to `&Aggregate`, erroring on any other variant.
    pub fn try_from_plan(plan: &LogicalPlan) -> Result<&Aggregate> {
        if let LogicalPlan::Aggregate(aggregate) = plan {
            Ok(aggregate)
        } else {
            plan_err!("Could not coerce into Aggregate!")
        }
    }
}
/// Sorts its input by `expr`, optionally keeping only the first `fetch` rows.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Sort {
    /// The sort expressions
    pub expr: Vec<Expr>,
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// Optional row-count cap applied after sorting (top-k)
    pub fetch: Option<usize>,
}
/// Joins two inputs on equi-join key pairs and an optional extra filter.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Join {
    /// The left input plan
    pub left: Arc<LogicalPlan>,
    /// The right input plan
    pub right: Arc<LogicalPlan>,
    /// Equi-join key pairs: (left expression, right expression)
    pub on: Vec<(Expr, Expr)>,
    /// Additional non-equi join condition, if any
    pub filter: Option<Expr>,
    /// The join type (inner/outer/semi/anti)
    pub join_type: JoinType,
    /// Whether the keys came from `ON` or `USING`
    pub join_constraint: JoinConstraint,
    /// The combined output schema
    pub schema: DFSchemaRef,
    /// When true, `null = null` is treated as a match on the join keys
    pub null_equals_null: bool,
}
impl Join {
    /// Rebuilds a join from `original` with new (possibly projected)
    /// inputs and new equi-join columns; filter, join type/constraint and
    /// null-equality semantics are inherited from the original join.
    pub fn try_new_with_project_input(
        original: &LogicalPlan,
        left: Arc<LogicalPlan>,
        right: Arc<LogicalPlan>,
        column_on: (Vec<Column>, Vec<Column>),
    ) -> Result<Self> {
        let original_join = if let LogicalPlan::Join(join) = original {
            join
        } else {
            return plan_err!("Could not create join with project input");
        };
        // pair up the left/right key columns positionally
        let (left_cols, right_cols) = column_on;
        let on = left_cols
            .into_iter()
            .zip(right_cols.into_iter())
            .map(|(l, r)| (Expr::Column(l), Expr::Column(r)))
            .collect();
        // the schema must be rebuilt from the NEW inputs
        let join_schema =
            build_join_schema(left.schema(), right.schema(), &original_join.join_type)?;
        Ok(Join {
            left,
            right,
            on,
            filter: original_join.filter.clone(),
            join_type: original_join.join_type,
            join_constraint: original_join.join_constraint,
            schema: Arc::new(join_schema),
            null_equals_null: original_join.null_equals_null,
        })
    }
}
/// A subquery referenced from an expression, together with the outer
/// (correlated) columns it references.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Subquery {
    /// The subquery's plan
    pub subquery: Arc<LogicalPlan>,
    /// Outer-reference column expressions used inside the subquery
    pub outer_ref_columns: Vec<Expr>,
}
impl Subquery {
    /// Extracts the `Subquery` behind a scalar-subquery expression,
    /// looking through any wrapping casts.
    pub fn try_from_expr(plan: &Expr) -> Result<&Subquery> {
        match plan {
            Expr::ScalarSubquery(subquery) => Ok(subquery),
            Expr::Cast(cast) => Self::try_from_expr(cast.expr.as_ref()),
            _ => plan_err!("Could not coerce into ScalarSubquery!"),
        }
    }

    /// Returns a copy of this subquery that points at `plan`, keeping the
    /// recorded outer-reference columns.
    pub fn with_plan(&self, plan: Arc<LogicalPlan>) -> Subquery {
        let outer_ref_columns = self.outer_ref_columns.clone();
        Subquery {
            subquery: plan,
            outer_ref_columns,
        }
    }
}
impl Debug for Subquery {
    /// Deliberately opaque: printing the nested plan here would flood
    /// debug output of any expression containing a subquery.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str("<subquery>")
    }
}
/// How rows are distributed across output partitions by a repartition.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Partitioning {
    /// Distribute batches round-robin over the given number of partitions
    /// NOTE(review): semantics inferred from the variant name — confirm.
    RoundRobinBatch(usize),
    /// Hash-partition on the given expressions into the given number of partitions
    Hash(Vec<Expr>, usize),
    /// Distribute according to the given expressions (DISTRIBUTE BY)
    /// NOTE(review): semantics inferred from the variant name — confirm.
    DistributeBy(Vec<Expr>),
}
/// Identifies which plan (and which analysis/optimization stage) a
/// stringified plan corresponds to. The `Display` impl below defines
/// the exact user-facing label for each variant.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum PlanType {
    /// The logical plan as initially produced
    InitialLogicalPlan,
    /// The logical plan after a particular analyzer rule ran
    AnalyzedLogicalPlan {
        /// Name of the analyzer rule
        analyzer_name: String,
    },
    /// The logical plan after all analyzer rules
    FinalAnalyzedLogicalPlan,
    /// The logical plan after a particular optimizer rule ran
    OptimizedLogicalPlan {
        /// Name of the optimizer rule
        optimizer_name: String,
    },
    /// The final logical plan
    FinalLogicalPlan,
    /// The physical plan as initially produced
    InitialPhysicalPlan,
    /// The physical plan after a particular physical optimizer rule ran
    OptimizedPhysicalPlan {
        /// Name of the physical optimizer rule
        optimizer_name: String,
    },
    /// The final physical plan
    FinalPhysicalPlan,
}
impl Display for PlanType {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
PlanType::InitialLogicalPlan => write!(f, "initial_logical_plan"),
PlanType::AnalyzedLogicalPlan { analyzer_name } => {
write!(f, "logical_plan after {analyzer_name}")
}
PlanType::FinalAnalyzedLogicalPlan => write!(f, "analyzed_logical_plan"),
PlanType::OptimizedLogicalPlan { optimizer_name } => {
write!(f, "logical_plan after {optimizer_name}")
}
PlanType::FinalLogicalPlan => write!(f, "logical_plan"),
PlanType::InitialPhysicalPlan => write!(f, "initial_physical_plan"),
PlanType::OptimizedPhysicalPlan { optimizer_name } => {
write!(f, "physical_plan after {optimizer_name}")
}
PlanType::FinalPhysicalPlan => write!(f, "physical_plan"),
}
}
}
/// A plan rendered as text, tagged with the stage it came from.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StringifiedPlan {
    /// Which plan/stage this text represents
    pub plan_type: PlanType,
    /// The rendered plan text (`Arc` makes clones cheap)
    pub plan: Arc<String>,
}
impl StringifiedPlan {
    /// Construct from a stage tag and anything convertible to a `String`.
    pub fn new(plan_type: PlanType, plan: impl Into<String>) -> Self {
        let plan = Arc::new(plan.into());
        Self { plan_type, plan }
    }

    /// Whether this entry belongs in EXPLAIN output: final logical and
    /// physical plans are always shown; everything else only in verbose mode.
    pub fn should_display(&self, verbose_mode: bool) -> bool {
        verbose_mode
            || matches!(
                self.plan_type,
                PlanType::FinalLogicalPlan | PlanType::FinalPhysicalPlan
            )
    }
}
/// Renders `self` as a [`StringifiedPlan`].
pub trait ToStringifiedPlan {
    /// Stringify this object, labeling the result with `plan_type`.
    fn to_stringified(&self, plan_type: PlanType) -> StringifiedPlan;
}
/// Unnests a column of the input.
/// NOTE(review): the unnest semantics are implemented elsewhere — confirm.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Unnest {
    /// The incoming logical plan
    pub input: Arc<LogicalPlan>,
    /// The column to unnest
    pub column: Column,
    /// Output schema after unnesting
    pub schema: DFSchemaRef,
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::logical_plan::table_scan;
    use crate::{col, exists, in_subquery, lit};
    use arrow::datatypes::{DataType, Field, Schema};
    use datafusion_common::tree_node::TreeNodeVisitor;
    use datafusion_common::{DFSchema, TableReference};
    use std::collections::HashMap;

    /// Five-column Arrow schema shared by the display-oriented tests.
    fn employee_schema() -> Schema {
        Schema::new(vec![
            Field::new("id", DataType::Int32, false),
            Field::new("first_name", DataType::Utf8, false),
            Field::new("last_name", DataType::Utf8, false),
            Field::new("state", DataType::Utf8, false),
            Field::new("salary", DataType::Int32, false),
        ])
    }

    /// Plan under test: Projection -> Filter(state IN (<subquery>)) -> TableScan,
    /// where the subquery scans only column 3 ("state").
    fn display_plan() -> Result<LogicalPlan> {
        let plan1 = table_scan(Some("employee_csv"), &employee_schema(), Some(vec![3]))?
            .build()?;
        table_scan(Some("employee_csv"), &employee_schema(), Some(vec![0, 3]))?
            .filter(in_subquery(col("state"), Arc::new(plan1)))?
            .project(vec![col("id")])?
            .build()
    }

    /// Indented (no-schema) rendering of the plan tree.
    /// Note: `\` at end of a string-literal line continues the literal,
    /// skipping the newline and the next line's leading whitespace.
    #[test]
    fn test_display_indent() -> Result<()> {
        let plan = display_plan()?;
        let expected = "Projection: employee_csv.id\
        \n Filter: employee_csv.state IN (<subquery>)\
        \n Subquery:\
        \n TableScan: employee_csv projection=[state]\
        \n TableScan: employee_csv projection=[id, state]";
        assert_eq!(expected, format!("{}", plan.display_indent()));
        Ok(())
    }

    /// Indented rendering with each node's schema appended.
    #[test]
    fn test_display_indent_schema() -> Result<()> {
        let plan = display_plan()?;
        let expected = "Projection: employee_csv.id [id:Int32]\
        \n Filter: employee_csv.state IN (<subquery>) [id:Int32, state:Utf8]\
        \n Subquery: [state:Utf8]\
        \n TableScan: employee_csv projection=[state] [state:Utf8]\
        \n TableScan: employee_csv projection=[id, state] [id:Int32, state:Utf8]";
        assert_eq!(expected, format!("{}", plan.display_indent_schema()));
        Ok(())
    }

    /// A subquery embedded in a projection expression (EXISTS ... AS alias)
    /// is rendered beneath the projection.
    #[test]
    fn test_display_subquery_alias() -> Result<()> {
        let plan1 = table_scan(Some("employee_csv"), &employee_schema(), Some(vec![3]))?
            .build()?;
        let plan1 = Arc::new(plan1);
        let plan =
            table_scan(Some("employee_csv"), &employee_schema(), Some(vec![0, 3]))?
                .project(vec![col("id"), exists(plan1).alias("exists")])?
                .build();
        let expected = "Projection: employee_csv.id, EXISTS (<subquery>) AS exists\
        \n Subquery:\
        \n TableScan: employee_csv projection=[state]\
        \n TableScan: employee_csv projection=[id, state]";
        assert_eq!(expected, format!("{}", plan?.display_indent()));
        Ok(())
    }

    /// Graphviz rendering contains begin/end markers and per-node labels,
    /// both with and without schema annotations.
    #[test]
    fn test_display_graphviz() -> Result<()> {
        let plan = display_plan()?;
        let graphviz = format!("{}", plan.display_graphviz());
        assert!(
            graphviz.contains(
                r#"// Begin DataFusion GraphViz Plan (see https://graphviz.org)"#
            ),
            "\n{}",
            plan.display_graphviz()
        );
        assert!(
            graphviz.contains(
                r#"[shape=box label="TableScan: employee_csv projection=[id, state]"]"#
            ),
            "\n{}",
            plan.display_graphviz()
        );
        assert!(graphviz.contains(r#"[shape=box label="TableScan: employee_csv projection=[id, state]\nSchema: [id:Int32, state:Utf8]"]"#),
            "\n{}", plan.display_graphviz());
        assert!(
            graphviz.contains(r#"// End DataFusion GraphViz Plan"#),
            "\n{}",
            plan.display_graphviz()
        );
        Ok(())
    }

    /// Visitor that records the order of pre/post visits, errors on
    /// unexpected node types, and never stops early.
    #[derive(Debug, Default)]
    struct OkVisitor {
        // Log of "pre_visit X" / "post_visit X" entries, in visit order
        strings: Vec<String>,
    }

    impl TreeNodeVisitor for OkVisitor {
        type N = LogicalPlan;

        fn pre_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            let s = match plan {
                LogicalPlan::Projection { .. } => "pre_visit Projection",
                LogicalPlan::Filter { .. } => "pre_visit Filter",
                LogicalPlan::TableScan { .. } => "pre_visit TableScan",
                _ => {
                    return Err(DataFusionError::NotImplemented(
                        "unknown plan type".to_string(),
                    ))
                }
            };
            self.strings.push(s.into());
            Ok(VisitRecursion::Continue)
        }

        fn post_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            let s = match plan {
                LogicalPlan::Projection { .. } => "post_visit Projection",
                LogicalPlan::Filter { .. } => "post_visit Filter",
                LogicalPlan::TableScan { .. } => "post_visit TableScan",
                _ => {
                    return Err(DataFusionError::NotImplemented(
                        "unknown plan type".to_string(),
                    ))
                }
            };
            self.strings.push(s.into());
            Ok(VisitRecursion::Continue)
        }
    }

    /// Pre-visits run parent-first (top-down); post-visits child-first (bottom-up).
    #[test]
    fn visit_order() {
        let mut visitor = OkVisitor::default();
        let plan = test_plan();
        let res = plan.visit(&mut visitor);
        assert!(res.is_ok());
        assert_eq!(
            visitor.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
                "post_visit Filter",
                "post_visit Projection",
            ]
        );
    }

    /// Countdown helper: `dec` reports true exactly when the counter
    /// has reached zero (and a default-constructed counter never fires).
    #[derive(Debug, Default)]
    struct OptionalCounter {
        // None = counter disabled; Some(n) = fire after n more decrements
        val: Option<usize>,
    }

    impl OptionalCounter {
        fn new(val: usize) -> Self {
            Self { val: Some(val) }
        }

        // Returns true once the counter hits zero; otherwise decrements
        // (when enabled) and returns false.
        fn dec(&mut self) -> bool {
            if Some(0) == self.val {
                true
            } else {
                self.val = self.val.take().map(|i| i - 1);
                false
            }
        }
    }

    /// Visitor that requests `VisitRecursion::Stop` after a configurable
    /// number of pre- or post-visits, delegating the rest to `OkVisitor`.
    #[derive(Debug, Default)]
    struct StoppingVisitor {
        inner: OkVisitor,
        // When this reaches zero, pre_visit returns Stop
        return_false_from_pre_in: OptionalCounter,
        // When this reaches zero, post_visit returns Stop
        return_false_from_post_in: OptionalCounter,
    }

    impl TreeNodeVisitor for StoppingVisitor {
        type N = LogicalPlan;

        fn pre_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            if self.return_false_from_pre_in.dec() {
                return Ok(VisitRecursion::Stop);
            }
            self.inner.pre_visit(plan)?;
            Ok(VisitRecursion::Continue)
        }

        fn post_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            if self.return_false_from_post_in.dec() {
                return Ok(VisitRecursion::Stop);
            }
            self.inner.post_visit(plan)
        }
    }

    /// Stopping from pre_visit halts traversal: nothing below the stop
    /// point is visited and no post-visits are recorded.
    #[test]
    fn early_stopping_pre_visit() {
        let mut visitor = StoppingVisitor {
            return_false_from_pre_in: OptionalCounter::new(2),
            ..Default::default()
        };
        let plan = test_plan();
        let res = plan.visit(&mut visitor);
        assert!(res.is_ok());
        assert_eq!(
            visitor.inner.strings,
            vec!["pre_visit Projection", "pre_visit Filter"]
        );
    }

    /// Stopping from post_visit halts before the remaining post-visits run.
    #[test]
    fn early_stopping_post_visit() {
        let mut visitor = StoppingVisitor {
            return_false_from_post_in: OptionalCounter::new(1),
            ..Default::default()
        };
        let plan = test_plan();
        let res = plan.visit(&mut visitor);
        assert!(res.is_ok());
        assert_eq!(
            visitor.inner.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
            ]
        );
    }

    /// Visitor that returns an error after a configurable number of
    /// pre- or post-visits, delegating the rest to `OkVisitor`.
    #[derive(Debug, Default)]
    struct ErrorVisitor {
        inner: OkVisitor,
        // When this reaches zero, pre_visit returns an error
        return_error_from_pre_in: OptionalCounter,
        // When this reaches zero, post_visit returns an error
        return_error_from_post_in: OptionalCounter,
    }

    impl TreeNodeVisitor for ErrorVisitor {
        type N = LogicalPlan;

        fn pre_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            if self.return_error_from_pre_in.dec() {
                return Err(DataFusionError::NotImplemented(
                    "Error in pre_visit".to_string(),
                ));
            }
            self.inner.pre_visit(plan)
        }

        fn post_visit(&mut self, plan: &LogicalPlan) -> Result<VisitRecursion> {
            if self.return_error_from_post_in.dec() {
                return Err(DataFusionError::NotImplemented(
                    "Error in post_visit".to_string(),
                ));
            }
            self.inner.post_visit(plan)
        }
    }

    /// An error in pre_visit propagates out of `visit` and aborts traversal.
    #[test]
    fn error_pre_visit() {
        let mut visitor = ErrorVisitor {
            return_error_from_pre_in: OptionalCounter::new(2),
            ..Default::default()
        };
        let plan = test_plan();
        let res = plan.visit(&mut visitor);
        if let Err(DataFusionError::NotImplemented(e)) = res {
            assert_eq!("Error in pre_visit", e);
        } else {
            panic!("Expected an error");
        }
        assert_eq!(
            visitor.inner.strings,
            vec!["pre_visit Projection", "pre_visit Filter"]
        );
    }

    /// An error in post_visit propagates out of `visit` and aborts traversal.
    #[test]
    fn error_post_visit() {
        let mut visitor = ErrorVisitor {
            return_error_from_post_in: OptionalCounter::new(1),
            ..Default::default()
        };
        let plan = test_plan();
        let res = plan.visit(&mut visitor);
        if let Err(DataFusionError::NotImplemented(e)) = res {
            assert_eq!("Error in post_visit", e);
        } else {
            panic!("Expected an error");
        }
        assert_eq!(
            visitor.inner.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
            ]
        );
    }

    /// Building a projection whose expression count disagrees with the
    /// supplied schema must fail with a descriptive planning error.
    #[test]
    fn projection_expr_schema_mismatch() -> Result<()> {
        let empty_schema = Arc::new(DFSchema::new_with_metadata(vec![], HashMap::new())?);
        let p = Projection::try_new_with_schema(
            vec![col("a")],
            Arc::new(LogicalPlan::EmptyRelation(EmptyRelation {
                produce_one_row: false,
                schema: empty_schema.clone(),
            })),
            empty_schema,
        );
        assert_eq!("Error during planning: Projection has mismatch between number of expressions (1) and number of fields in schema (0)", format!("{}", p.err().unwrap()));
        Ok(())
    }

    /// Three-node plan used by the visitor tests:
    /// Projection -> Filter -> TableScan.
    fn test_plan() -> LogicalPlan {
        let schema = Schema::new(vec![
            Field::new("id", DataType::Int32, false),
            Field::new("state", DataType::Utf8, false),
        ]);
        table_scan(TableReference::none(), &schema, Some(vec![0, 1]))
            .unwrap()
            .filter(col("state").eq(lit("CO")))
            .unwrap()
            .project(vec![col("id")])
            .unwrap()
            .build()
            .unwrap()
    }

    /// Minimal user-defined node: only `schema` is implemented; every
    /// other trait method panics if called, so tests detect stray calls.
    #[derive(Debug)]
    struct NoChildExtension {
        empty_schema: DFSchemaRef,
    }

    impl NoChildExtension {
        fn empty() -> Self {
            Self {
                empty_schema: Arc::new(DFSchema::empty()),
            }
        }
    }

    impl UserDefinedLogicalNode for NoChildExtension {
        fn as_any(&self) -> &dyn std::any::Any {
            unimplemented!()
        }

        fn name(&self) -> &str {
            unimplemented!()
        }

        fn inputs(&self) -> Vec<&LogicalPlan> {
            panic!("Should not be called")
        }

        fn schema(&self) -> &DFSchemaRef {
            &self.empty_schema
        }

        fn expressions(&self) -> Vec<Expr> {
            unimplemented!()
        }

        fn fmt_for_explain(&self, _: &mut fmt::Formatter) -> fmt::Result {
            unimplemented!()
        }

        fn from_template(
            &self,
            _: &[Expr],
            _: &[LogicalPlan],
        ) -> Arc<dyn UserDefinedLogicalNode> {
            unimplemented!()
        }

        fn dyn_hash(&self, _: &mut dyn Hasher) {
            unimplemented!()
        }

        fn dyn_eq(&self, _: &dyn UserDefinedLogicalNode) -> bool {
            unimplemented!()
        }
    }

    /// An extension node with no children exposes exactly its own
    /// (empty) schema via the deprecated `all_schemas`.
    #[test]
    #[allow(deprecated)]
    fn test_extension_all_schemas() {
        let plan = LogicalPlan::Extension(Extension {
            node: Arc::new(NoChildExtension::empty()),
        });
        let schemas = plan.all_schemas();
        assert_eq!(1, schemas.len());
        assert_eq!(0, schemas[0].fields().len());
    }

    /// Replacing parameters must reject invalid placeholder ids:
    /// an empty name, and "$0" (rejected — presumably because ids are
    /// 1-based; confirm against `replace_params_with_values`).
    #[test]
    fn test_replace_invalid_placeholder() {
        // Test case: empty placeholder name.
        let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
        let plan = table_scan(TableReference::none(), &schema, None)
            .unwrap()
            .filter(col("id").eq(Expr::Placeholder(Placeholder::new(
                "".into(),
                Some(DataType::Int32),
            ))))
            .unwrap()
            .build()
            .unwrap();
        plan.replace_params_with_values(&[42i32.into()])
            .expect_err("unexpectedly succeeded to replace an invalid placeholder");
        // Test case: "$0" placeholder id.
        let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
        let plan = table_scan(TableReference::none(), &schema, None)
            .unwrap()
            .filter(col("id").eq(Expr::Placeholder(Placeholder::new(
                "$0".into(),
                Some(DataType::Int32),
            ))))
            .unwrap()
            .build()
            .unwrap();
        plan.replace_params_with_values(&[42i32.into()])
            .expect_err("unexpectedly succeeded to replace an invalid placeholder");
    }
}