Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions datafusion/core/src/physical_planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2524,6 +2524,18 @@ impl DefaultPhysicalPlanner {
// to verify that the plan is executable.
InvariantChecker(InvariantLevel::Executable).check(&new_plan)?;

#[cfg(debug_assertions)]
{
use datafusion_physical_plan::execution_plan::check_physical_expressions;

new_plan = new_plan
.transform_up(|p| {
let plan = check_physical_expressions(p)?;
Ok(Transformed::yes(plan))
})
.map(|t| t.data)?;
}

debug!(
"Optimized physical plan:\n{}\n",
displayable(new_plan.as_ref()).indent(false)
Expand Down
10 changes: 10 additions & 0 deletions datafusion/core/tests/physical_optimizer/pushdown_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,16 @@ impl FileSource for TestSource {
})
}

/// Returns a copy of this test source carrying the supplied pushed-down
/// filter. The projection argument is accepted but ignored by this stub.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    _projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    let mut updated = self.clone();
    updated.predicate = filter;
    Ok(Some(Arc::new(updated)))
}

fn metrics(&self) -> &ExecutionPlanMetricsSet {
&self.metrics
}
Expand Down
17 changes: 16 additions & 1 deletion datafusion/datasource-arrow/src/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ use datafusion_datasource::{TableSchema, as_file_source};
use arrow::buffer::Buffer;
use arrow::ipc::reader::{FileDecoder, FileReader, StreamReader};
use datafusion_common::error::Result;
use datafusion_common::exec_datafusion_err;
use datafusion_common::{assert_or_internal_err, exec_datafusion_err};
use datafusion_datasource::PartitionedFile;
use datafusion_datasource::file::FileSource;
use datafusion_datasource::file_scan_config::FileScanConfig;
Expand All @@ -50,6 +50,7 @@ use datafusion_physical_plan::projection::ProjectionExprs;

use datafusion_datasource::file_stream::FileOpenFuture;
use datafusion_datasource::file_stream::FileOpener;
use datafusion_physical_plan::PhysicalExpr;
use futures::StreamExt;
use itertools::Itertools;
use object_store::{GetOptions, GetRange, GetResultPayload, ObjectStore};
Expand Down Expand Up @@ -396,6 +397,20 @@ impl FileSource for ArrowSource {
fn projection(&self) -> Option<&ProjectionExprs> {
Some(&self.projection.source)
}

/// Rebuilds this Arrow source with a new projection.
///
/// Arrow IPC scans cannot evaluate predicates, so any pushed-down
/// `filter` is rejected as an internal error.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    assert_or_internal_err!(filter.is_none(), "filter should not be defined");

    // Split the projection against the file schema before cloning.
    let split = SplitProjection::new(self.table_schema.file_schema(), &projection);
    let mut updated = self.clone();
    updated.projection = split;
    Ok(Some(Arc::new(updated)))
}
}

/// `FileOpener` wrapper for both Arrow IPC file and stream formats
Expand Down
16 changes: 16 additions & 0 deletions datafusion/datasource-avro/src/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ use std::sync::Arc;

use crate::avro_to_arrow::Reader as AvroReader;

use datafusion_common::assert_or_internal_err;
use datafusion_common::error::Result;
use datafusion_datasource::TableSchema;
use datafusion_datasource::file::FileSource;
Expand All @@ -32,6 +33,7 @@ use datafusion_physical_expr_common::sort_expr::LexOrdering;
use datafusion_physical_plan::metrics::ExecutionPlanMetricsSet;
use datafusion_physical_plan::projection::ProjectionExprs;

use datafusion_physical_plan::PhysicalExpr;
use object_store::ObjectStore;

/// AvroSource holds the extra configuration that is necessary for opening avro files
Expand Down Expand Up @@ -122,6 +124,20 @@ impl FileSource for AvroSource {
Some(&self.projection.source)
}

/// Rebuilds this Avro source with a new projection.
///
/// Avro scans do not support predicate evaluation, so a non-`None`
/// `filter` is treated as an internal error.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    assert_or_internal_err!(filter.is_none(), "filter should not be defined");

    let updated = {
        let mut source = self.clone();
        source.projection =
            SplitProjection::new(self.table_schema.file_schema(), &projection);
        source
    };
    Ok(Some(Arc::new(updated)))
}

fn metrics(&self) -> &ExecutionPlanMetricsSet {
&self.metrics
}
Expand Down
18 changes: 16 additions & 2 deletions datafusion/datasource-csv/src/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,14 @@ use datafusion_datasource::{

use arrow::csv;
use datafusion_common::config::CsvOptions;
use datafusion_common::{DataFusionError, Result};
use datafusion_common::{DataFusionError, Result, assert_or_internal_err};
use datafusion_common_runtime::JoinSet;
use datafusion_datasource::file::FileSource;
use datafusion_datasource::file_scan_config::FileScanConfig;
use datafusion_execution::TaskContext;
use datafusion_physical_plan::metrics::{BaselineMetrics, ExecutionPlanMetricsSet};
use datafusion_physical_plan::{
DisplayFormatType, ExecutionPlan, ExecutionPlanProperties,
DisplayFormatType, ExecutionPlan, ExecutionPlanProperties, PhysicalExpr,
};

use crate::file_format::CsvDecoder;
Expand Down Expand Up @@ -292,6 +292,20 @@ impl FileSource for CsvSource {
Some(&self.projection.source)
}

/// Rebuilds this CSV source with a new projection.
///
/// CSV scans cannot evaluate predicates, so a pushed-down `filter`
/// is reported as an internal error.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    assert_or_internal_err!(filter.is_none(), "filter should not be defined");

    // Recompute the split projection from the file schema, then clone
    // the remaining configuration unchanged.
    let split = SplitProjection::new(self.table_schema.file_schema(), &projection);
    let mut rebuilt = self.clone();
    rebuilt.projection = split;
    Ok(Some(Arc::new(rebuilt)))
}

fn metrics(&self) -> &ExecutionPlanMetricsSet {
&self.metrics
}
Expand Down
17 changes: 16 additions & 1 deletion datafusion/datasource-json/src/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ use std::task::Poll;

use crate::file_format::JsonDecoder;

use datafusion_common::assert_or_internal_err;
use datafusion_common::error::{DataFusionError, Result};
use datafusion_common_runtime::JoinSet;
use datafusion_datasource::decoder::{DecoderDeserializer, deserialize_stream};
Expand All @@ -34,7 +35,7 @@ use datafusion_datasource::{
ListingTableUrl, PartitionedFile, RangeCalculation, as_file_source, calculate_range,
};
use datafusion_physical_plan::projection::ProjectionExprs;
use datafusion_physical_plan::{ExecutionPlan, ExecutionPlanProperties};
use datafusion_physical_plan::{ExecutionPlan, ExecutionPlanProperties, PhysicalExpr};

use arrow::json::ReaderBuilder;
use arrow::{datatypes::SchemaRef, json};
Expand Down Expand Up @@ -162,6 +163,20 @@ impl FileSource for JsonSource {
Some(&self.projection.source)
}

/// Rebuilds this JSON source with a new projection.
///
/// JSON scans do not evaluate predicates, so a non-`None` `filter`
/// is rejected as an internal error.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    assert_or_internal_err!(filter.is_none(), "filter should not be defined");

    let rebuilt = {
        let mut source = self.clone();
        source.projection =
            SplitProjection::new(self.table_schema.file_schema(), &projection);
        source
    };
    Ok(Some(Arc::new(rebuilt)))
}

fn metrics(&self) -> &ExecutionPlanMetricsSet {
&self.metrics
}
Expand Down
11 changes: 11 additions & 0 deletions datafusion/datasource-parquet/src/source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -603,6 +603,17 @@ impl FileSource for ParquetSource {
Some(&self.projection)
}

/// Rebuilds this Parquet source carrying both the pushed-down predicate
/// (Parquet supports filter evaluation during the scan) and the new
/// projection.
fn with_filter_and_projection(
    &self,
    filter: Option<Arc<dyn PhysicalExpr>>,
    projection: ProjectionExprs,
) -> datafusion_common::Result<Option<Arc<dyn FileSource>>> {
    let rebuilt = {
        let mut source = self.clone();
        source.predicate = filter;
        source.projection = projection;
        source
    };
    Ok(Some(Arc::new(rebuilt)))
}

fn metrics(&self) -> &ExecutionPlanMetricsSet {
&self.metrics
}
Expand Down
8 changes: 8 additions & 0 deletions datafusion/datasource/src/file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,14 @@ pub trait FileSource: Send + Sync {
fn projection(&self) -> Option<&ProjectionExprs> {
None
}
/// Returns new file source with given filter and projection.
///
/// The default implementation returns `Ok(None)`, signalling that this
/// source does not support being rebuilt with replaced expressions.
/// Implementations that do support it return `Ok(Some(new_source))`
/// with the filter and projection installed.
fn with_filter_and_projection(
    &self,
    _filter: Option<Arc<dyn PhysicalExpr>>,
    _projection: ProjectionExprs,
) -> Result<Option<Arc<dyn FileSource>>> {
    Ok(None)
}
/// Return execution plan metrics
fn metrics(&self) -> &ExecutionPlanMetricsSet;
/// String representation of file source such as "csv", "json", "parquet"
Expand Down
76 changes: 73 additions & 3 deletions datafusion/datasource/src/file_scan_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ use arrow::datatypes::FieldRef;
use arrow::datatypes::{DataType, Schema, SchemaRef};
use datafusion_common::config::ConfigOptions;
use datafusion_common::{
Constraints, Result, ScalarValue, Statistics, internal_datafusion_err, internal_err,
Constraints, Result, ScalarValue, Statistics, assert_eq_or_internal_err,
internal_datafusion_err, internal_err,
};
use datafusion_execution::{
SendableRecordBatchStream, TaskContext, object_store::ObjectStoreUrl,
Expand All @@ -37,15 +38,15 @@ use datafusion_expr::Operator;

use datafusion_physical_expr::equivalence::project_orderings;
use datafusion_physical_expr::expressions::{BinaryExpr, Column};
use datafusion_physical_expr::projection::ProjectionExprs;
use datafusion_physical_expr::projection::{ProjectionExpr, ProjectionExprs};
use datafusion_physical_expr::utils::reassign_expr_columns;
use datafusion_physical_expr::{EquivalenceProperties, Partitioning, split_conjunction};
use datafusion_physical_expr_adapter::PhysicalExprAdapterFactory;
use datafusion_physical_expr_common::physical_expr::PhysicalExpr;
use datafusion_physical_expr_common::sort_expr::{LexOrdering, PhysicalSortExpr};
use datafusion_physical_plan::SortOrderPushdownResult;
use datafusion_physical_plan::coop::cooperative;
use datafusion_physical_plan::execution_plan::SchedulingType;
use datafusion_physical_plan::execution_plan::{ReplacePhysicalExpr, SchedulingType};
use datafusion_physical_plan::{
DisplayAs, DisplayFormatType,
display::{ProjectSchemaDisplay, display_orderings},
Expand Down Expand Up @@ -884,6 +885,75 @@ impl DataSource for FileScanConfig {
};
Some(Arc::new(new_config))
}

/// Returns all physical expressions held by the file source: the filter
/// expression (if any) first, followed by every projection expression.
fn physical_expressions<'a>(
    &'a self,
) -> Option<Box<dyn Iterator<Item = Arc<dyn PhysicalExpr>> + 'a>> {
    let source = &self.file_source;
    let exprs = source
        .filter()
        .into_iter()
        .chain(source.projection().into_iter().flat_map(|p| p.expr_iter()));
    Some(Box::new(exprs))
}

/// Rebuilds this `FileScanConfig` with the replacement expressions in
/// `params.exprs`, which must arrive in the same order that
/// `physical_expressions` emits them: the filter expression (if one
/// exists) first, then one expression per projection entry.
fn with_physical_expressions(
    &self,
    params: ReplacePhysicalExpr,
) -> Result<Option<Arc<dyn DataSource>>> {
    // `filter()` returns an Option, so this count is 0 or 1.
    let filter_count = self.file_source.filter().iter().len();
    let projection_count = self
        .file_source
        .projection()
        .map(|p| p.expr_iter().count())
        .unwrap_or(0);

    // The caller must supply exactly one replacement per existing expression.
    let expected_count = filter_count + projection_count;
    let exprs_count = params.exprs.len();

    assert_eq_or_internal_err!(
        expected_count,
        exprs_count,
        "Inconsistent number of physical expressions for FileScanConfig",
    );

    let mut filter = None;
    let mut projection = vec![];
    let mut expr_iter = params.exprs.into_iter();

    // Consume the filter slot first — it precedes projection expressions
    // in the replacement list.
    if filter_count > 0 {
        filter = expr_iter.next();
    }

    if projection_count > 0 {
        // Pair each remaining replacement expression with the original
        // projection entry so aliases are preserved.
        projection = self
            .file_source
            .projection()
            .expect("should have expressions")
            .iter()
            .zip(expr_iter)
            .map(|(p, expr)| ProjectionExpr::new(expr, p.alias.clone()))
            .collect();
    }

    let file_source = self
        .file_source
        .with_filter_and_projection(filter, projection.into())?;

    match file_source {
        Some(file_source) => {
            // Rebuild the scan config around the new file source, keeping
            // all other settings from `self`.
            let conf_builder: FileScanConfigBuilder = self.clone().into();
            Ok(Some(Arc::new(
                conf_builder.with_source(file_source).build(),
            )))
        }
        None => {
            // A source that exposes expressions must also accept
            // replacements; refusing to rebuild is an internal error.
            internal_err!("file source is not rebuilt")
        }
    }
}
}

impl FileScanConfig {
Expand Down
Loading