From b277a3840f7af0eecf81d26f5e4ee74b4cab494f Mon Sep 17 00:00:00 2001
From: Ziy1-Tan
Date: Fri, 22 Sep 2023 23:45:46 +0800
Subject: [PATCH] Refactor: no pushdown when disabled

Signed-off-by: Ziy1-Tan
---
 .../graphar/datasources/GarScanBuilder.scala | 43 +++++++++----------
 1 file changed, 20 insertions(+), 23 deletions(-)

diff --git a/spark/src/main/scala/com/alibaba/graphar/datasources/GarScanBuilder.scala b/spark/src/main/scala/com/alibaba/graphar/datasources/GarScanBuilder.scala
index ee20601a0..1a77997d6 100644
--- a/spark/src/main/scala/com/alibaba/graphar/datasources/GarScanBuilder.scala
+++ b/spark/src/main/scala/com/alibaba/graphar/datasources/GarScanBuilder.scala
@@ -51,39 +51,37 @@ case class GarScanBuilder(
     filters
   }
 
-  override def pushedFilters(): Array[Filter] = {
-    formatName match {
-      case "csv" => Array.empty
-      case "orc" => pushedOrcFilters
-      case "parquet" => pushedParquetFilters
-      case _ => throw new IllegalArgumentException
-    }
+  override def pushedFilters(): Array[Filter] = formatName match {
+    case "csv" => Array.empty[Filter]
+    case "orc" => pushedOrcFilters
+    case "parquet" => pushedParquetFilters
+    case _ => throw new IllegalArgumentException
   }
 
-  private lazy val pushedParquetFilters = {
+  private lazy val pushedParquetFilters: Array[Filter] = {
     if (!sparkSession.sessionState.conf.parquetFilterPushDown) {
-      Array.empty
+      Array.empty[Filter]
+    } else {
+      val builder =
+        ParquetScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
+      builder.pushFilters(this.filters)
+      builder.pushedFilters()
     }
-
-    val builder =
-      ParquetScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
-    builder.pushFilters(this.filters)
-    builder.pushedFilters()
   }
 
-  private lazy val pushedOrcFilters = {
+  private lazy val pushedOrcFilters: Array[Filter] = {
     if (!sparkSession.sessionState.conf.orcFilterPushDown) {
-      Array.empty
+      Array.empty[Filter]
+    } else {
+      val builder =
+        OrcScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
+      builder.pushFilters(this.filters)
+      builder.pushedFilters()
     }
-
-    val builder =
-      OrcScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
-    builder.pushFilters(this.filters)
-    builder.pushedFilters()
   }
 
   // Check if the file format supports nested schema pruning.
-  override protected val supportsNestedSchemaPruning: Boolean = {
+  override protected val supportsNestedSchemaPruning: Boolean =
     formatName match {
       case "csv" => false
       case "orc" => sparkSession.sessionState.conf.nestedSchemaPruningEnabled
@@ -91,7 +89,6 @@ case class GarScanBuilder(
         sparkSession.sessionState.conf.nestedSchemaPruningEnabled
       case _ => throw new IllegalArgumentException
     }
-  }
 
   /** Build the file scan for GarDataSource. */
   override def build(): Scan = {