diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala
index 24ffe4b887d9..87b11da5d5c1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanRelationPushDown.scala
@@ -350,7 +350,7 @@ object V2ScanRelationPushDown extends Rule[LogicalPlan] with PredicateHelper {
       val normalizedProjects = DataSourceStrategy
         .normalizeExprs(project, sHolder.output)
         .asInstanceOf[Seq[NamedExpression]]
-      val allFilters = filtersStayUp ++ filtersPushDown.reduceOption(And)
+      val allFilters = filtersPushDown.reduceOption(And).toSeq ++ filtersStayUp
       val normalizedFilters = DataSourceStrategy.normalizeExprs(allFilters, sHolder.output)
       val (scan, output) = PushDownUtils.pruneColumns(
         sHolder.builder, sHolder.relation, normalizedProjects, normalizedFilters)
@@ -371,7 +371,8 @@ object V2ScanRelationPushDown extends Rule[LogicalPlan] with PredicateHelper {
       }

       val finalFilters = normalizedFilters.map(projectionFunc)
-      val withFilter = finalFilters.foldRight[LogicalPlan](scanRelation)((cond, plan) => {
+      // bottom-most filters are put at the front of the list.
+      val withFilter = finalFilters.foldLeft[LogicalPlan](scanRelation)((plan, cond) => {
         Filter(cond, plan)
       })
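
As a sanity check on the fold direction, here is a minimal self-contained Scala sketch. The Plan, Scan, and Filter types below are illustrative stand-ins, not Spark's classes. It shows that with foldLeft the head of the list becomes the bottom-most Filter, sitting directly above the scan, which is exactly the invariant the new comment documents:

sealed trait Plan
case object Scan extends Plan
case class Filter(cond: String, child: Plan) extends Plan

// Per the new convention, the pushed-down (bottom-most) predicate comes first.
val conds = Seq("pushed", "stayUp")

// foldLeft wraps from the head outward:
//   Filter("stayUp", Filter("pushed", Scan))  -- "pushed" sits just above the scan.
val byFoldLeft = conds.foldLeft[Plan](Scan)((plan, cond) => Filter(cond, plan))

// foldRight over the same list would invert the nesting:
//   Filter("pushed", Filter("stayUp", Scan))
val byFoldRight = conds.foldRight[Plan](Scan)((cond, plan) => Filter(cond, plan))

Note that the two hunks compensate for each other: the old list order combined with foldRight produced the same Filter nesting as the new order with foldLeft. The observable change is the ordering of allFilters itself, and hence of the normalizedFilters passed to PushDownUtils.pruneColumns, which now lists the pushed-down predicates first.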