
Commit 02bbfbc

Tightening imports.
1 parent ffbce66 commit 02bbfbc

File tree

2 files changed, +4 -5 lines changed


sql/core/src/main/scala/org/apache/spark/sql/package.scala

Lines changed: 1 addition & 1 deletion
@@ -37,5 +37,5 @@ package object sql {
    * Converts a logical plan into zero or more SparkPlans.
    */
   @DeveloperApi
-  type Strategy = org.apache.spark.sql.catalyst.planning.GenericStrategy[SparkPlan]
+  protected[sql] type Strategy = org.apache.spark.sql.catalyst.planning.GenericStrategy[SparkPlan]
 }
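For context, the Strategy alias above is the extension point that planner rules implement. A minimal sketch of a rule compiling against the now protected[sql] alias (not part of this commit; NoOpStrategy is a hypothetical name):

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// Hypothetical rule: because Strategy is now protected[sql], implementations
// must live under org.apache.spark.sql, as DataSourceStrategy below does.
object NoOpStrategy extends Strategy {
  // A Strategy converts a logical plan into zero or more physical SparkPlans;
  // returning Nil means "this rule does not apply to the given plan".
  def apply(plan: LogicalPlan): Seq[execution.SparkPlan] = Nil
}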

sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala

Lines changed: 3 additions & 4 deletions
@@ -18,19 +18,18 @@
 package org.apache.spark.sql.sources
 
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.Row
-import org.apache.spark.sql._
+import org.apache.spark.sql.{Row, Strategy}
 import org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.SparkPlan
+import org.apache.spark.sql.execution
 
 /**
  * A Strategy for planning scans over data sources defined using the sources API.
  */
 private[sql] object DataSourceStrategy extends Strategy {
-  def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
+  def apply(plan: LogicalPlan): Seq[execution.SparkPlan] = plan match {
     case PhysicalOperation(projectList, filters, l @ LogicalRelation(t: CatalystScan)) =>
       pruneFilterProjectRaw(
         l,
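As a usage note, strategies like DataSourceStrategy are consumed by handing them a logical plan and collecting candidate physical plans. A hedged sketch (not from this commit; PlanWithStrategies and its first-match behaviour are illustrative only, Spark's own QueryPlanner wiring differs):

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// Hypothetical helper: try each Strategy in order and keep the first
// non-empty list of candidate physical plans.
object PlanWithStrategies {
  def apply(strategies: Seq[Strategy], plan: LogicalPlan): Seq[execution.SparkPlan] =
    strategies.iterator.map(s => s(plan)).find(_.nonEmpty).getOrElse(Nil)
}

// e.g. PlanWithStrategies(Seq(sources.DataSourceStrategy), someLogicalPlan)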
