CheckConnectJvmClientCompatibility.scala
@@ -237,6 +237,8 @@ object CheckConnectJvmClientCompatibility {
       // SparkSession#implicits
       ProblemFilters.exclude[DirectMissingMethodProblem](
         "org.apache.spark.sql.SparkSession#implicits._sqlContext"),
+      ProblemFilters.exclude[DirectMissingMethodProblem](
+        "org.apache.spark.sql.SparkSession#implicits.session"),

       // SparkSession#Builder
       ProblemFilters.exclude[DirectMissingMethodProblem](
@@ -287,6 +289,7 @@ object CheckConnectJvmClientCompatibility {
       // SQLImplicits
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SQLImplicits.rddToDatasetHolder"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SQLImplicits._sqlContext"),
+      ProblemFilters.exclude[Problem]("org.apache.spark.sql.SQLImplicits.session"),

       // Artifact Manager
       ProblemFilters.exclude[MissingClassProblem](
@@ -440,6 +443,9 @@ object CheckConnectJvmClientCompatibility {
       ProblemFilters.exclude[DirectMissingMethodProblem](
         "org.apache.spark.sql.SparkSession#Builder.interceptor"),

+      // SQLImplicits
+      ProblemFilters.exclude[Problem]("org.apache.spark.sql.SQLImplicits.session"),
+
       // Streaming API
       ProblemFilters.exclude[MissingTypesProblem](
         "org.apache.spark.sql.streaming.DataStreamWriter" // Client version extends Logging
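For readers unfamiliar with MiMa: each `ProblemFilters.exclude` entry above is an ordinary Scala value that tells the checker to suppress one binary-compatibility report, keyed by problem kind and fully qualified member name. A minimal sketch of how such a list is assembled, assuming MiMa's `com.typesafe.tools.mima.core` API (the value name is illustrative):

import com.typesafe.tools.mima.core._

// Each filter silences exactly one kind of report for one member.
val sessionExcludes: Seq[ProblemFilter] = Seq(
  ProblemFilters.exclude[DirectMissingMethodProblem](
    "org.apache.spark.sql.SparkSession#implicits.session"),
  ProblemFilters.exclude[Problem]("org.apache.spark.sql.SQLImplicits.session"))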
MLlibTestSparkContext.scala
@@ -25,7 +25,7 @@ import org.apache.spark.SparkContext
 import org.apache.spark.ml.feature._
 import org.apache.spark.ml.stat.Summarizer
 import org.apache.spark.ml.util.TempDirectory
-import org.apache.spark.sql.{SparkSession, SQLContext, SQLImplicits}
+import org.apache.spark.sql.{SparkSession, SQLImplicits}
 import org.apache.spark.util.ArrayImplicits._
 import org.apache.spark.util.Utils

@@ -65,11 +65,11 @@ trait MLlibTestSparkContext extends TempDirectory { self: Suite =>
    * A helper object for importing SQL implicits.
    *
    * Note that the alternative of importing `spark.implicits._` is not possible here.
-   * This is because we create the `SQLContext` immediately before the first test is run,
+   * This is because we create the `SparkSession` immediately before the first test is run,
    * but the implicits import is needed in the constructor.
    */
   protected object testImplicits extends SQLImplicits {
-    protected override def _sqlContext: SQLContext = self.spark.sqlContext
+    protected override def session: SparkSession = self.spark
   }

   private[spark] def standardize(instances: Array[Instance]): Array[Instance] = {
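A hedged sketch of how a suite consumes this helper (the suite, test, and column names are illustrative, not part of this PR). The import sits in the class body, i.e. the constructor, which runs before `beforeAll` initializes `spark`; it compiles anyway because `session` is only dereferenced once a conversion actually runs inside a test:

class MyFeatureSuite extends SparkFunSuite with MLlibTestSparkContext {
  import testImplicits._  // legal here: `spark` is not touched at import time

  test("toDF is available once the session is live") {
    val df = Seq((1.0, "a"), (2.0, "b")).toDF("value", "label")
    assert(df.count() == 2)
  }
}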
6 changes: 5 additions & 1 deletion project/MimaExcludes.scala
@@ -96,7 +96,11 @@ object MimaExcludes {
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.WriteInputFormatTestDataGenerator"),
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.python.WriteInputFormatTestDataGenerator$"),
     // SPARK-47764: Cleanup shuffle dependencies based on ShuffleCleanupMode
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.shuffle.MigratableResolver.addShuffleToSkip")
+    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.shuffle.MigratableResolver.addShuffleToSkip"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLContext#implicits._sqlContext"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SQLImplicits._sqlContext"),
+    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.SQLImplicits.session"),
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.SparkSession#implicits._sqlContext")
   )

   // Default exclude rules
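The mix of filter kinds is deliberate and worth spelling out: deleting `_sqlContext` breaks callers of the old member, which MiMa reports as a `DirectMissingMethodProblem`, while adding the abstract `session` breaks external subclasses that do not yet implement it, reported as a `ReversedMissingMethodProblem`. A schematic before/after sketch (not the actual Spark sources):

// Before: subclasses implemented this member and code could call it.
abstract class SQLImplicits { protected def _sqlContext: SQLContext }

// After: a pre-existing third-party subclass now lacks `session`,
// which is exactly what ReversedMissingMethodProblem flags.
abstract class SQLImplicits { protected def session: SparkSession }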
SQLContext.scala
@@ -252,7 +252,7 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @since 1.3.0
    */
   object implicits extends SQLImplicits with Serializable {
-    protected override def _sqlContext: SQLContext = self
+    protected override def session: SparkSession = self.sparkSession
   }
   // scalastyle:on

SQLImplicits.scala
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
  */
 abstract class SQLImplicits extends LowPrioritySQLImplicits {

-  protected def _sqlContext: SQLContext
+  protected def session: SparkSession

   /**
    * Converts $"col name" into a [[Column]].
@@ -242,15 +242,15 @@ abstract class SQLImplicits extends LowPrioritySQLImplicits {
    * @since 1.6.0
    */
   implicit def rddToDatasetHolder[T : Encoder](rdd: RDD[T]): DatasetHolder[T] = {
-    DatasetHolder(_sqlContext.createDataset(rdd))
+    DatasetHolder(session.createDataset(rdd))
   }

   /**
    * Creates a [[Dataset]] from a local Seq.
    * @since 1.6.0
    */
   implicit def localSeqToDatasetHolder[T : Encoder](s: Seq[T]): DatasetHolder[T] = {
-    DatasetHolder(_sqlContext.createDataset(s))
+    DatasetHolder(session.createDataset(s))
   }

   /**
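With these implicits imported, the two holder conversions surface as `.toDS()` and `.toDF()` on RDDs and local Seqs, via the intermediate `DatasetHolder`. A usage sketch, assuming a live `SparkSession` named `spark`:

import spark.implicits._

// rddToDatasetHolder: RDD[Int] => DatasetHolder[Int], then .toDS()
val fromRdd = spark.sparkContext.parallelize(Seq(1, 2, 3)).toDS()

// localSeqToDatasetHolder: Seq[String] => DatasetHolder[String], then .toDF()
val fromSeq = Seq("a", "b").toDF("letter")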
SparkSession.scala
@@ -846,7 +846,7 @@ class SparkSession private(
    * @since 2.0.0
    */
   object implicits extends SQLImplicits with Serializable {
-    protected override def _sqlContext: SQLContext = SparkSession.this.sqlContext
+    protected override def session: SparkSession = SparkSession.this
   }
   // scalastyle:on

typed.scala
@@ -41,7 +41,7 @@ object typed {

   // TODO: This is pretty hacky. Maybe we should have an object for implicit encoders.
   private val implicits = new SQLImplicits {
-    override protected def _sqlContext: SQLContext = null
+    override protected def session: SparkSession = null
   }

   /**
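Returning `null` is tolerable here only because `typed` consults this object purely for its implicit encoders; that reading is ours, not stated in the PR. In sketch form:

// Encoder lookup never dereferences `session`, so the null is never touched.
// Routing a holder conversion through this object, e.g. Seq(1, 2).toDS(),
// would instead fail with an NPE inside session.createDataset.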
SQLTestData.scala
@@ -21,7 +21,7 @@ import java.nio.charset.StandardCharsets
 import java.time.{Duration, Period}

 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{DataFrame, SparkSession, SQLContext, SQLImplicits}
+import org.apache.spark.sql.{DataFrame, SparkSession, SQLImplicits}
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.types.DayTimeIntervalType.{DAY, HOUR, MINUTE, SECOND}
 import org.apache.spark.sql.types.YearMonthIntervalType.{MONTH, YEAR}
@@ -33,15 +33,15 @@ import org.apache.spark.unsafe.types.CalendarInterval
 private[sql] trait SQLTestData { self =>
   protected def spark: SparkSession

-  // Helper object to import SQL implicits without a concrete SQLContext
+  // Helper object to import SQL implicits without a concrete SparkSession
   private object internalImplicits extends SQLImplicits {
-    protected override def _sqlContext: SQLContext = self.spark.sqlContext
+    protected override def session: SparkSession = self.spark
   }

   import internalImplicits._
   import SQLTestData._

-  // Note: all test data should be lazy because the SQLContext is not set up yet.
+  // Note: all test data should be lazy because the SparkSession is not set up yet.

   protected lazy val emptyTestData: DataFrame = {
     val df = spark.sparkContext.parallelize(
SQLTestUtils.scala
@@ -53,11 +53,11 @@ import org.apache.spark.util.Utils
  * Helper trait that should be extended by all SQL test suites within the Spark
  * code base.
  *
- * This allows subclasses to plug in a custom `SQLContext`. It comes with test data
+ * This allows subclasses to plug in a custom `SparkSession`. It comes with test data
  * prepared in advance as well as all implicit conversions used extensively by dataframes.
- * To use implicit methods, import `testImplicits._` instead of through the `SQLContext`.
+ * To use implicit methods, import `testImplicits._` instead of through the `SparkSession`.
  *
- * Subclasses should *not* create `SQLContext`s in the test suite constructor, which is
+ * Subclasses should *not* create `SparkSession`s in the test suite constructor, which is
  * prone to leaving multiple overlapping [[org.apache.spark.SparkContext]]s in the same JVM.
  */
 private[sql] trait SQLTestUtils extends SparkFunSuite with SQLTestUtilsBase with PlanTest {
@@ -229,18 +229,18 @@ private[sql] trait SQLTestUtilsBase

   protected def sparkContext = spark.sparkContext

-  // Shorthand for running a query using our SQLContext
+  // Shorthand for running a query using our SparkSession
   protected lazy val sql: String => DataFrame = spark.sql _

   /**
    * A helper object for importing SQL implicits.
    *
    * Note that the alternative of importing `spark.implicits._` is not possible here.
-   * This is because we create the `SQLContext` immediately before the first test is run,
+   * This is because we create the `SparkSession` immediately before the first test is run,
    * but the implicits import is needed in the constructor.
    */
   protected object testImplicits extends SQLImplicits {
-    protected override def _sqlContext: SQLContext = self.spark.sqlContext
+    protected override def session: SparkSession = self.spark
   }

   protected override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {