@@ -28,7 +28,7 @@ import org.apache.spark.util.Utils
  * logging messages at different levels using methods that only evaluate parameters lazily if the
  * log level is enabled.
  */
-private[spark] trait Logging {
+trait Logging {
 
   // Make the log field transient so that objects with Logging can
   // be serialized and used on another machine
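With the private[spark] qualifier removed, the trait becomes usable outside Spark's own packages. A minimal sketch of what that enables, assuming the Spark 2.x location org.apache.spark.internal.Logging (the IngestJob class is hypothetical):

import org.apache.spark.internal.Logging

class IngestJob extends Logging {
  def run(): Unit = {
    // both messages are by-name parameters, built only if the level is enabled
    logInfo("starting ingest")
    logDebug(s"max heap = ${Runtime.getRuntime.maxMemory} bytes")
  }
}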
sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala (29 changes: 24 additions & 5 deletions)
@@ -95,18 +95,28 @@ class SparkSession private(
   /**
    * State shared across sessions, including the `SparkContext`, cached data, listener,
    * and a catalog that interacts with external systems.
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
+   */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sharedState: SharedState = {
+  lazy val sharedState: SharedState = {
     existingSharedState.getOrElse(new SharedState(sparkContext))
   }
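Making sharedState public (annotated Unstable) lets advanced callers inspect the per-SparkContext state directly. A sketch using the builder API; the appName is illustrative:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("demo").getOrCreate()

// sharedState is per-SparkContext: the cache manager, SQL listener, external
// catalog, and warehouse path are common to every session on this context.
println(spark.sharedState.warehousePath)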

   /**
    * State isolated across sessions, including SQL configurations, temporary tables, registered
    * functions, and everything else that accepts a [[org.apache.spark.sql.internal.SQLConf]].
+   *
+   * This is internal to Spark and there is no guarantee on interface stability.
+   *
+   * @since 2.2.0
+   */
+  @InterfaceStability.Unstable
   @transient
-  private[sql] lazy val sessionState: SessionState = {
+  lazy val sessionState: SessionState = {
     SparkSession.reflect[SessionState, SparkSession](
       SparkSession.sessionStateClassName(sparkContext.conf),
       self)
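And the session-scoped counterpart: state reached through sessionState is isolated, so a config set on one session does not leak into a sibling session on the same SparkContext. A sketch, assuming the Spark 2.2-era SQLConf accessors setConfString and numShufflePartitions:

val a = spark.newSession()
val b = spark.newSession()

a.sessionState.conf.setConfString("spark.sql.shuffle.partitions", "4")
println(a.sessionState.conf.numShufflePartitions)  // 4
println(b.sessionState.conf.numShufflePartitions)  // still the default (200)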
@@ -613,7 +623,6 @@ class SparkSession private(
    *
    * @since 2.1.0
    */
-  @InterfaceStability.Stable
   def time[T](f: => T): T = {
     val start = System.nanoTime()
     val ret = f
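Only the method-level @InterfaceStability.Stable annotation is dropped here; the behavior of time is unchanged. For reference, it evaluates the by-name block once, prints the elapsed wall-clock time, and returns the block's result:

val rows = spark.time {   // prints "Time taken: <n> ms" when the block finishes
  spark.range(1000000).count()
}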
@@ -928,9 +937,19 @@ object SparkSession {
     defaultSession.set(null)
   }
 
-  private[sql] def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)
+  /**
+   * Returns the active SparkSession for the current thread, as set by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getActiveSession: Option[SparkSession] = Option(activeThreadSession.get)
 
-  private[sql] def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)
+  /**
+   * Returns the default SparkSession, as set by the builder.
+   *
+   * @since 2.2.0
+   */
+  def getDefaultSession: Option[SparkSession] = Option(defaultSession.get)
 
   /** A global SQL listener used for the SQL UI. */
   private[sql] val sqlListener = new AtomicReference[SQLListener]()
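Public getters let library code discover the caller's session instead of threading it through every API. A hypothetical helper built on the two new methods:

def currentSession(): SparkSession =
  SparkSession.getActiveSession
    .orElse(SparkSession.getDefaultSession)
    .getOrElse(throw new IllegalStateException("no SparkSession available"))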
@@ -39,7 +39,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {

   // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
   // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
-  val warehousePath = {
+  val warehousePath: String = {
     val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
     if (configFile != null) {
       sparkContext.hadoopConfiguration.addResource(configFile)
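The explicit : String matters once SharedState is reachable through the now-public sharedState accessor: on a public member the inferred type becomes part of the API, so annotating it pins the contract. Reading it from the session sketch earlier:

val warehouse: String = spark.sharedState.warehousePath
println(s"warehouse directory: $warehouse")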
@@ -103,7 +103,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
   /**
    * A manager for global temporary views.
    */
-  val globalTempViewManager = {
+  val globalTempViewManager: GlobalTempViewManager = {
     // The system-preserved database should not exist in the metastore. However, it is hard to
     // guarantee that for every session, because case sensitivity differs. Here we always
     // lowercase the name to make our life easier.
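Global temp views managed here live in the system-preserved database (global_temp by default) and are visible from any session on the same SparkContext, which is why the manager hangs off SharedState. A sketch using the public Dataset and SQL APIs:

spark.range(3).createGlobalTempView("nums")
spark.newSession().sql("SELECT * FROM global_temp.nums").show()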