Skip to content
Closed
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -568,7 +568,7 @@ class SparkContext(config: SparkConf) extends Logging {

// The metrics system for Driver need to be set spark.app.id to app ID.
// So it should start after we get app ID from the task scheduler and set spark.app.id.
_env.metricsSystem.start()
_env.metricsSystem.start(_conf.get(METRICS_STATIC_SOURCES_ENABLED))
// Attach the driver metrics servlet handler to the web ui after the metrics system is started.
_env.metricsSystem.getServletHandlers.foreach(handler => ui.foreach(_.attachHandler(handler)))

Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkEnv.scala
Original file line number Diff line number Diff line change
Expand Up @@ -383,7 +383,7 @@ object SparkEnv extends Logging {
conf.set(EXECUTOR_ID, executorId)
val ms = MetricsSystem.createMetricsSystem(MetricsSystemInstances.EXECUTOR, conf,
securityManager)
ms.start()
ms.start(conf.get(METRICS_STATIC_SOURCES_ENABLED))
ms
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -615,6 +615,12 @@ package object config {
.stringConf
.createOptional

// Controls whether static metric sources (e.g. HiveExternalCatalog,
// CodeGenerator) are registered with the MetricsSystem on start.
// NOTE: the key uses the spark.metrics.* namespace to match the name
// documented in docs/monitoring.md and other metrics-related configs.
private[spark] val METRICS_STATIC_SOURCES_ENABLED =
  ConfigBuilder("spark.metrics.static.sources.enabled")
    .doc("Whether to register static sources with the metrics system.")
    .booleanConf
    .createWithDefault(true)

private[spark] val PYSPARK_DRIVER_PYTHON = ConfigBuilder("spark.pyspark.driver.python")
.stringConf
.createOptional
Expand Down
8 changes: 8 additions & 0 deletions docs/monitoring.md
Original file line number Diff line number Diff line change
Expand Up @@ -923,13 +923,17 @@ This is the component with the largest amount of instrumented metrics
- memory.remainingOnHeapMem_MB

- namespace=HiveExternalCatalog
- **note:** these metrics are conditional on a configuration parameter:
`spark.metrics.static.sources.enabled` (default is true)
- fileCacheHits.count
- filesDiscovered.count
- hiveClientCalls.count
- parallelListingJobCount.count
- partitionsFetched.count

- namespace=CodeGenerator
- **note:** these metrics are conditional on a configuration parameter:
`spark.metrics.static.sources.enabled` (default is true)
- compilationTime (histogram)
- generatedClassSize (histogram)
- generatedMethodSize (histogram)
Expand Down Expand Up @@ -1047,13 +1051,17 @@ when running in local mode.
- shuffle-server.usedHeapMemory

- namespace=HiveExternalCatalog
- **note:** these metrics are conditional on a configuration parameter:
`spark.metrics.static.sources.enabled` (default is true)
- fileCacheHits.count
- filesDiscovered.count
- hiveClientCalls.count
- parallelListingJobCount.count
- partitionsFetched.count

- namespace=CodeGenerator
- **note:** these metrics are conditional on a configuration parameter:
`spark.metrics.static.sources.enabled` (default is true)
- compilationTime (histogram)
- generatedClassSize (histogram)
- generatedMethodSize (histogram)
Expand Down