7 changes: 2 additions & 5 deletions core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -323,10 +323,7 @@ private[spark] class Executor(
     val threadName = s"Executor task launch worker for task $taskId"
     val taskName = taskDescription.name
     val mdcProperties = taskDescription.properties.asScala
-      .filter(_._1.startsWith("mdc.")).map { item =>
-        val key = item._1.substring(4)
-        (key, item._2)
-      }.toSeq
+      .filter(_._1.startsWith("mdc.")).toSeq
 
     /** If specified, this task has been killed and this option contains the reason. */
     @volatile private var reasonIfKilled: Option[String] = None
@@ -705,7 +702,7 @@ private[spark] class Executor(
     MDC.clear()
     mdc.foreach { case (key, value) => MDC.put(key, value) }
     // avoid the user overriding the taskName
-    MDC.put("taskName", taskName)
+    MDC.put("mdc.taskName", taskName)
   }
 
   /**
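For context, a minimal self-contained sketch of the executor-side behavior after this change. The `MdcPrefixSketch` wrapper and the `props` contents are invented for illustration; the real code operates on `taskDescription.properties` and the actual task name:

```scala
import scala.jdk.CollectionConverters._
import org.slf4j.MDC

object MdcPrefixSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for taskDescription.properties (hypothetical values).
    val props = new java.util.Properties()
    props.setProperty("mdc.appId", "app-42")
    props.setProperty("spark.executor.cores", "4")

    // Keys now keep their full "mdc." prefix instead of having it
    // stripped with substring(4).
    val mdcProperties = props.asScala
      .filter(_._1.startsWith("mdc.")).toSeq

    MDC.clear()
    mdcProperties.foreach { case (key, value) => MDC.put(key, value) }
    // The built-in key is written last, so a user-supplied
    // "mdc.taskName" property cannot shadow it.
    MDC.put("mdc.taskName", "task 1.0 in stage 0.0")

    // With an SLF4J binding that backs MDC (e.g. log4j), this prints
    // {mdc.appId=app-42, mdc.taskName=task 1.0 in stage 0.0}.
    println(MDC.getCopyOfContextMap)
  }
}
```

Keeping the prefix means log4j patterns must reference `%X{mdc.taskName}` instead of `%X{taskName}`, which is what the documentation change below reflects.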
8 changes: 4 additions & 4 deletions docs/configuration.md
@@ -2955,11 +2955,11 @@ Spark uses [log4j](http://logging.apache.org/log4j/) for logging. You can config
 `log4j.properties` file in the `conf` directory. One way to start is to copy the existing
 `log4j.properties.template` located there.
 
-By default, Spark adds 1 record to the MDC (Mapped Diagnostic Context): `taskName`, which shows something
-like `task 1.0 in stage 0.0`. You can add `%X{taskName}` to your patternLayout in
+By default, Spark adds one record to the MDC (Mapped Diagnostic Context): `mdc.taskName`, which shows something
+like `task 1.0 in stage 0.0`. You can add `%X{mdc.taskName}` to your patternLayout in
 order to print it in the logs.
-Moreover, you can use `spark.sparkContext.setLocalProperty("mdc." + name, "value")` to add user specific data into MDC.
-The key in MDC will be the string after the `mdc.` prefix.
+Moreover, you can use `spark.sparkContext.setLocalProperty(s"mdc.$name", "value")` to add user-specific data into MDC.
+The key in MDC will be the full string `mdc.$name`; the prefix is no longer stripped.
 
 # Overriding configuration directory
 
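To illustrate the user-facing side described in the documentation above, here is a hedged sketch; the property name `mdc.appId`, the app name, and the example pattern line are made up for this example:

```scala
import org.apache.spark.sql.SparkSession

object MdcUsageSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("mdc-usage-sketch")
      .master("local[*]")
      .getOrCreate()

    // Propagated to the executor-side MDC for tasks submitted from
    // this thread; after this change the key keeps its "mdc." prefix.
    spark.sparkContext.setLocalProperty("mdc.appId", "app-42")

    // A patternLayout such as
    //   %d{yy/MM/dd HH:mm:ss} %p %X{mdc.taskName} %X{mdc.appId}: %m%n
    // in log4j.properties would then print both values for these tasks.
    spark.sparkContext.parallelize(1 to 4).foreach(_ => ())

    spark.stop()
  }
}
```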