diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 1978e6fd4343d..91e168210fb30 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -101,8 +101,8 @@ jobs:
run: |
./build/sbt -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Pspark-ganglia-lgpl test:package
# Make less noisy
- cp conf/log4j.properties.template conf/log4j.properties
- sed -i 's/log4j.rootCategory=INFO, console/log4j.rootCategory=WARN, console/g' conf/log4j.properties
+ cp conf/log4j2.properties.template conf/log4j2.properties
+ sed -i 's/rootLogger.level = info/rootLogger.level = warn/g' conf/log4j2.properties
# In benchmark, we use local as master so set driver memory only. Note that GitHub Actions has 7 GB memory limit.
bin/spark-submit \
--driver-memory 6g --class org.apache.spark.benchmark.Benchmarks \
diff --git a/conf/log4j2.properties.template b/conf/log4j2.properties.template
index fbc2333baf479..85b4f679a93e2 100644
--- a/conf/log4j2.properties.template
+++ b/conf/log4j2.properties.template
@@ -17,7 +17,7 @@
# Set everything to be logged to the console
rootLogger.level = info
-rootLogger.appenderRef.file.ref = console
+rootLogger.appenderRef.stdout.ref = console
appender.console.type = Console
appender.console.name = console
diff --git a/docs/configuration.md b/docs/configuration.md
index 89871cbd3bf39..3d78870a705c0 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -3081,7 +3081,7 @@ Note: When running Spark on YARN in `cluster` mode, environment variables need t
Spark uses [log4j](http://logging.apache.org/log4j/) for logging. You can configure it by adding a
`log4j.properties` file in the `conf` directory. One way to start is to copy the existing
-`log4j.properties.template` located there.
+`log4j2.properties.template` located there.
By default, Spark adds 1 record to the MDC (Mapped Diagnostic Context): `mdc.taskName`, which shows something
like `task 1.0 in stage 0.0`. You can add `%X{mdc.taskName}` to your patternLayout in
diff --git a/external/kafka-0-10-assembly/pom.xml b/external/kafka-0-10-assembly/pom.xml
index 4864c629330b6..0c8194cad4d20 100644
--- a/external/kafka-0-10-assembly/pom.xml
+++ b/external/kafka-0-10-assembly/pom.xml
@@ -165,7 +165,7 @@
reference.conf
- log4j.properties
+ log4j2.properties
diff --git a/external/kinesis-asl-assembly/pom.xml b/external/kinesis-asl-assembly/pom.xml
index 6d3a4fa72c0f4..01541eb7816d0 100644
--- a/external/kinesis-asl-assembly/pom.xml
+++ b/external/kinesis-asl-assembly/pom.xml
@@ -205,7 +205,7 @@
reference.conf
- log4j.properties
+ log4j2.properties
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d9e63e86bec5e..9cb9fd7455dbf 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -871,7 +871,7 @@ object Assembly {
=> MergeStrategy.discard
case m if m.toLowerCase(Locale.ROOT).matches("meta-inf.*\\.sf$")
=> MergeStrategy.discard
- case "log4j.properties" => MergeStrategy.discard
+ case "log4j2.properties" => MergeStrategy.discard
case m if m.toLowerCase(Locale.ROOT).startsWith("meta-inf/services/")
=> MergeStrategy.filterDistinctLines
case "reference.conf" => MergeStrategy.concat
diff --git a/resource-managers/kubernetes/integration-tests/pom.xml b/resource-managers/kubernetes/integration-tests/pom.xml
index 9c6737ad75294..4c5f14b79f690 100644
--- a/resource-managers/kubernetes/integration-tests/pom.xml
+++ b/resource-managers/kubernetes/integration-tests/pom.xml
@@ -145,7 +145,7 @@
-ea -Xmx4g -XX:ReservedCodeCacheSize=1g ${extraScalaTestArgs}
- file:src/test/resources/log4j.properties
+ file:src/test/resources/log4j2.properties
true
${spark.kubernetes.test.imageTagFile}
${spark.kubernetes.test.unpackSparkDir}
diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/log-config-test-log4j.properties b/resource-managers/kubernetes/integration-tests/src/test/resources/log-config-test-log4j.properties
index d3e13d8542ba1..17b8d598ac6f8 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/resources/log-config-test-log4j.properties
+++ b/resource-managers/kubernetes/integration-tests/src/test/resources/log-config-test-log4j.properties
@@ -16,8 +16,11 @@
#
# This log4j config file is for integration test SparkConfPropagateSuite.
-log4j.rootCategory=DEBUG, console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n
+rootLogger.level = debug
+rootLogger.appenderRef.stdout.ref = console
+
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c: %m%n
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SparkConfPropagateSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SparkConfPropagateSuite.scala
index 3651398b1d627..d51f4d7d5d5ed 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SparkConfPropagateSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SparkConfPropagateSuite.scala
@@ -37,8 +37,8 @@ private[spark] trait SparkConfPropagateSuite { k8sSuite: KubernetesSuite =>
try {
Files.write(new File(logConfFilePath).toPath, content.getBytes)
- sparkAppConf.set("spark.driver.extraJavaOptions", "-Dlog4j.debug")
- sparkAppConf.set("spark.executor.extraJavaOptions", "-Dlog4j.debug")
+ sparkAppConf.set("spark.driver.extraJavaOptions", "-Dlog4j2.debug")
+ sparkAppConf.set("spark.executor.extraJavaOptions", "-Dlog4j2.debug")
sparkAppConf.set("spark.kubernetes.executor.deleteOnTermination", "false")
val log4jExpectedLog =
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
index 4189423d1ccc7..f92bcdd677540 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
@@ -47,15 +47,21 @@ abstract class BaseYarnClusterSuite
// log4j configuration for the YARN containers, so that their output is collected
// by YARN instead of trying to overwrite unit-tests.log.
protected val LOG4J_CONF = """
- |log4j.rootCategory=DEBUG, console
- |log4j.appender.console=org.apache.log4j.ConsoleAppender
- |log4j.appender.console.target=System.err
- |log4j.appender.console.layout=org.apache.log4j.PatternLayout
- |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
- |log4j.logger.org.apache.hadoop=WARN
- |log4j.logger.org.eclipse.jetty=WARN
- |log4j.logger.org.mortbay=WARN
- |log4j.logger.org.sparkproject.jetty=WARN
+ |rootLogger.level = debug
+ |rootLogger.appenderRef.stdout.ref = console
+ |appender.console.type = Console
+ |appender.console.name = console
+ |appender.console.target = SYSTEM_ERR
+ |appender.console.layout.type = PatternLayout
+ |appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+ |logger.jetty.name = org.sparkproject.jetty
+ |logger.jetty.level = warn
+ |logger.eclipse.name = org.eclipse.jetty
+ |logger.eclipse.level = warn
+ |logger.hadoop.name = org.apache.hadoop
+ |logger.hadoop.level = warn
+ |logger.mortbay.name = org.mortbay
+ |logger.mortbay.level = warn
""".stripMargin
private var yarnCluster: MiniYARNCluster = _
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 9fd3c70fa86fc..1c20723ff7ade 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -295,10 +295,12 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
val log4jConf = new File(tempDir, "log4j.properties")
val logOutFile = new File(tempDir, "logs")
Files.write(
- s"""log4j.rootCategory=DEBUG,file
- |log4j.appender.file=org.apache.log4j.FileAppender
- |log4j.appender.file.file=$logOutFile
- |log4j.appender.file.layout=org.apache.log4j.PatternLayout
+ s"""rootLogger.level = debug
+ |rootLogger.appenderRef.file.ref = file
+ |appender.file.type = File
+ |appender.file.name = file
+ |appender.file.fileName = $logOutFile
+ |appender.file.layout.type = PatternLayout
|""".stripMargin,
log4jConf, StandardCharsets.UTF_8)
// Since this test is trying to extract log output from the SparkSubmit process itself,
@@ -307,7 +309,8 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
val confDir = new File(tempDir, "conf")
confDir.mkdir()
val javaOptsFile = new File(confDir, "java-opts")
- Files.write(s"-Dlog4j.configuration=file://$log4jConf\n", javaOptsFile, StandardCharsets.UTF_8)
+ Files.write(s"-Dlog4j.configurationFile=file://$log4jConf\n", javaOptsFile,
+ StandardCharsets.UTF_8)
val result = File.createTempFile("result", null, tempDir)
val finalState = runSpark(clientMode = false,
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 8e939a5471997..6f1a4397ae932 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -1220,11 +1220,13 @@ abstract class HiveThriftServer2TestBase extends SparkFunSuite with BeforeAndAft
val tempLog4jConf = Utils.createTempDir().getCanonicalPath
Files.write(
- """log4j.rootCategory=INFO, console
- |log4j.appender.console=org.apache.log4j.ConsoleAppender
- |log4j.appender.console.target=System.err
- |log4j.appender.console.layout=org.apache.log4j.PatternLayout
- |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+ """rootLogger.level = info
+ |rootLogger.appenderRef.stdout.ref = console
+ |appender.console.type = Console
+ |appender.console.name = console
+ |appender.console.target = SYSTEM_ERR
+ |appender.console.layout.type = PatternLayout
+ |appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
""".stripMargin,
new File(s"$tempLog4jConf/log4j.properties"),
StandardCharsets.UTF_8)