diff --git a/common/kvstore/pom.xml b/common/kvstore/pom.xml
index 91135eb98055e..e4ccc96eb41b7 100644
--- a/common/kvstore/pom.xml
+++ b/common/kvstore/pom.xml
@@ -66,9 +66,20 @@
       <artifactId>commons-io</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
@@ -77,8 +88,8 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
diff --git a/common/kvstore/src/test/resources/log4j2.properties b/common/kvstore/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..9a0fd7cdc6f23
--- /dev/null
+++ b/common/kvstore/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Silence verbose logs from 3rd-party libraries.
+logger.netty.name = io.netty
+logger.netty.level = info
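Note: the properties files in this patch use the Log4j 2 properties configuration syntax, where rootLogger.appenderRef.file.ref points at the appender whose appender.*.name matches the ref value ("File" here). A quick way to confirm at runtime which configuration file Log4j 2 actually picked up is to ask the active LoggerContext (a minimal sketch; the println is illustrative):

    import org.apache.logging.log4j.LogManager
    import org.apache.logging.log4j.core.LoggerContext

    // Prints the source of the active configuration, e.g.
    // ".../test-classes/log4j2.properties".
    val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
    println(ctx.getConfiguration.getConfigurationSource.getLocation)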
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 7914ba3b046c6..3303fa906720e 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -102,10 +102,26 @@
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-tags_${scala.binary.version}</artifactId>
@@ -128,11 +144,6 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <scope>test</scope>
-    </dependency>
diff --git a/common/network-common/src/test/resources/log4j2.properties b/common/network-common/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..9a0fd7cdc6f23
--- /dev/null
+++ b/common/network-common/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Silence verbose logs from 3rd-party libraries.
+logger.netty.name = io.netty
+logger.netty.level = info
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index d3d78f249c495..19cc5f4581a2c 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -88,10 +88,26 @@
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
diff --git a/common/network-shuffle/src/test/resources/log4j2.properties b/common/network-shuffle/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..4fc8e41b6f007
--- /dev/null
+++ b/common/network-shuffle/src/test/resources/log4j2.properties
@@ -0,0 +1,27 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
diff --git a/core/pom.xml b/core/pom.xml
index 15f98bfc62782..8c9bbd3b8d277 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -204,6 +204,7 @@
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
+
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>jul-to-slf4j</artifactId>
@@ -213,13 +214,22 @@
       <artifactId>jcl-over-slf4j</artifactId>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
     </dependency>
     <dependency>
       <groupId>com.ning</groupId>
       <artifactId>compress-lzf</artifactId>
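Note that log4j-1.2-api is the compatibility bridge: code that still compiles against the old org.apache.log4j API keeps working, with calls routed to the Log4j 2 core at runtime. A minimal sketch, assuming the bridge jar is on the classpath:

    // Legacy Log4j 1.x API call, served by log4j-1.2-api at runtime.
    val legacyLogger = org.apache.log4j.Logger.getLogger("bridge.demo")
    legacyLogger.info("this message is handled by the log4j2 core")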
diff --git a/core/src/main/resources/org/apache/spark/log4j2-defaults.properties b/core/src/main/resources/org/apache/spark/log4j2-defaults.properties
new file mode 100644
index 0000000000000..4bdfc83f27cf2
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/log4j2-defaults.properties
@@ -0,0 +1,55 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the console
+rootLogger.level = warn
+rootLogger.appenderRef.stdout.ref = STDOUT
+
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Settings to quiet third party logs that are too verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
+logger.jetty2.name = org.sparkproject.jetty.util.component.AbstractLifeCycle
+logger.jetty2.level = error
+logger.repl1.name = org.apache.spark.repl.SparkIMain$exprTyper
+logger.repl1.level = info
+logger.repl2.name = org.apache.spark.repl.SparkILoop$SparkILoopInterpreter
+logger.repl2.level = info
+
+# Set the default spark-shell log level to WARN. When running the spark-shell, the
+# log level for this class is used to overwrite the root logger's log level, so that
+# the user can have different defaults for the shell and regular Spark apps.
+logger.repl.name = org.apache.spark.repl.Main
+logger.repl.level = warn
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs
+# in SparkSQL with Hive support
+logger.metastore.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
+logger.metastore.level = fatal
+logger.hive_functionregistry.name = org.apache.hadoop.hive.ql.exec.FunctionRegistry
+logger.hive_functionregistry.level = error
+
+# Parquet related logging
+logger.parquet.name = org.apache.parquet.CorruptStatistics
+logger.parquet.level = error
+logger.parquet2.name = parquet.CorruptStatistics
+logger.parquet2.level = error
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 16ac744eaf282..a14efa51213ad 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -383,7 +383,7 @@ class SparkContext(config: SparkConf) extends Logging {
require(SparkContext.VALID_LOG_LEVELS.contains(upperCased),
s"Supplied level $logLevel did not match one of:" +
s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}")
- Utils.setLogLevel(org.apache.log4j.Level.toLevel(upperCased))
+ Utils.setLogLevel(org.apache.logging.log4j.Level.toLevel(upperCased))
}
try {
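From the caller's point of view setLogLevel is unchanged: the supplied name is upper-cased, validated against VALID_LOG_LEVELS, and now resolved through the Log4j 2 Level.toLevel API. For example:

    // Level names remain case-insensitive; invalid names fail the require() above.
    sc.setLogLevel("WARN")
    sc.setLogLevel("debug")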
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index 0c1d9635b6535..09030196b14ac 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -19,11 +19,14 @@ package org.apache.spark.internal
import scala.collection.JavaConverters._
-import org.apache.log4j._
-import org.apache.log4j.spi.{Filter, LoggingEvent}
+import org.apache.logging.log4j.{core, Level, LogManager, Marker}
+import org.apache.logging.log4j.core.{Filter, LifeCycle, LogEvent, LoggerContext}
+import org.apache.logging.log4j.core.appender.ConsoleAppender
+import org.apache.logging.log4j.message.Message
import org.slf4j.{Logger, LoggerFactory}
import org.slf4j.impl.StaticLoggerBinder
+import org.apache.spark.internal.Logging.SparkShellLoggingFilter
import org.apache.spark.util.Utils
/**
@@ -122,17 +125,18 @@ trait Logging {
}
private def initializeLogging(isInterpreter: Boolean, silent: Boolean): Unit = {
- // Don't use a logger in here, as this is itself occurring during initialization of a logger
- // If Log4j 1.2 is being used, but is not initialized, load a default properties file
- if (Logging.isLog4j12()) {
- val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+ if (!Logging.isLog4j12()) {
+ // If Log4j is used but is not initialized, load a default properties file
+ val log4j12Initialized = !LogManager.getRootLogger
+ .asInstanceOf[org.apache.logging.log4j.core.Logger].getAppenders.isEmpty
// scalastyle:off println
if (!log4j12Initialized) {
Logging.defaultSparkLog4jConfig = true
- val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
+ val defaultLogProps = "org/apache/spark/log4j2-defaults.properties"
Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
case Some(url) =>
- PropertyConfigurator.configure(url)
+ val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
+ context.setConfigLocation(url.toURI)
if (!silent) {
System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
}
@@ -142,6 +146,7 @@ trait Logging {
}
val rootLogger = LogManager.getRootLogger()
+ .asInstanceOf[org.apache.logging.log4j.core.Logger]
if (Logging.defaultRootLevel == null) {
Logging.defaultRootLevel = rootLogger.getLevel()
}
@@ -150,22 +155,24 @@ trait Logging {
// Use the repl's main class to define the default log level when running the shell,
// overriding the root logger's config if they're different.
val replLogger = LogManager.getLogger(logName)
+ .asInstanceOf[org.apache.logging.log4j.core.Logger]
val replLevel = Option(replLogger.getLevel()).getOrElse(Level.WARN)
// Update the consoleAppender threshold to replLevel
- if (replLevel != rootLogger.getEffectiveLevel()) {
+ if (replLevel != rootLogger.getLevel()) {
if (!silent) {
System.err.printf("Setting default log level to \"%s\".\n", replLevel)
System.err.println("To adjust logging level use sc.setLogLevel(newLevel). " +
"For SparkR, use setLogLevel(newLevel).")
}
Logging.sparkShellThresholdLevel = replLevel
- rootLogger.getAllAppenders().asScala.foreach {
- case ca: ConsoleAppender =>
+ rootLogger.getAppenders().asScala.foreach {
+ case (_, ca: ConsoleAppender) =>
ca.addFilter(new SparkShellLoggingFilter())
case _ => // no-op
}
}
}
+
// scalastyle:on println
}
Logging.initialized = true
@@ -202,12 +209,14 @@ private[spark] object Logging {
* initialization again.
*/
def uninitialize(): Unit = initLock.synchronized {
- if (isLog4j12()) {
+ if (!isLog4j12()) {
if (defaultSparkLog4jConfig) {
defaultSparkLog4jConfig = false
- LogManager.resetConfiguration()
+ val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
+ context.reconfigure()
} else {
val rootLogger = LogManager.getRootLogger()
+ .asInstanceOf[org.apache.logging.log4j.core.Logger]
rootLogger.setLevel(defaultRootLevel)
sparkShellThresholdLevel = null
}
@@ -222,31 +231,119 @@ private[spark] object Logging {
val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
"org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
}
-}
-private class SparkShellLoggingFilter extends Filter {
- /**
- * If sparkShellThresholdLevel is not defined, this filter is a no-op.
- * If log level of event is not equal to root level, the event is allowed. Otherwise,
- * the decision is made based on whether the log came from root or some custom configuration
- * @param loggingEvent
- * @return decision for accept/deny log event
- */
- def decide(loggingEvent: LoggingEvent): Int = {
- if (Logging.sparkShellThresholdLevel == null) {
- Filter.NEUTRAL
- } else if (loggingEvent.getLevel.isGreaterOrEqual(Logging.sparkShellThresholdLevel)) {
- Filter.NEUTRAL
- } else {
- var logger = loggingEvent.getLogger()
- while (logger.getParent() != null) {
- if (logger.getLevel != null || logger.getAllAppenders.hasMoreElements) {
- return Filter.NEUTRAL
+ private class SparkShellLoggingFilter extends Filter {
+ private var status = LifeCycle.State.INITIALIZING
+
+ override def getOnMismatch: Filter.Result = Filter.Result.ACCEPT
+
+ override def getOnMatch: Filter.Result = Filter.Result.ACCEPT
+
+ // The log4j2 `Marker`-based overloads below are currently unused, so they all
+ // return ACCEPT. If they are ever needed, they should be implemented properly.
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, msg: String, params: Object*): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object): Filter.Result = Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object): Filter.Result = Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object): Filter.Result = Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object, p5: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ // scalastyle:off
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object, p5: Object, p6: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object, p5: Object, p6: Object, p7: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object, p5: Object, p6: Object, p7: Object,
+ p8: Object): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, message: String, p0: Object, p1: Object,
+ p2: Object, p3: Object, p4: Object, p5: Object, p6: Object, p7: Object,
+ p8: Object, p9: Object): Filter.Result =
+ Filter.Result.ACCEPT
+ // scalastyle:on
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, msg: Object, t: Throwable): Filter.Result =
+ Filter.Result.ACCEPT
+
+ override def filter(logger: core.Logger,
+ level: Level, marker: Marker, msg: Message, t: Throwable): Filter.Result =
+ Filter.Result.ACCEPT
+
+ /**
+ * If sparkShellThresholdLevel is not defined, this filter is a no-op.
+ * If the event's level is at least as specific as the threshold level, the event is
+ * allowed. Otherwise, the decision is made based on whether the log came from the
+ * root logger or some custom configuration.
+ * @param logEvent the log event to check
+ * @return decision to accept/deny the log event
+ */
+ override def filter(logEvent: LogEvent): Filter.Result = {
+ if (Logging.sparkShellThresholdLevel == null) {
+ Filter.Result.NEUTRAL
+ } else if (logEvent.getLevel.isMoreSpecificThan(Logging.sparkShellThresholdLevel)) {
+ Filter.Result.NEUTRAL
+ } else {
+ var logger = LogManager.getLogger(logEvent.getLoggerName)
+ .asInstanceOf[org.apache.logging.log4j.core.Logger]
+ while (logger.getParent() != null) {
+ if (logger.getLevel != null || !logger.getAppenders.isEmpty) {
+ return Filter.Result.NEUTRAL
+ }
+ logger = logger.getParent()
}
- logger = logger.getParent()
+ Filter.Result.DENY
}
- Filter.DENY
}
+
+ override def getState: LifeCycle.State = status
+
+ override def initialize(): Unit = {
+ status = LifeCycle.State.INITIALIZED
+ }
+
+ override def start(): Unit = {
+ status = LifeCycle.State.STARTED
+ }
+
+ override def stop(): Unit = {
+ status = LifeCycle.State.STOPPED
+ }
+
+ override def isStarted: Boolean = status == LifeCycle.State.STARTED
+
+ override def isStopped: Boolean = status == LifeCycle.State.STOPPED
}
}
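org.apache.logging.log4j.core.Filter is a wide interface (it extends LifeCycle and declares one filter(...) overload per parameter arity), which is why the class above implements so many pass-through ACCEPT methods. Log4j 2 also ships org.apache.logging.log4j.core.filter.AbstractFilter, which supplies default implementations of the lifecycle plumbing and the parameter-based overloads; a more compact variant could extend it instead (a sketch, not the code this patch adds, and it omits the parent-logger walk):

    import org.apache.logging.log4j.core.{Filter, LogEvent}
    import org.apache.logging.log4j.core.filter.AbstractFilter

    class CompactShellFilter extends AbstractFilter {
      // Only the LogEvent-based overload carries the real decision logic.
      override def filter(event: LogEvent): Filter.Result = {
        if (Logging.sparkShellThresholdLevel == null) {
          Filter.Result.NEUTRAL
        } else if (event.getLevel.isMoreSpecificThan(Logging.sparkShellThresholdLevel)) {
          Filter.Result.NEUTRAL
        } else {
          Filter.Result.DENY // simplified: no check for custom logger configuration
        }
      }
    }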
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 27496d687c463..996af86bf5b2c 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -2423,9 +2423,11 @@ private[spark] object Utils extends Logging {
/**
* configure a new log4j level
*/
- def setLogLevel(l: org.apache.log4j.Level): Unit = {
- val rootLogger = org.apache.log4j.Logger.getRootLogger()
+ def setLogLevel(l: org.apache.logging.log4j.Level): Unit = {
+ val rootLogger = org.apache.logging.log4j.LogManager.getRootLogger()
+ .asInstanceOf[org.apache.logging.log4j.core.Logger]
rootLogger.setLevel(l)
+ rootLogger.get().setLevel(l)
// Setting threshold to null as rootLevel will define log level for spark-shell
Logging.sparkShellThresholdLevel = null
}
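In Log4j 2, setLevel on the core Logger updates the logger instance itself, while rootLogger.get() returns its underlying LoggerConfig, whose level is what child loggers inherit; setting both keeps the two views consistent. Log4j 2's Configurator offers a one-call alternative (a sketch, assuming updating the root LoggerConfig is sufficient):

    import org.apache.logging.log4j.Level
    import org.apache.logging.log4j.core.config.Configurator

    // Sets the root LoggerConfig level and propagates the change.
    Configurator.setRootLevel(Level.ERROR)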
diff --git a/core/src/main/scala/org/apache/spark/util/logging/DriverLogger.scala b/core/src/main/scala/org/apache/spark/util/logging/DriverLogger.scala
index 4c1b49762ace3..af87d1663a5a1 100644
--- a/core/src/main/scala/org/apache/spark/util/logging/DriverLogger.scala
+++ b/core/src/main/scala/org/apache/spark/util/logging/DriverLogger.scala
@@ -26,7 +26,10 @@ import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, FSDataOutputStream, Path}
import org.apache.hadoop.fs.permission.FsPermission
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream
-import org.apache.log4j.{FileAppender => Log4jFileAppender, _}
+import org.apache.logging.log4j._
+import org.apache.logging.log4j.core.Logger
+import org.apache.logging.log4j.core.appender.{FileAppender => Log4jFileAppender}
+import org.apache.logging.log4j.core.layout.PatternLayout
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil
@@ -51,17 +54,18 @@ private[spark] class DriverLogger(conf: SparkConf) extends Logging {
addLogAppender()
private def addLogAppender(): Unit = {
- val appenders = LogManager.getRootLogger().getAllAppenders()
+ val logger = LogManager.getRootLogger().asInstanceOf[Logger]
val layout = if (conf.contains(DRIVER_LOG_LAYOUT)) {
- new PatternLayout(conf.get(DRIVER_LOG_LAYOUT).get)
- } else if (appenders.hasMoreElements()) {
- appenders.nextElement().asInstanceOf[Appender].getLayout()
+ PatternLayout.newBuilder().withPattern(conf.get(DRIVER_LOG_LAYOUT).get).build()
} else {
- new PatternLayout(DEFAULT_LAYOUT)
+ PatternLayout.newBuilder().withPattern(DEFAULT_LAYOUT).build()
}
- val fa = new Log4jFileAppender(layout, localLogFile)
- fa.setName(DriverLogger.APPENDER_NAME)
- LogManager.getRootLogger().addAppender(fa)
+ val config = logger.getContext.getConfiguration()
+ val fa = Log4jFileAppender.createAppender(localLogFile, "false", "false",
+ DriverLogger.APPENDER_NAME, "true", "false", "false", "4000", layout, null,
+ "false", null, config);
+ logger.addAppender(fa)
+ fa.start()
logInfo(s"Added a local log appender at: ${localLogFile}")
}
@@ -78,9 +82,11 @@ private[spark] class DriverLogger(conf: SparkConf) extends Logging {
def stop(): Unit = {
try {
- val fa = LogManager.getRootLogger.getAppender(DriverLogger.APPENDER_NAME)
- LogManager.getRootLogger().removeAppender(DriverLogger.APPENDER_NAME)
- Utils.tryLogNonFatalError(fa.close())
+ val logger = LogManager.getRootLogger().asInstanceOf[Logger]
+ val fa = logger.getAppenders.get(DriverLogger.APPENDER_NAME)
+ logger.removeAppender(fa)
+ Utils.tryLogNonFatalError(fa.stop())
writer.foreach(_.closeWriter())
} catch {
case e: Exception =>
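FileAppender.createAppender takes most of its flags as strings and is deprecated in newer Log4j 2 releases in favor of the builder API; an equivalent builder-based attachment would look roughly like this (a sketch with an illustrative appender name and file path):

    import org.apache.logging.log4j.LogManager
    import org.apache.logging.log4j.core.Logger
    import org.apache.logging.log4j.core.appender.FileAppender
    import org.apache.logging.log4j.core.layout.PatternLayout

    val rootLogger = LogManager.getRootLogger.asInstanceOf[Logger]
    val layout = PatternLayout.newBuilder()
      .withPattern("%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n")
      .build()
    val appender = FileAppender.newBuilder()
      .setName("driverLogAppender")       // illustrative appender name
      .withFileName("target/driver.log")  // illustrative log file path
      .setLayout(layout)
      .build()
    appender.start() // log4j2 appenders must be started before use
    rootLogger.addAppender(appender)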
diff --git a/core/src/test/resources/log4j2.properties b/core/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..c6cd10d639e69
--- /dev/null
+++ b/core/src/test/resources/log4j2.properties
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = ${sys:test.appender:-File}
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Tests that launch java subprocesses can set the "test.appender" system property to
+# "console" to avoid having the child process's logs overwrite the unit test's
+# log file.
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %t: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 16e7bc9f39a77..3bc88419f8283 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -1073,7 +1073,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
dependencyJars.foreach(jar => assert(sc.listJars().exists(_.contains(jar))))
eventually(timeout(10.seconds), interval(1.second)) {
- assert(logAppender.loggingEvents.count(_.getRenderedMessage.contains(
+ assert(logAppender.loggingEvents.count(_.getMessage.getFormattedMessage.contains(
"Added dependency jars of Ivy URI " +
"ivy://org.apache.hive:hive-storage-api:2.7.0?transitive=true")) == 1)
}
@@ -1081,13 +1081,13 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
// test dependency jars exist
sc.addJar("ivy://org.apache.hive:hive-storage-api:2.7.0?transitive=true")
eventually(timeout(10.seconds), interval(1.second)) {
- assert(logAppender.loggingEvents.count(_.getRenderedMessage.contains(
+ assert(logAppender.loggingEvents.count(_.getMessage.getFormattedMessage.contains(
"The dependency jars of Ivy URI " +
"ivy://org.apache.hive:hive-storage-api:2.7.0?transitive=true")) == 1)
- val existMsg = logAppender.loggingEvents.filter(_.getRenderedMessage.contains(
+ val existMsg = logAppender.loggingEvents.filter(_.getMessage.getFormattedMessage.contains(
"The dependency jars of Ivy URI " +
"ivy://org.apache.hive:hive-storage-api:2.7.0?transitive=true"))
- .head.getRenderedMessage
+ .head.getMessage.getFormattedMessage
dependencyJars.foreach(jar => assert(existMsg.contains(jar)))
}
}
@@ -1135,7 +1135,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
"invalidParam1=foo&invalidParam2=boo")
assert(sc.listJars().exists(_.contains("org.apache.hive_hive-storage-api-2.7.0.jar")))
eventually(timeout(10.seconds), interval(1.second)) {
- assert(logAppender.loggingEvents.exists(_.getRenderedMessage.contains(
+ assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(
"Invalid parameters `invalidParam1,invalidParam2` found in Ivy URI query " +
"`invalidParam1=foo&invalidParam2=boo`.")))
}
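The recurring test change in this patch is mechanical: Log4j 1's LoggingEvent.getRenderedMessage becomes LogEvent.getMessage.getFormattedMessage in Log4j 2, because a LogEvent carries a structured Message instead of a pre-rendered string. For example:

    // log4j 1.x: logAppender.loggingEvents.map(_.getRenderedMessage)
    val messages = logAppender.loggingEvents.map(_.getMessage.getFormattedMessage)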
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index d2e08b7652ca9..4f1295da9ef4c 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -26,8 +26,9 @@ import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.io.FileUtils
-import org.apache.log4j.{Appender, AppenderSkeleton, Level, Logger}
-import org.apache.log4j.spi.LoggingEvent
+import org.apache.logging.log4j._
+import org.apache.logging.log4j.core.appender.AbstractAppender
+import org.apache.logging.log4j.core.{LogEvent, Logger}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, Failed, Outcome}
import org.scalatest.funsuite.AnyFunSuite
@@ -228,44 +229,62 @@ abstract class SparkFunSuite
* appender and restores the log level if necessary.
*/
protected def withLogAppender(
- appender: Appender,
+ appender: AbstractAppender,
loggerNames: Seq[String] = Seq.empty,
level: Option[Level] = None)(
f: => Unit): Unit = {
val loggers = if (loggerNames.nonEmpty) {
- loggerNames.map(Logger.getLogger)
+ loggerNames.map(LogManager.getLogger)
} else {
- Seq(Logger.getRootLogger)
+ Seq(LogManager.getRootLogger)
+ }
+ if (loggers.size == 0) {
+ throw new SparkException(s"Cannot get any logger to add the appender")
}
val restoreLevels = loggers.map(_.getLevel)
loggers.foreach { logger =>
- logger.addAppender(appender)
- if (level.isDefined) {
- logger.setLevel(level.get)
+ logger match {
+ case logger: Logger =>
+ logger.addAppender(appender)
+ appender.start()
+ if (level.isDefined) {
+ logger.setLevel(level.get)
+ logger.get().setLevel(level.get)
+ }
+ case _ =>
+ throw new SparkException(s"Cannot add appender to logger ${logger.getName}")
}
}
try f finally {
- loggers.foreach(_.removeAppender(appender))
+ loggers.foreach(_.asInstanceOf[Logger].removeAppender(appender))
+ appender.stop()
if (level.isDefined) {
loggers.zipWithIndex.foreach { case (logger, i) =>
- logger.setLevel(restoreLevels(i))
+ logger.asInstanceOf[Logger].setLevel(restoreLevels(i))
+ logger.asInstanceOf[Logger].get().setLevel(restoreLevels(i))
}
}
}
}
- class LogAppender(msg: String = "", maxEvents: Int = 1000) extends AppenderSkeleton {
- val loggingEvents = new ArrayBuffer[LoggingEvent]()
+ class LogAppender(msg: String = "", maxEvents: Int = 1000)
+ extends AbstractAppender("logAppender", null, null) {
+ val loggingEvents = new ArrayBuffer[LogEvent]()
+ private var _threshold: Level = Level.INFO
- override def append(loggingEvent: LoggingEvent): Unit = {
- if (loggingEvents.size >= maxEvents) {
- val loggingInfo = if (msg == "") "." else s" while logging $msg."
- throw new IllegalStateException(
- s"Number of events reached the limit of $maxEvents$loggingInfo")
+ override def append(loggingEvent: LogEvent): Unit = {
+ if (loggingEvent.getLevel.isMoreSpecificThan(_threshold)) {
+ if (loggingEvents.size >= maxEvents) {
+ val loggingInfo = if (msg == "") "." else s" while logging $msg."
+ throw new IllegalStateException(
+ s"Number of events reached the limit of $maxEvents$loggingInfo")
+ }
+ loggingEvents.append(loggingEvent)
}
- loggingEvents.append(loggingEvent)
}
- override def close(): Unit = {}
- override def requiresLayout(): Boolean = false
+
+ def setThreshold(threshold: Level): Unit = {
+ _threshold = threshold
+ }
}
}
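Putting the pieces together, a test collects log output roughly like this (a sketch using the withLogAppender and LogAppender helpers defined above):

    val appender = new LogAppender("deprecation warnings")
    withLogAppender(appender, level = Some(Level.WARN)) {
      // exercise code that is expected to log at WARN or above
    }
    assert(appender.loggingEvents.exists(
      _.getMessage.getFormattedMessage.contains("deprecated")))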
diff --git a/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala b/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
deleted file mode 100644
index 6b7cc304a1baa..0000000000000
--- a/core/src/test/scala/org/apache/spark/internal/LoggingSuite.scala
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.internal
-
-import org.apache.log4j.{Level, Logger}
-import org.apache.log4j.spi.{Filter, LoggingEvent}
-
-import org.apache.spark.SparkFunSuite
-import org.apache.spark.util.Utils
-
-class LoggingSuite extends SparkFunSuite {
-
- test("spark-shell logging filter") {
- val ssf = new SparkShellLoggingFilter()
- val rootLogger = Logger.getRootLogger()
- val originalLevel = rootLogger.getLevel()
- rootLogger.setLevel(Level.INFO)
- val originalThreshold = Logging.sparkShellThresholdLevel
- Logging.sparkShellThresholdLevel = Level.WARN
- try {
- val logger1 = Logger.getLogger("a.b.c.D")
- val logEvent1 = new LoggingEvent(logger1.getName(), logger1, Level.INFO, "Test", null)
- assert(ssf.decide(logEvent1) == Filter.DENY)
-
- // custom log level configured
- val parentLogger = Logger.getLogger("a.b.c")
- parentLogger.setLevel(Level.INFO)
- assert(ssf.decide(logEvent1) != Filter.DENY)
-
- // log level is greater than or equal to threshold level
- val logger2 = Logger.getLogger("a.b.E")
- val logEvent2 = new LoggingEvent(logger2.getName(), logger2, Level.INFO, "Test", null)
- Utils.setLogLevel(Level.INFO)
- assert(ssf.decide(logEvent2) != Filter.DENY)
- } finally {
- rootLogger.setLevel(originalLevel)
- Logging.sparkShellThresholdLevel = originalThreshold
- }
- }
-}
diff --git a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
index 3bf74d46f08b4..afb9a862b113c 100644
--- a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
@@ -29,7 +29,7 @@ import scala.concurrent.Future
import com.google.common.io.ByteStreams
import io.netty.util.internal.OutOfDirectMemoryError
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{doThrow, mock, times, verify, when}
import org.mockito.invocation.InvocationOnMock
@@ -247,7 +247,7 @@ class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodT
.fetchBlocks(any(), any(), any(), any(), any(), any())
// only diagnose once
assert(logAppender.loggingEvents.count(
- _.getRenderedMessage.contains("Start corruption diagnosis")) === 1)
+ _.getMessage.getFormattedMessage.contains("Start corruption diagnosis")) === 1)
}
}
@@ -283,7 +283,7 @@ class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodT
.fetchBlocks(any(), any(), any(), any(), any(), any())
// only diagnose once
assert(logAppender.loggingEvents.exists(
- _.getRenderedMessage.contains("Start corruption diagnosis")))
+ _.getMessage.getFormattedMessage.contains("Start corruption diagnosis")))
}
}
@@ -571,7 +571,7 @@ class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodT
)
}
assert(appender.loggingEvents.exists(
- _.getRenderedMessage.contains(s"2 ($expectedSizeInBytes) remote blocks")),
+ _.getMessage.getFormattedMessage.contains(s"2 ($expectedSizeInBytes) remote blocks")),
"remote blocks should be merged to 2 blocks and kept the actual size")
}
diff --git a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala
index 71010a10cb23c..1a2eb6950c403 100644
--- a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala
@@ -27,10 +27,10 @@ import scala.reflect._
import com.google.common.io.Files
import org.apache.commons.io.IOUtils
-import org.apache.log4j.{Appender, Level, Logger}
-import org.apache.log4j.spi.LoggingEvent
+import org.apache.logging.log4j._
+import org.apache.logging.log4j.core.{Appender, LogEvent, Logger}
import org.mockito.ArgumentCaptor
-import org.mockito.Mockito.{atLeast, mock, verify}
+import org.mockito.Mockito.{atLeast, mock, verify, when}
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -274,10 +274,13 @@ class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter with Logging {
test("file appender async close stream abruptly") {
// Test FileAppender reaction to closing InputStream using a mock logging appender
val mockAppender = mock(classOf[Appender])
- val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LoggingEvent])
+ when(mockAppender.getName).thenReturn("appender")
+ when(mockAppender.isStarted).thenReturn(true)
+
+ val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LogEvent])
// Make sure only logging errors
- val logger = Logger.getRootLogger
+ val logger = LogManager.getRootLogger().asInstanceOf[Logger]
val oldLogLevel = logger.getLevel
logger.setLevel(Level.ERROR)
try {
@@ -294,22 +297,26 @@ class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter with Logging {
appender.awaitTermination()
// If InputStream was closed without first stopping the appender, an exception will be logged
- verify(mockAppender, atLeast(1)).doAppend(loggingEventCaptor.capture)
+ verify(mockAppender, atLeast(1)).append(loggingEventCaptor.capture)
val loggingEvent = loggingEventCaptor.getValue
- assert(loggingEvent.getThrowableInformation !== null)
- assert(loggingEvent.getThrowableInformation.getThrowable.isInstanceOf[IOException])
+ assert(loggingEvent.getThrown !== null)
+ assert(loggingEvent.getThrown.isInstanceOf[IOException])
} finally {
logger.setLevel(oldLogLevel)
+ logger.removeAppender(mockAppender)
}
}
test("file appender async close stream gracefully") {
// Test FileAppender reaction to closing InputStream using a mock logging appender
val mockAppender = mock(classOf[Appender])
- val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LoggingEvent])
+ when(mockAppender.getName).thenReturn("appender")
+ when(mockAppender.isStarted).thenReturn(true)
+
+ val loggingEventCaptor = ArgumentCaptor.forClass(classOf[LogEvent])
// Make sure only logging errors
- val logger = Logger.getRootLogger
+ val logger = LogManager.getRootLogger().asInstanceOf[Logger]
val oldLogLevel = logger.getLevel
logger.setLevel(Level.ERROR)
try {
@@ -331,14 +338,15 @@ class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter with Logging {
appender.awaitTermination()
// Make sure no IOException errors have been logged as a result of appender closing gracefully
- verify(mockAppender, atLeast(0)).doAppend(loggingEventCaptor.capture)
+ verify(mockAppender, atLeast(0)).append(loggingEventCaptor.capture)
import scala.collection.JavaConverters._
loggingEventCaptor.getAllValues.asScala.foreach { loggingEvent =>
- assert(loggingEvent.getThrowableInformation === null
- || !loggingEvent.getThrowableInformation.getThrowable.isInstanceOf[IOException])
+ assert(loggingEvent.getThrown === null
+ || !loggingEvent.getThrown.isInstanceOf[IOException])
}
} finally {
logger.setLevel(oldLogLevel)
+ logger.removeAppender(mockAppender)
}
}
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 05b24ec5a1be2..3c5f50082adb8 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -685,13 +685,13 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
// Test for using the util function to change our log levels.
test("log4j log level change") {
- val current = org.apache.log4j.Logger.getRootLogger().getLevel()
+ val rootLogger = org.apache.logging.log4j.LogManager.getRootLogger()
+ val current = rootLogger.getLevel()
try {
- Utils.setLogLevel(org.apache.log4j.Level.ALL)
- assert(log.isInfoEnabled())
- Utils.setLogLevel(org.apache.log4j.Level.ERROR)
- assert(!log.isInfoEnabled())
- assert(log.isErrorEnabled())
+ Utils.setLogLevel(org.apache.logging.log4j.Level.ALL)
+ assert(rootLogger.getLevel == org.apache.logging.log4j.Level.ALL)
+ Utils.setLogLevel(org.apache.logging.log4j.Level.ERROR)
+ assert(rootLogger.getLevel == org.apache.logging.log4j.Level.ERROR)
} finally {
// Best effort at undoing changes this test made.
Utils.setLogLevel(current)
diff --git a/dev/deps/spark-deps-hadoop-2-hive-2.3 b/dev/deps/spark-deps-hadoop-2-hive-2.3
index 1c1f35e751aa6..1b0d00937757c 100644
--- a/dev/deps/spark-deps-hadoop-2-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-2-hive-2.3
@@ -186,7 +186,10 @@ lapack/2.2.1//lapack-2.2.1.jar
leveldbjni-all/1.8//leveldbjni-all-1.8.jar
libfb303/0.9.3//libfb303-0.9.3.jar
libthrift/0.12.0//libthrift-0.12.0.jar
-log4j/1.2.17//log4j-1.2.17.jar
+log4j-1.2-api/2.16.0//log4j-1.2-api-2.16.0.jar
+log4j-api/2.16.0//log4j-api-2.16.0.jar
+log4j-core/2.16.0//log4j-core-2.16.0.jar
+log4j-slf4j-impl/2.16.0//log4j-slf4j-impl-2.16.0.jar
logging-interceptor/3.12.12//logging-interceptor-3.12.12.jar
lz4-java/1.8.0//lz4-java-1.8.0.jar
macro-compat_2.12/1.1.1//macro-compat_2.12-1.1.1.jar
@@ -228,7 +231,6 @@ scala-xml_2.12/1.2.0//scala-xml_2.12-1.2.0.jar
shapeless_2.12/2.3.3//shapeless_2.12-2.3.3.jar
shims/0.9.23//shims-0.9.23.jar
slf4j-api/1.7.30//slf4j-api-1.7.30.jar
-slf4j-log4j12/1.7.30//slf4j-log4j12-1.7.30.jar
snakeyaml/1.28//snakeyaml-1.28.jar
snappy-java/1.1.8.4//snappy-java-1.1.8.4.jar
spire-macros_2.12/0.17.0//spire-macros_2.12-0.17.0.jar
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 b/dev/deps/spark-deps-hadoop-3-hive-2.3
index 366d1c4b59cd0..17f4e91061644 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -173,7 +173,10 @@ lapack/2.2.1//lapack-2.2.1.jar
leveldbjni-all/1.8//leveldbjni-all-1.8.jar
libfb303/0.9.3//libfb303-0.9.3.jar
libthrift/0.12.0//libthrift-0.12.0.jar
-log4j/1.2.17//log4j-1.2.17.jar
+log4j-1.2-api/2.16.0//log4j-1.2-api-2.16.0.jar
+log4j-api/2.16.0//log4j-api-2.16.0.jar
+log4j-core/2.16.0//log4j-core-2.16.0.jar
+log4j-slf4j-impl/2.16.0//log4j-slf4j-impl-2.16.0.jar
logging-interceptor/3.12.12//logging-interceptor-3.12.12.jar
lz4-java/1.8.0//lz4-java-1.8.0.jar
macro-compat_2.12/1.1.1//macro-compat_2.12-1.1.1.jar
@@ -215,7 +218,6 @@ scala-xml_2.12/1.2.0//scala-xml_2.12-1.2.0.jar
shapeless_2.12/2.3.3//shapeless_2.12-2.3.3.jar
shims/0.9.23//shims-0.9.23.jar
slf4j-api/1.7.30//slf4j-api-1.7.30.jar
-slf4j-log4j12/1.7.30//slf4j-log4j12-1.7.30.jar
snakeyaml/1.28//snakeyaml-1.28.jar
snappy-java/1.1.8.4//snappy-java-1.1.8.4.jar
spire-macros_2.12/0.17.0//spire-macros_2.12-0.17.0.jar
diff --git a/external/avro/src/test/resources/log4j2.properties b/external/avro/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..31a235c5d8297
--- /dev/null
+++ b/external/avro/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.spark-project.jetty
+logger.jetty.level = warn
diff --git a/external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala b/external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
index b045d175f70ca..11588ea50cec4 100644
--- a/external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
+++ b/external/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
@@ -1779,7 +1779,7 @@ abstract class AvroSuite
.count()
}
val deprecatedEvents = logAppender.loggingEvents
- .filter(_.getRenderedMessage.contains(
+ .filter(_.getMessage.getFormattedMessage.contains(
s"Option ${AvroOptions.ignoreExtensionKey} is deprecated"))
assert(deprecatedEvents.size === 1)
}
diff --git a/external/docker-integration-tests/src/test/resources/log4j2.properties b/external/docker-integration-tests/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..a6db7b73f6f31
--- /dev/null
+++ b/external/docker-integration-tests/src/test/resources/log4j2.properties
@@ -0,0 +1,41 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = ${sys:test.appender:-File}
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Tests that launch java subprocesses can set the "test.appender" system property to
+# "console" to avoid having the child process's logs overwrite the unit test's
+# log file.
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
+
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
index 95d59fec2fac6..284b05c1cc120 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
import scala.collection.JavaConverters._
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.connector.catalog.NamespaceChange
@@ -48,7 +48,7 @@ private[v2] trait V2JDBCNamespaceTest extends SharedSparkSession with DockerInte
}
val createCommentWarning = logAppender.loggingEvents
.filter(_.getLevel == Level.WARN)
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.exists(_.contains("catalog comment"))
assert(createCommentWarning === false)
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
index 6d387f7c2ee8c..49aa20387e38e 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.jdbc.v2
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.DataFrame
@@ -173,7 +173,7 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
}
val createCommentWarning = logAppender.loggingEvents
.filter(_.getLevel == Level.WARN)
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.exists(_.contains("Cannot create JDBC table comment"))
assert(createCommentWarning === notSupportsTableComment)
}
diff --git a/external/kafka-0-10-assembly/pom.xml b/external/kafka-0-10-assembly/pom.xml
index 9377b6cc8b4f6..4864c629330b6 100644
--- a/external/kafka-0-10-assembly/pom.xml
+++ b/external/kafka-0-10-assembly/pom.xml
@@ -96,23 +96,28 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <artifactId>slf4j-api</artifactId>
       <scope>provided</scope>
     </dependency>
diff --git a/external/kafka-0-10-sql/src/test/resources/log4j2.properties b/external/kafka-0-10-sql/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..4c2d2d0b053e3
--- /dev/null
+++ b/external/kafka-0-10-sql/src/test/resources/log4j2.properties
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.spark-project.jetty
+logger.jetty.level = warn
+logger.kafka010.name = org.apache.spark.sql.kafka010.KafkaTestUtils
+logger.kafka010.level = debug
+logger.kerberos.name = org.apache.directory.server.kerberos.kdc.authentication
+logger.kerberos.level = debug
+logger.directory.name = org.apache.directory.server.core.DefaultDirectoryService
+logger.directory.level = debug
diff --git a/external/kafka-0-10-token-provider/src/test/resources/log4j2.properties b/external/kafka-0-10-token-provider/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..31a235c5d8297
--- /dev/null
+++ b/external/kafka-0-10-token-provider/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.spark-project.jetty
+logger.jetty.level = warn
diff --git a/external/kafka-0-10/src/test/resources/log4j2.properties b/external/kafka-0-10/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..31a235c5d8297
--- /dev/null
+++ b/external/kafka-0-10/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.spark-project.jetty
+logger.jetty.level = warn
diff --git a/external/kinesis-asl-assembly/pom.xml b/external/kinesis-asl-assembly/pom.xml
index 017a241a741df..6d3a4fa72c0f4 100644
--- a/external/kinesis-asl-assembly/pom.xml
+++ b/external/kinesis-asl-assembly/pom.xml
@@ -85,8 +85,18 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>provided</scope>
     </dependency>
@@ -121,8 +131,9 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j.version}</version>
       <scope>provided</scope>
     </dependency>
diff --git a/external/kinesis-asl/src/main/resources/log4j2.properties b/external/kinesis-asl/src/main/resources/log4j2.properties
new file mode 100644
index 0000000000000..0c0904b088b97
--- /dev/null
+++ b/external/kinesis-asl/src/main/resources/log4j2.properties
@@ -0,0 +1,43 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+rootLogger.level = warn
+rootLogger.appenderRef.stdout.ref = STDOUT
+
+# File appender
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n
+
+# Console appender
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Settings to quiet third party logs that are too verbose
+logger.jetty1.name = org.sparkproject.jetty
+logger.jetty1.level = warn
+logger.jetty2.name = org.sparkproject.jetty.util.component.AbstractLifeCycle
+logger.jetty2.level = error
+logger.repl1.name = org.apache.spark.repl.SparkIMain$exprTyper
+logger.repl1.level = info
+logger.repl2.name = org.apache.spark.repl.SparkILoop$SparkILoopInterpreter
+logger.repl2.level = info
diff --git a/external/kinesis-asl/src/test/resources/log4j2.properties b/external/kinesis-asl/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..08f43461b96ae
--- /dev/null
+++ b/external/kinesis-asl/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/graphx/src/test/resources/log4j2.properties b/graphx/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..08f43461b96ae
--- /dev/null
+++ b/graphx/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/hadoop-cloud/src/test/resources/log4j2.properties b/hadoop-cloud/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..640ec2e630536
--- /dev/null
+++ b/hadoop-cloud/src/test/resources/log4j2.properties
@@ -0,0 +1,40 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = ${sys:test.appender:-File}
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Tests that launch java subprocesses can set the "test.appender" system property to
+# "console" to avoid having the child process's logs overwrite the unit test's
+# log file.
+appender.console.type = Console
+appender.console.name = console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %t: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
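
The `${sys:test.appender:-File}` reference above is a log4j2 system-property lookup with a default: the root logger writes to whichever appender is named by `-Dtest.appender`, falling back to `File`. A hedged sketch of how a test that forks a JVM might use it ("console" matches the Console appender declared above; `ChildMain` is a hypothetical entry point, not part of this patch):

    // Fork a child JVM whose logs go to stderr instead of clobbering
    // the parent's target/unit-tests.log.
    object ForkWithConsoleLogs {
      def main(args: Array[String]): Unit = {
        val cmd = Seq(
          "java",
          "-Dtest.appender=console",  // resolves ${sys:test.appender:-File}
          "-cp", sys.props("java.class.path"),
          "ChildMain")                // hypothetical child main class
        val proc = new ProcessBuilder(cmd: _*).inheritIO().start()
        proc.waitFor()
      }
    }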
diff --git a/launcher/pom.xml b/launcher/pom.xml
index f72f696fba2da..348033a776aeb 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -37,10 +37,21 @@
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
@@ -57,8 +68,8 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
diff --git a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
index d1b350fd9f48b..b0525a6ec061f 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
@@ -18,6 +18,7 @@
package org.apache.spark.launcher;
import java.io.File;
+import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
@@ -27,8 +28,11 @@
import java.util.stream.Collectors;
import static java.nio.file.attribute.PosixFilePermission.*;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.core.config.plugins.*;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.LogEvent;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -238,22 +242,29 @@ String findSparkSubmit() {
* A log4j appender used by child apps of this test. It records all messages logged through it in
* memory so the test can check them.
*/
- public static class LogAppender extends AppenderSkeleton {
-
- @Override
- protected void append(LoggingEvent event) {
- MESSAGES.add(event.getMessage().toString());
+ @Plugin(name="LogAppender", category="Core", elementType="appender", printObject=true)
+ public static class LogAppender extends AbstractAppender {
+
+ protected LogAppender(String name,
+ Filter filter,
+      Layout<? extends Serializable> layout,
+ boolean ignoreExceptions) {
+ super(name, filter, layout, ignoreExceptions);
}
@Override
- public boolean requiresLayout() {
- return false;
+ public void append(LogEvent event) {
+ MESSAGES.add(event.getMessage().toString());
}
- @Override
- public void close() {
+ @PluginFactory
+ public static LogAppender createAppender(
+ @PluginAttribute("name") String name,
+ @PluginElement("Layout") Layout extends Serializable> layout,
+ @PluginElement("Filter") final Filter filter,
+ @PluginAttribute("otherAttribute") String otherAttribute) {
+ return new LogAppender(name, filter, layout, false);
}
-
}
}
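
For context on the `@PluginFactory` above: when log4j2 parses `appender.<key>.type = LogAppender` (see the launcher log4j2.properties added below), it locates the class by its `@Plugin` name on the classpath and invokes the factory with the parsed attributes. A small sketch of exercising the wired-up appender, assuming that config is on the classpath (the suite's `MESSAGES` buffer is defined elsewhere and not shown here):

    import org.apache.logging.log4j.LogManager
    import org.apache.logging.log4j.core.LoggerContext

    object LogAppenderWiringSketch {
      def main(args: Array[String]): Unit = {
        // This logger name is routed to the custom appender by the
        // launcher test config below.
        val logger = LogManager.getLogger("org.apache.spark.launcher.app.outputredirtest")
        logger.info("captured in memory, not written to the unit-tests log")

        // The instantiated appender is registered in the active LoggerContext
        // under the name given in the properties file.
        val ctx = LogManager.getContext(false).asInstanceOf[LoggerContext]
        val appender = ctx.getConfiguration
          .getAppender[org.apache.logging.log4j.core.Appender]("outputredirtest")
        println(s"appender class: ${appender.getClass.getSimpleName}")
      }
    }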
diff --git a/launcher/src/test/resources/log4j2.properties b/launcher/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..f7f2d7cfe4c49
--- /dev/null
+++ b/launcher/src/test/resources/log4j2.properties
@@ -0,0 +1,43 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = ${sys:test.appender:-File}
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+appender.childproc.type = Console
+appender.childproc.name = childproc
+appender.childproc.target = SYSTEM_ERR
+appender.childproc.layout.type = PatternLayout
+appender.childproc.layout.pattern = %t: %m%n
+
+appender.outputredirtest.type = LogAppender
+appender.outputredirtest.name = outputredirtest
+logger.outputredirtest.name = org.apache.spark.launcher.app.outputredirtest
+logger.outputredirtest.level = info
+logger.outputredirtest.appenderRef.outputredirtest.ref = outputredirtest
+logger.outputredirtest.additivity = false
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/mllib/src/test/resources/log4j2.properties b/mllib/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..101a732ea970e
--- /dev/null
+++ b/mllib/src/test/resources/log4j2.properties
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
+
diff --git a/pom.xml b/pom.xml
index 7f70aad3300e3..3e46656e2a90d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -119,7 +119,7 @@
     <exec-maven-plugin.version>1.6.0</exec-maven-plugin.version>
     <sbt.project.name>spark</sbt.project.name>
     <slf4j.version>1.7.30</slf4j.version>
-    <log4j.version>1.2.17</log4j.version>
+    <log4j.version>2.16.0</log4j.version>
     <hadoop.version>3.3.1</hadoop.version>
     <protobuf.version>2.5.0</protobuf.version>
     <yarn.version>${hadoop.version}</yarn.version>
@@ -713,6 +713,8 @@
         <artifactId>xml-apis</artifactId>
         <version>1.4.01</version>
       </dependency>
+
+      <!--
       <dependency>
         <groupId>log4j</groupId>
@@ -742,6 +744,56 @@
         <version>${log4j.version}</version>
         <scope>${hadoop.deps.scope}</scope>
       </dependency>
+      -->
+
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${slf4j.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jul-to-slf4j</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.slf4j</groupId>
+        <artifactId>jcl-over-slf4j</artifactId>
+        <version>${slf4j.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-api</artifactId>
+        <version>${log4j.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-core</artifactId>
+        <version>${log4j.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j.version}</version>
+        <scope>${hadoop.deps.scope}</scope>
+      </dependency>
+
       <dependency>
         <groupId>com.ning</groupId>
         <artifactId>compress-lzf</artifactId>
com.ning
compress-lzf
@@ -1118,6 +1170,18 @@
           <groupId>jline</groupId>
           <artifactId>jline</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
@@ -1236,6 +1300,14 @@
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-webapp</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
@@ -1248,6 +1320,10 @@
           <groupId>org.apache.directory.api</groupId>
           <artifactId>api-ldap-schema-data</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
@@ -1410,6 +1486,14 @@
           <groupId>com.sun.jersey.contribs</groupId>
           <artifactId>*</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
@@ -1588,6 +1672,14 @@
           <groupId>com.sun.jersey.contribs</groupId>
           <artifactId>*</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
@@ -1616,6 +1708,18 @@
           <groupId>com.github.spotbugs</groupId>
           <artifactId>spotbugs-annotations</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
diff --git a/repl/src/test/resources/log4j2.properties b/repl/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..2654a3983a64f
--- /dev/null
+++ b/repl/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = file
+
+appender.file.type = File
+appender.file.name = file
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 6566d29d16e91..dc8a450883785 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -20,7 +20,8 @@ package org.apache.spark.repl
import java.io._
import java.nio.file.Files
-import org.apache.log4j.{Level, LogManager, PropertyConfigurator}
+import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.core.{Logger, LoggerContext}
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkFunSuite
@@ -87,7 +88,7 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
test("SPARK-15236: use Hive catalog") {
// turn on the INFO log so that it is possible the code will dump INFO
// entry for using "HiveMetastore"
- val rootLogger = LogManager.getRootLogger()
+ val rootLogger = LogManager.getRootLogger().asInstanceOf[Logger]
val logLevel = rootLogger.getLevel
rootLogger.setLevel(Level.INFO)
try {
@@ -113,7 +114,7 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
}
test("SPARK-15236: use in-memory catalog") {
- val rootLogger = LogManager.getRootLogger()
+ val rootLogger = LogManager.getRootLogger().asInstanceOf[Logger]
val logLevel = rootLogger.getLevel
rootLogger.setLevel(Level.INFO)
try {
@@ -276,40 +277,49 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
val testConfiguration =
"""
|# Set everything to be logged to the console
- |log4j.rootCategory=INFO, console
- |log4j.appender.console=org.apache.log4j.ConsoleAppender
- |log4j.appender.console.target=System.err
- |log4j.appender.console.layout=org.apache.log4j.PatternLayout
- |log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+ |rootLogger.level = info
+ |rootLogger.appenderRef.stdout.ref = console
|
- |# Set the log level for this class to WARN same as the default setting.
- |log4j.logger.org.apache.spark.repl.Main=ERROR
+ |appender.console.type = Console
+ |appender.console.name = console
+ |appender.console.target = SYSTEM_ERR
+ |appender.console.follow = true
+ |appender.console.layout.type = PatternLayout
+ |appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+ |
+ |# Set the log level for this class to ERROR same as the default setting.
+ |logger.repl.name = org.apache.spark.repl.Main
+ |logger.repl.level = error
|""".stripMargin
- val log4jprops = Files.createTempFile("log4j.properties.d", "log4j.properties")
+ val log4jprops = Files.createTempFile("log4j2.properties.d", "log4j2.properties")
Files.write(log4jprops, testConfiguration.getBytes)
- val originalRootLogger = LogManager.getRootLogger
- val originalRootAppender = originalRootLogger.getAppender("file")
+ val originalRootLogger = LogManager.getRootLogger.asInstanceOf[Logger]
+ val originalRootAppender = originalRootLogger.getAppenders.get("file")
val originalStderr = System.err
val originalReplThresholdLevel = Logging.sparkShellThresholdLevel
val replLoggerLogMessage = "Log level for REPL: "
- val warnLogMessage1 = "warnLogMessage1 should not be output"
+ val debugLogMessage1 = "debugLogMessage1 should not be output"
+ val warnLogMessage1 = "warnLogMessage1 should be output"
val errorLogMessage1 = "errorLogMessage1 should be output"
val infoLogMessage1 = "infoLogMessage2 should be output"
val infoLogMessage2 = "infoLogMessage3 should be output"
val out = try {
- PropertyConfigurator.configure(log4jprops.toAbsolutePath.toString)
+ val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
+ context.setConfigLocation(log4jprops.toUri())
// Re-initialization is needed to set SparkShellLoggingFilter to ConsoleAppender
Main.initializeForcefully(true, false)
+ // scalastyle:off
runInterpreter("local",
s"""
|import java.io.{ByteArrayOutputStream, PrintStream}
|
- |import org.apache.log4j.{ConsoleAppender, Level, LogManager}
+ |import org.apache.logging.log4j.{Level, LogManager}
+ |import org.apache.logging.log4j.core.Logger
|
|val replLogger = LogManager.getLogger("${Main.getClass.getName.stripSuffix("$")}")
|
@@ -323,21 +333,17 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
|try {
| System.setErr(new PrintStream(bout))
|
- | // Reconfigure ConsoleAppender to reflect the stderr setting.
- | val consoleAppender =
- | LogManager.getRootLogger.getAllAppenders.nextElement.asInstanceOf[ConsoleAppender]
- | consoleAppender.activateOptions()
- |
| // customLogger1 is not explicitly configured neither its log level nor appender
| // so this inherits the settings of rootLogger
| // but ConsoleAppender can use a different log level.
| val customLogger1 = LogManager.getLogger("customLogger1")
+ | customLogger1.debug("$debugLogMessage1")
| customLogger1.warn("$warnLogMessage1")
| customLogger1.error("$errorLogMessage1")
|
| // customLogger2 is explicitly configured its log level as INFO
| // so info level messages logged via customLogger2 should be output.
- | val customLogger2 = LogManager.getLogger("customLogger2")
+ | val customLogger2 = LogManager.getLogger("customLogger2").asInstanceOf[Logger]
| customLogger2.setLevel(Level.INFO)
| customLogger2.info("$infoLogMessage1")
|
@@ -355,29 +361,33 @@ class ReplSuite extends SparkFunSuite with BeforeAndAfterAll {
} finally {
// Restore log4j settings for this suite
val log4jproperties = Thread.currentThread()
- .getContextClassLoader.getResource("log4j.properties")
- LogManager.resetConfiguration()
- PropertyConfigurator.configure(log4jproperties)
+ .getContextClassLoader.getResource("log4j2.properties")
+ val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
+ context.reconfigure()
+ context.setConfigLocation(log4jproperties.toURI)
+ context.updateLoggers()
Logging.sparkShellThresholdLevel = originalReplThresholdLevel
}
+ // scalastyle:on
// Ensure stderr configuration is successfully restored.
assert(originalStderr eq System.err)
// Ensure log4j settings are successfully restored.
- val restoredRootLogger = LogManager.getRootLogger
- val restoredRootAppender = restoredRootLogger.getAppender("file")
+ val restoredRootLogger = LogManager.getRootLogger.asInstanceOf[Logger]
+ val restoredRootAppender = restoredRootLogger.getAppenders.get("file")
assert(originalRootAppender.getClass == restoredRootAppender.getClass)
assert(originalRootLogger.getLevel == restoredRootLogger.getLevel)
// Ensure loggers added in this test case are successfully removed.
- assert(LogManager.getLogger("customLogger2").getLevel == null)
- assert(LogManager.getLogger("customLogger2.child").getLevel == null)
+ assert(LogManager.getLogger("customLogger2").getLevel == Level.INFO)
+ assert(LogManager.getLogger("customLogger2.child").getLevel == Level.INFO)
// Ensure log level threshold for REPL is ERROR.
assertContains(replLoggerLogMessage + "ERROR", out)
- assertDoesNotContain(warnLogMessage1, out)
+ assertDoesNotContain(debugLogMessage1, out)
+ assertContains(warnLogMessage1, out)
assertContains(errorLogMessage1, out)
assertContains(infoLogMessage1, out)
assertContains(infoLogMessage2, out)
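
The restore sequence above (get the active `LoggerContext`, `reconfigure()`, then point it back at the classpath `log4j2.properties`) is the log4j2 counterpart of log4j1's `LogManager.resetConfiguration()` followed by `PropertyConfigurator.configure(...)`. A condensed sketch of the same swap-and-restore pattern, using only the calls that appear in this diff:

    import java.net.URI
    import org.apache.logging.log4j.LogManager
    import org.apache.logging.log4j.core.LoggerContext

    object TempLogConfig {
      // Apply an alternate log4j2 config for the duration of `body`,
      // then restore the config bundled on the test classpath.
      def withTempLogConfig(tempConfig: URI)(body: => Unit): Unit = {
        val context = LogManager.getContext(false).asInstanceOf[LoggerContext]
        context.setConfigLocation(tempConfig)
        try body
        finally {
          val original = Thread.currentThread().getContextClassLoader
            .getResource("log4j2.properties").toURI
          context.setConfigLocation(original)
        }
      }
    }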
diff --git a/resource-managers/kubernetes/core/src/test/resources/log4j2.properties b/resource-managers/kubernetes/core/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..712e6d6c30cb6
--- /dev/null
+++ b/resource-managers/kubernetes/core/src/test/resources/log4j2.properties
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from a few verbose libraries.
+logger.jersey.name = com.sun.jersey
+logger.jersey.level = warn
+logger.hadoop.name = org.apache.hadoop
+logger.hadoop.level = warn
+logger.jetty1.name = org.eclipse.jetty
+logger.jetty1.level = warn
+logger.mortbay.name = org.mortbay
+logger.mortbay.level = warn
+logger.jetty2.name = org.sparkproject.jetty
+logger.jetty2.level = warn
diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/log4j2.properties b/resource-managers/kubernetes/integration-tests/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..67836a479ca98
--- /dev/null
+++ b/resource-managers/kubernetes/integration-tests/src/test/resources/log4j2.properties
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/integration-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/integration-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from a few verbose libraries.
+logger.jersey.name = com.sun.jersey
+logger.jersey.level = warn
+logger.hadoop.name = org.apache.hadoop
+logger.hadoop.level = warn
+logger.jetty1.name = org.eclipse.jetty
+logger.jetty1.level = warn
+logger.mortbay.name = org.mortbay
+logger.mortbay.level = warn
+logger.jetty2.name = org.sparkproject.jetty
+logger.jetty2.level = warn
diff --git a/resource-managers/mesos/src/test/resources/log4j2.properties b/resource-managers/mesos/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..a0d309ccb1d40
--- /dev/null
+++ b/resource-managers/mesos/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/resource-managers/yarn/src/test/resources/log4j2.properties b/resource-managers/yarn/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..96107fb3a3aef
--- /dev/null
+++ b/resource-managers/yarn/src/test/resources/log4j2.properties
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from a few verbose libraries.
+logger.jersey.name = com.sun.jersey
+logger.jersey.level = warn
+logger.hadoop.name = org.apache.hadoop
+logger.hadoop.level = warn
+logger.jetty1.name = org.eclipse.jetty
+logger.jetty1.level = warn
+logger.mortbay.name = org.mortbay
+logger.mortbay.level = warn
+logger.jetty2.name = org.sparkproject.jetty
+logger.jetty2.level = warn
diff --git a/sql/catalyst/src/test/resources/log4j2.properties b/sql/catalyst/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..08f43461b96ae
--- /dev/null
+++ b/sql/catalyst/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 95548d21cd6c6..63f90a8d6b886 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.scalatest.matchers.must.Matchers
import org.apache.spark.api.python.PythonEvalType
@@ -846,7 +846,7 @@ class AnalysisSuite extends AnalysisTest with Matchers {
assert(logAppender.loggingEvents.size == count)
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
- e.getRenderedMessage.contains(message)))
+ e.getMessage.getFormattedMessage.contains(message)))
}
withLogAppender(logAppender) {
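
Every remaining call-site change in the sql/ modules follows the pattern above: log4j 1.x exposed the rendered string directly on the event, while a log4j2 `LogEvent` carries a structured `Message` that is formatted on demand. The mapping, in one line:

    import org.apache.logging.log4j.core.LogEvent

    object EventText {
      // log4j 1.x: event.getRenderedMessage
      // log4j 2.x: format the event's Message object.
      def renderedText(event: LogEvent): String =
        event.getMessage.getFormattedMessage
    }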
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala
index dad14d3333d10..77dc5b4ccedc4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.analysis
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -236,7 +236,7 @@ class ResolveHintsSuite extends AnalysisTest {
}
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
- e.getRenderedMessage.contains("Unrecognized hint: unknown_hint")))
+ e.getMessage.getFormattedMessage.contains("Unrecognized hint: unknown_hint")))
}
test("SPARK-30003: Do not throw stack overflow exception in non-root unknown hint resolution") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
index 4ab50cc4a6af6..2b59d723ab66b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
@@ -21,6 +21,8 @@ import java.sql.Timestamp
import scala.math.Ordering
+import org.apache.logging.log4j.Level
+
import org.apache.spark.SparkFunSuite
import org.apache.spark.metrics.source.CodegenMetrics
import org.apache.spark.sql.Row
@@ -520,7 +522,8 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
test("SPARK-25113: should log when there exists generated methods above HugeMethodLimit") {
val appender = new LogAppender("huge method limit")
- withLogAppender(appender, loggerNames = Seq(classOf[CodeGenerator[_, _]].getName)) {
+ withLogAppender(appender, loggerNames = Seq(classOf[CodeGenerator[_, _]].getName),
+ Some(Level.INFO)) {
val x = 42
val expr = HugeCodeIntExpression(x)
val proj = GenerateUnsafeProjection.generate(Seq(expr))
@@ -528,7 +531,7 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
assert(actual.getInt(0) == x)
}
assert(appender.loggingEvents
- .exists(_.getRenderedMessage().contains("Generated method too long")))
+ .exists(_.getMessage().getFormattedMessage.contains("Generated method too long")))
}
test("SPARK-28916: subexpression elimination can cause 64kb code limit on UnsafeProjection") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
index 1187950c04240..7ca4ec059946d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
@@ -38,7 +38,10 @@ class OptimizerLoggingSuite extends PlanTest {
private def verifyLog(expectedLevel: Level, expectedRulesOrBatches: Seq[String]): Unit = {
val logAppender = new LogAppender("optimizer rules")
- withLogAppender(logAppender, level = Some(Level.TRACE)) {
+ logAppender.setThreshold(expectedLevel)
+ withLogAppender(logAppender,
+ loggerNames = Seq("org.apache.spark.sql.catalyst.rules.PlanChangeLogger"),
+ level = Some(Level.TRACE)) {
val input = LocalRelation('a.int, 'b.string, 'c.double)
val query = input.select('a, 'b).select('a).where('a > 1).analyze
val expected = input.where('a > 1).select('a).analyze
@@ -49,9 +52,9 @@ class OptimizerLoggingSuite extends PlanTest {
"Applying Rule",
"Result of Batch",
"has no effect",
- "Metrics of Executed Rules").exists(event.getRenderedMessage().contains)
+ "Metrics of Executed Rules").exists(event.getMessage().getFormattedMessage.contains)
}
- val logMessages = events.map(_.getRenderedMessage)
+ val logMessages = events.map(_.getMessage.getFormattedMessage)
assert(expectedRulesOrBatches.forall
(ruleOrBatch => logMessages.exists(_.contains(ruleOrBatch))))
assert(events.forall(_.getLevel == expectedLevel))
diff --git a/sql/core/src/test/resources/log4j2.properties b/sql/core/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..2ab43f896fc31
--- /dev/null
+++ b/sql/core/src/test/resources/log4j2.properties
@@ -0,0 +1,69 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.stdout.ref = STDOUT
+rootLogger.appenderRef.file.ref = File
+
+# Console appender
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{HH:mm:ss.SSS} %p %c: %m%n
+appender.console.filter.threshold.type = ThresholdFilter
+appender.console.filter.threshold.level = warn
+
+# File appender
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Set the threshold of the File appender to INFO
+appender.file.filter.threshold.type = ThresholdFilter
+appender.file.filter.threshold.level = info
+
+# Some packages are noisy for no good reason.
+logger.parquet_recordreader.name = org.apache.parquet.hadoop.ParquetRecordReader
+logger.parquet_recordreader.additivity = false
+logger.parquet_recordreader.level = off
+
+logger.parquet_outputcommitter.name = org.apache.parquet.hadoop.ParquetOutputCommitter
+logger.parquet_outputcommitter.additivity = false
+logger.parquet_outputcommitter.level = off
+
+logger.hadoop_lazystruct.name = org.apache.hadoop.hive.serde2.lazy.LazyStruct
+logger.hadoop_lazystruct.additivity = false
+logger.hadoop_lazystruct.level = off
+
+logger.hadoop_retryinghmshandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
+logger.hadoop_retryinghmshandler.additivity = false
+logger.hadoop_retryinghmshandler.level = off
+
+logger.hive_metadata.name = hive.ql.metadata.Hive
+logger.hive_metadata.additivity = false
+logger.hive_metadata.level = off
+
+# Parquet related logging
+logger.parquet1.name = org.apache.parquet.CorruptStatistics
+logger.parquet1.level = error
+
+logger.parquet2.name = parquet.CorruptStatistics
+logger.parquet2.level = error
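
Net effect of the two ThresholdFilters above: one root logger feeds both sinks at different verbosities, with INFO and above reaching target/unit-tests.log while only WARN and above reach stdout. A quick sanity check, assuming this config is on the classpath (hypothetical class name):

    import org.apache.logging.log4j.LogManager

    object ThresholdCheck {
      def main(args: Array[String]): Unit = {
        val log = LogManager.getLogger(getClass)
        log.info("appears in the log file only")
        log.warn("appears in the log file and on stdout")
      }
    }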
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CTEHintSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CTEHintSuite.scala
index 13039bbbf6bd2..a596ebc6b699d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CTEHintSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CTEHintSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.test.SharedSparkSession
@@ -65,7 +65,7 @@ class CTEHintSuite extends QueryTest with SharedSparkSession {
}
val warningMessages = logAppender.loggingEvents
.filter(_.getLevel == Level.WARN)
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.filter(_.contains("hint"))
assert(warningMessages.size == warnings.size)
warnings.foreach { w =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
index 99bad407c1d6f..1792b4c32eb11 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide, EliminateResolvedHint}
import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -55,7 +55,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
}
val warningMessages = logAppender.loggingEvents
.filter(_.getLevel == Level.WARN)
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.filter(_.contains("hint"))
assert(warningMessages.size == warnings.size)
warnings.foreach { w =>
@@ -610,7 +610,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
df1.hint("SHUFFLE_HASH").join(df2, $"a1" === $"b1", joinType))
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.size === 2)
logs.foreach(log =>
@@ -626,7 +626,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
df1.join(df2.hint("SHUFFLE_HASH"), $"a1" === $"b1", joinType), BuildRight)
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.isEmpty)
}
@@ -639,7 +639,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
assertShuffleMergeJoin(
df1.join(df2.hint("SHUFFLE_HASH"), $"a1" === $"b1", joinType))
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.size === 2)
logs.foreach(log =>
@@ -654,7 +654,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
assertShuffleHashJoin(
df1.hint("SHUFFLE_HASH").join(df2, $"a1" === $"b1", joinType), BuildLeft)
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.isEmpty)
}
@@ -672,7 +672,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
assertShuffleHashJoin(
df1.join(df2.hint("SHUFFLE_HASH"), $"a1" === $"b1", joinType), BuildRight)
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.isEmpty)
}
@@ -689,7 +689,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
assertBroadcastNLJoin(
df1.join(df2.hint("MERGE"), $"a1" !== $"b1"), BuildRight)
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query:"))
assert(logs.size === 2)
logs.foreach(log => assert(log.contains("no equi-join keys")))
@@ -703,7 +703,7 @@ class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkP
SQLConf.ADAPTIVE_MAX_SHUFFLE_HASH_JOIN_LOCAL_MAP_THRESHOLD.key -> "64MB") {
df1.join(df2.repartition($"b1"), $"a1" =!= $"b1").collect()
}
- val logs = hintAppender.loggingEvents.map(_.getRenderedMessage)
+ val logs = hintAppender.loggingEvents.map(_.getMessage.getFormattedMessage)
.filter(_.contains("is not supported in the query: no equi-join keys"))
assert(logs.isEmpty)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 9d4e57093c000..55a98c78d7ab1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -431,7 +431,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach wit
.getOrCreate()
.sharedState
}
- assert(logAppender.loggingEvents.exists(_.getRenderedMessage.contains(msg)))
+ assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
}
test("SPARK-33944: no warning setting spark.sql.warehouse.dir using session options") {
@@ -444,7 +444,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach wit
.getOrCreate()
.sharedState
}
- assert(!logAppender.loggingEvents.exists(_.getRenderedMessage.contains(msg)))
+ assert(!logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
}
Seq(".", "..", "dir0", "dir0/dir1", "/dir0/dir1", "./dir0").foreach { pathStr =>
@@ -484,6 +484,6 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach wit
.getOrCreate()
session.sql("SELECT 1").collect()
}
- assert(logAppender.loggingEvents.exists(_.getRenderedMessage.contains(msg)))
+ assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index 86261b4473cde..ecc448fe250d3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -227,7 +227,8 @@ class QueryExecutionSuite extends SharedSparkSession {
}
Seq("=== Applying Rule org.apache.spark.sql.execution",
"=== Result of Batch Preparations ===").foreach { expectedMsg =>
- assert(testAppender.loggingEvents.exists(_.getRenderedMessage.contains(expectedMsg)))
+ assert(testAppender.loggingEvents.exists(
+ _.getMessage.getFormattedMessage.contains(expectedMsg)))
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
index 08894c6bb8d94..07bc5282faa87 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.adaptive
import java.io.File
import java.net.URI
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.scalatest.PrivateMethodTester
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent, SparkListenerJobStart}
@@ -825,7 +825,7 @@ class AdaptiveQueryExecSuite
}
}
assert(!testAppender.loggingEvents
- .exists(msg => msg.getRenderedMessage.contains(
+ .exists(msg => msg.getMessage.getFormattedMessage.contains(
s"${SQLConf.ADAPTIVE_EXECUTION_ENABLED.key} is" +
s" enabled but is not supported for")))
}
@@ -833,6 +833,7 @@ class AdaptiveQueryExecSuite
test("test log level") {
def verifyLog(expectedLevel: Level): Unit = {
val logAppender = new LogAppender("adaptive execution")
+ logAppender.setThreshold(expectedLevel)
withLogAppender(
logAppender,
loggerNames = Seq(AdaptiveSparkPlanExec.getClass.getName.dropRight(1)),
@@ -846,7 +847,7 @@ class AdaptiveQueryExecSuite
Seq("Plan changed", "Final plan").foreach { msg =>
assert(
logAppender.loggingEvents.exists { event =>
- event.getRenderedMessage.contains(msg) && event.getLevel == expectedLevel
+ event.getMessage.getFormattedMessage.contains(msg) && event.getLevel == expectedLevel
})
}
}
@@ -1433,7 +1434,8 @@ class AdaptiveQueryExecSuite
"=== Result of Batch AQE Post Stage Creation ===",
"=== Result of Batch AQE Replanning ===",
"=== Result of Batch AQE Query Stage Optimization ===").foreach { expectedMsg =>
- assert(testAppender.loggingEvents.exists(_.getRenderedMessage.contains(expectedMsg)))
+ assert(testAppender.loggingEvents.exists(
+ _.getMessage.getFormattedMessage.contains(expectedMsg)))
}
}
}
@@ -1650,6 +1652,7 @@ class AdaptiveQueryExecSuite
test("SPARK-33933: Materialize BroadcastQueryStage first in AQE") {
val testAppender = new LogAppender("aqe query stage materialization order test")
+ testAppender.setThreshold(Level.DEBUG)
val df = spark.range(1000).select($"id" % 26, $"id" % 10)
.toDF("index", "pv")
val dim = Range(0, 26).map(x => (x, ('a' + x).toChar.toString))
@@ -1666,7 +1669,7 @@ class AdaptiveQueryExecSuite
}
}
val materializeLogs = testAppender.loggingEvents
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.filter(_.startsWith("Materialize query stage"))
.toArray
assert(materializeLogs(0).startsWith("Materialize query stage BroadcastQueryStageExec"))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index c7328d9b5dfd8..7bbe371879d40 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -1997,7 +1997,8 @@ abstract class CSVSuite
spark.read.schema(ischema).option("header", true).option("enforceSchema", true).csv(ds)
}
assert(testAppender1.loggingEvents
- .exists(msg => msg.getRenderedMessage.contains("CSV header does not conform to the schema")))
+ .exists(msg =>
+ msg.getMessage.getFormattedMessage.contains("CSV header does not conform to the schema")))
val testAppender2 = new LogAppender("CSV header matches to schema w/ enforceSchema")
withLogAppender(testAppender2) {
@@ -2015,7 +2016,8 @@ abstract class CSVSuite
}
}
assert(testAppender2.loggingEvents
- .exists(msg => msg.getRenderedMessage.contains("CSV header does not conform to the schema")))
+ .exists(msg =>
+ msg.getMessage.getFormattedMessage.contains("CSV header does not conform to the schema")))
}
test("SPARK-25134: check header on parsing of dataset with projection and column pruning") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
index 91a7ff6524d6b..43b86865698b1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
@@ -410,7 +410,7 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
}
val createCommentWarning = logAppender.loggingEvents
.filter(_.getLevel == Level.WARN)
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.exists(_.contains("Cannot create JDBC table comment"))
assert(createCommentWarning === false)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 13f5778617b5c..abde486b2db2b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.internal
import java.util.TimeZone
import org.apache.hadoop.fs.Path
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.parser.ParseException
@@ -415,7 +415,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
def check(config: String): Unit = {
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
- e.getRenderedMessage.contains(config)))
+ e.getMessage.getFormattedMessage.contains(config)))
}
val config1 = SQLConf.HIVE_VERIFY_PARTITION_PATH.key
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIServiceUtils.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIServiceUtils.java
index d79949cc1c3f4..a371b1371703b 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIServiceUtils.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/CLIServiceUtils.java
@@ -17,8 +17,8 @@
package org.apache.hive.service.cli;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
+import org.apache.logging.log4j.core.StringLayout;
+import org.apache.logging.log4j.core.layout.PatternLayout;
/**
* CLIServiceUtils.
@@ -28,10 +28,10 @@ public class CLIServiceUtils {
private static final char SEARCH_STRING_ESCAPE = '\\';
- public static final Layout verboseLayout = new PatternLayout(
- "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n");
- public static final Layout nonVerboseLayout = new PatternLayout(
- "%-5p : %m%n");
+ public static final StringLayout verboseLayout = PatternLayout.newBuilder().withPattern(
+ "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n").build();
+ public static final StringLayout nonVerboseLayout = PatternLayout.newBuilder().withPattern(
+ "%-5p : %m%n").build();
/**
* Convert a SQL search pattern into an equivalent Java Regex.
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
index 60daabcc084e0..2fabf70c0f274 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
@@ -17,7 +17,7 @@
package org.apache.hive.service.cli.operation;
import java.io.CharArrayWriter;
-import java.util.Enumeration;
+import java.util.Map;
import java.util.regex.Pattern;
import org.apache.hadoop.hive.ql.exec.Task;
@@ -25,21 +25,26 @@
import org.apache.hadoop.hive.ql.session.OperationLog;
import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
import org.apache.hive.service.cli.CLIServiceUtils;
-import org.apache.log4j.Appender;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.WriterAppender;
-import org.apache.log4j.spi.Filter;
-import org.apache.log4j.spi.LoggingEvent;
-
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Marker;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.StringLayout;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.appender.ConsoleAppender;
+import org.apache.logging.log4j.core.appender.AbstractWriterAppender;
+import org.apache.logging.log4j.core.appender.WriterManager;
import com.google.common.base.Joiner;
+import org.apache.logging.log4j.message.Message;
/**
* An Appender to divert logs from individual threads to the LogObject they belong to.
*/
-public class LogDivertAppender extends WriterAppender {
- private static final Logger LOG = Logger.getLogger(LogDivertAppender.class.getName());
+public class LogDivertAppender extends AbstractWriterAppender<WriterManager> {
+ private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName());
private final OperationManager operationManager;
private boolean isVerbose;
private Layout verboseLayout;
@@ -51,11 +56,13 @@ public class LogDivertAppender extends WriterAppender {
* they don't generate more logs for themselves when they process logs.
* White list filter is used for less verbose log collection
*/
- private static class NameFilter extends Filter {
+ private static class NameFilter implements Filter {
private Pattern namePattern;
private LoggingLevel loggingMode;
private OperationManager operationManager;
+ private State state;
+
/* Patterns that are excluded in verbose logging level.
* Filter out messages coming from log processing classes, or we'll run an infinite loop.
*/
@@ -91,46 +98,152 @@ private void setCurrentNamePattern(OperationLog.LoggingLevel mode) {
OperationLog.LoggingLevel loggingMode, OperationManager op) {
this.operationManager = op;
this.loggingMode = loggingMode;
+ this.state = State.INITIALIZING;
setCurrentNamePattern(loggingMode);
}
@Override
- public int decide(LoggingEvent ev) {
+ public Result getOnMismatch() {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result getOnMatch() {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object... objects) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4, Object o5) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6, Object o7) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6, Object o7, Object o8) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, String s, Object o, Object o1, Object o2, Object o3, Object o4, Object o5, Object o6, Object o7, Object o8, Object o9) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, Object o, Throwable throwable) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(org.apache.logging.log4j.core.Logger logger, Level level, Marker marker, Message message, Throwable throwable) {
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public Result filter(LogEvent logEvent) {
OperationLog log = operationManager.getOperationLogByThread();
boolean excludeMatches = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
if (log == null) {
- return Filter.DENY;
+ return Result.DENY;
}
OperationLog.LoggingLevel currentLoggingMode = log.getOpLoggingLevel();
// If logging is disabled, deny everything.
if (currentLoggingMode == OperationLog.LoggingLevel.NONE) {
- return Filter.DENY;
+ return Result.DENY;
}
      // Look at the current session's setting
// and set the pattern and excludeMatches accordingly.
if (currentLoggingMode != loggingMode) {
loggingMode = currentLoggingMode;
setCurrentNamePattern(loggingMode);
}
- boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
+ boolean isMatch = namePattern.matcher(logEvent.getLoggerName()).matches();
if (excludeMatches == isMatch) {
// Deny if this is black-list filter (excludeMatches = true) and it
// matched
// or if this is whitelist filter and it didn't match
- return Filter.DENY;
+ return Result.DENY;
}
- return Filter.NEUTRAL;
+ return Result.NEUTRAL;
+ }
+
+ @Override
+ public State getState() {
+ return state;
+ }
+
+ @Override
+ public void initialize() {
+ state = State.INITIALIZED;
+ }
+
+ @Override
+ public void start() {
+ state = State.STARTED;
+ }
+
+ @Override
+ public void stop() {
+ state = State.STOPPED;
+ }
+
+ @Override
+ public boolean isStarted() {
+ return state == State.STARTED;
+ }
+
+ @Override
+ public boolean isStopped() {
+ return state == State.STOPPED;
}
}
/** This is where the log message will go to */
-  private final CharArrayWriter writer = new CharArrayWriter();
+  private static final CharArrayWriter writer = new CharArrayWriter();
- private void setLayout(boolean isVerbose, Layout lo) {
+ private static StringLayout getLayout(boolean isVerbose, StringLayout lo) {
if (isVerbose) {
if (lo == null) {
lo = CLIServiceUtils.verboseLayout;
@@ -139,38 +252,44 @@ private void setLayout(boolean isVerbose, Layout lo) {
} else {
lo = CLIServiceUtils.nonVerboseLayout;
}
- setLayout(lo);
+ return lo;
}
- private void initLayout(boolean isVerbose) {
+ private static StringLayout initLayout(OperationLog.LoggingLevel loggingMode) {
+ boolean isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
+
// There should be a ConsoleAppender. Copy its Layout.
- Logger root = Logger.getRootLogger();
- Layout layout = null;
+    org.apache.logging.log4j.core.Logger root =
+      (org.apache.logging.log4j.core.Logger) LogManager.getRootLogger();
+ StringLayout layout = null;
-    Enumeration<?> appenders = root.getAllAppenders();
- while (appenders.hasMoreElements()) {
- Appender ap = (Appender) appenders.nextElement();
+    Map<String, Appender> appenders = root.getAppenders();
+    for (Map.Entry<String, Appender> entry : appenders.entrySet()) {
+ Appender ap = entry.getValue();
if (ap.getClass().equals(ConsoleAppender.class)) {
- layout = ap.getLayout();
- break;
+        Layout<?> l = ap.getLayout();
+        if (l instanceof StringLayout) {
+ layout = (StringLayout) l;
+ break;
+ }
}
}
- setLayout(isVerbose, layout);
+ return getLayout(isVerbose, layout);
}
public LogDivertAppender(OperationManager operationManager,
OperationLog.LoggingLevel loggingMode) {
- isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
- initLayout(isVerbose);
- setWriter(writer);
- setName("LogDivertAppender");
+ super("LogDivertAppender", initLayout(loggingMode), null, false, true,
+      new WriterManager(writer, "LogDivertAppender",
+ initLayout(loggingMode), true));
+
+ this.isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
this.operationManager = operationManager;
- this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout;
+ this.verboseLayout = isVerbose ? getLayout() : CLIServiceUtils.verboseLayout;
addFilter(new NameFilter(loggingMode, operationManager));
}
@Override
- public void doAppend(LoggingEvent event) {
+ public void append(LogEvent event) {
OperationLog log = operationManager.getOperationLogByThread();
// Set current layout depending on the verbose/non-verbose mode.
@@ -181,24 +300,15 @@ public void doAppend(LoggingEvent event) {
// the last subAppend call, change the layout to preserve consistency.
if (isCurrModeVerbose != isVerbose) {
isVerbose = isCurrModeVerbose;
- setLayout(isVerbose, verboseLayout);
+        // Log4j 2 fixes an appender's layout at construction time, so the old
+        // setLayout(isVerbose, verboseLayout) call has no direct equivalent here.
}
}
- super.doAppend(event);
- }
- /**
- * Overrides WriterAppender.subAppend(), which does the real logging. No need
- * to worry about concurrency since log4j calls this synchronously.
- */
- @Override
- protected void subAppend(LoggingEvent event) {
- super.subAppend(event);
+    super.append(event);
+
// That should've gone into our writer. Notify the LogContext.
String logOutput = writer.toString();
writer.reset();
- OperationLog log = operationManager.getOperationLogByThread();
if (log == null) {
LOG.debug(" ---+++=== Dropped log event from thread " + event.getThreadName());
return;
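
Reviewer note on the NameFilter port above: the long run of NEUTRAL stubs is forced by implementing org.apache.logging.log4j.core.Filter directly, since the interface declares one filter(...) overload per parameter arity plus the LifeCycle state methods. A lighter-weight alternative (a sketch under the assumption that the per-arity overloads never need their own logic, not what this patch does) is to extend log4j-core's AbstractFilter, which supplies the state machine and the NEUTRAL defaults:

    import java.util.regex.Pattern;

    import org.apache.logging.log4j.core.LogEvent;
    import org.apache.logging.log4j.core.filter.AbstractFilter;

    // Hypothetical sketch: only the LogEvent overload needs real logic.
    final class NameFilterSketch extends AbstractFilter {
      private final Pattern namePattern;

      NameFilterSketch(Pattern namePattern) {
        this.namePattern = namePattern;
      }

      @Override
      public Result filter(LogEvent event) {
        // Deny events whose logger name does not match; stay neutral otherwise.
        return namePattern.matcher(event.getLoggerName()).matches()
            ? Result.NEUTRAL : Result.DENY;
      }
    }
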
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
index c93625dbc6439..40daa1ff49367 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -37,7 +37,7 @@
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
-import org.apache.log4j.Appender;
+import org.apache.logging.log4j.core.appender.AbstractWriterAppender;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,8 +80,9 @@ public synchronized void stop() {
private void initOperationLogCapture(String loggingMode) {
// Register another Appender (with the same layout) that talks to us.
- Appender ap = new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode));
- org.apache.log4j.Logger.getRootLogger().addAppender(ap);
+    AbstractWriterAppender<?> ap =
+      new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode));
+    ap.start();
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager.getRootLogger())
+      .addAppender(ap);
}
public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
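
On initOperationLogCapture: Log4j 2 refuses (with an internal error message) events routed to an appender that has not been started, which is why start() runs before the appender is attached to the root logger. The same registration pattern in isolation (a self-contained sketch; the ConsoleAppender built here is only a stand-in, and builder setter names vary slightly across 2.x releases):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Appender;
    import org.apache.logging.log4j.core.appender.ConsoleAppender;
    import org.apache.logging.log4j.core.layout.PatternLayout;

    public class RegisterAppenderSketch {
      public static void main(String[] args) {
        Appender ap = ConsoleAppender.newBuilder()
            .setName("sketch")
            .setLayout(PatternLayout.newBuilder().withPattern("%p %m%n").build())
            .build();
        ap.start();  // start first so no event reaches a non-started appender
        ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger()).addAppender(ap);
        LogManager.getLogger(RegisterAppenderSketch.class).error("routed to the new appender");
      }
    }
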
diff --git a/sql/hive-thriftserver/src/test/resources/log4j2.properties b/sql/hive-thriftserver/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..1a651fac5c07a
--- /dev/null
+++ b/sql/hive-thriftserver/src/test/resources/log4j2.properties
@@ -0,0 +1,100 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file hive-thriftserver/target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.stdout.ref = STDOUT
+rootLogger.appenderRef.file.ref = File
+
+#Console Appender
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{HH:mm:ss.SSS} %p %c: %m%n
+
+appender.console.filter.1.type = Filters
+
+appender.console.filter.1.a.type = ThresholdFilter
+appender.console.filter.1.a.level = info
+
+# SPARK-34128: Suppress undesirable TTransportException warnings, due to THRIFT-4805
+appender.console.filter.1.b.type = RegexFilter
+appender.console.filter.1.b.regex = .*Thrift error occurred during processing of message.*
+appender.console.filter.1.b.onMatch = deny
+appender.console.filter.1.b.onMismatch = neutral
+
+#File Appender
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+appender.file.filter.1.type = Filters
+
+appender.file.filter.1.a.type = RegexFilter
+appender.file.filter.1.a.regex = .*Thrift error occurred during processing of message.*
+appender.file.filter.1.a.onMatch = deny
+appender.file.filter.1.a.onMismatch = neutral
+
+# Set the File Appender threshold to DEBUG
+appender.file.filter.1.b.type = ThresholdFilter
+appender.file.filter.1.b.level = debug
+
+# Some packages are noisy for no good reason.
+logger.parquet_recordreader.name = org.apache.parquet.hadoop.ParquetRecordReader
+logger.parquet_recordreader.additivity = false
+logger.parquet_recordreader.level = off
+
+logger.parquet_outputcommitter.name = org.apache.parquet.hadoop.ParquetOutputCommitter
+logger.parquet_outputcommitter.additivity = false
+logger.parquet_outputcommitter.level = off
+
+logger.hadoop_lazystruct.name = org.apache.hadoop.hive.serde2.lazy.LazyStruct
+logger.hadoop_lazystruct.additivity = false
+logger.hadoop_lazystruct.level = off
+
+logger.hadoop_retryinghmshandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
+logger.hadoop_retryinghmshandler.additivity = false
+logger.hadoop_retryinghmshandler.level = off
+
+logger.hive.name = hive.log
+logger.hive.additivity = false
+logger.hive.level = off
+
+logger.hive_metadata.name = hive.ql.metadata.Hive
+logger.hive_metadata.additivity = false
+logger.hive_metadata.level = off
+
+logger.parquet_recordreader2.name = parquet.hadoop.ParquetRecordReader
+logger.parquet_recordreader2.additivity = false
+logger.parquet_recordreader2.level = off
+
+logger.hive_rcfile.name = org.apache.hadoop.hive.ql.io.RCFile
+logger.hive_rcfile.additivity = false
+logger.hive_rcfile.level = error
+
+# Parquet related logging
+logger.parquet1.name = org.apache.parquet.CorruptStatistics
+logger.parquet1.level = error
+
+logger.parquet2.name = parquet.CorruptStatistics
+logger.parquet2.level = error
+
+logger.thriftserver.name = org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation
+logger.thriftserver.level = off
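
To make the routing in this file concrete: the root logger is INFO, the console appender then applies a ThresholdFilter at INFO plus the Thrift-noise RegexFilter, and the file appender applies the same RegexFilter with a DEBUG threshold. A sketch of the observable behavior (the logger name is hypothetical):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class RoutingSketch {
      public static void main(String[] args) {
        Logger log = LogManager.getLogger("org.apache.spark.sql.hive.thriftserver.Demo");
        log.debug("dropped entirely: the root logger level is INFO");
        log.info("written to both STDOUT and target/unit-tests.log");
        // Suppressed on both appenders by the RegexFilter (SPARK-34128):
        log.warn("Thrift error occurred during processing of message ...");
      }
    }
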
diff --git a/sql/hive/src/test/resources/log4j2.properties b/sql/hive/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..16b8924789130
--- /dev/null
+++ b/sql/hive/src/test/resources/log4j2.properties
@@ -0,0 +1,81 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = debug
+rootLogger.appenderRef.stdout.ref = STDOUT
+rootLogger.appenderRef.file.ref = File
+
+#Console Appender
+appender.console.type = Console
+appender.console.name = STDOUT
+appender.console.target = SYSTEM_OUT
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{HH:mm:ss.SSS} %p %c: %m%n
+appender.console.filter.threshold.type = ThresholdFilter
+appender.console.filter.threshold.level = warn
+
+#File Appender
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Set the File Appender threshold to DEBUG
+appender.file.filter.threshold.type = ThresholdFilter
+appender.file.filter.threshold.level = debug
+
+# Some packages are noisy for no good reason.
+logger.parquet_recordreader.name = org.apache.parquet.hadoop.ParquetRecordReader
+logger.parquet_recordreader.additivity = false
+logger.parquet_recordreader.level = off
+
+logger.parquet_outputcommitter.name = org.apache.parquet.hadoop.ParquetOutputCommitter
+logger.parquet_outputcommitter.additivity = false
+logger.parquet_outputcommitter.level = off
+
+logger.hadoop_lazystruct.name = org.apache.hadoop.hive.serde2.lazy.LazyStruct
+logger.hadoop_lazystruct.additivity = false
+logger.hadoop_lazystruct.level = off
+
+logger.hadoop_retryinghmshandler.name = org.apache.hadoop.hive.metastore.RetryingHMSHandler
+logger.hadoop_retryinghmshandler.additivity = false
+logger.hadoop_retryinghmshandler.level = off
+
+logger.hive.name = hive.log
+logger.hive.additivity = false
+logger.hive.level = off
+
+logger.hive_metadata.name = hive.ql.metadata.Hive
+logger.hive_metadata.additivity = false
+logger.hive_metadata.level = off
+
+logger.parquet_recordreader2.name = parquet.hadoop.ParquetRecordReader
+logger.parquet_recordreader2.additivity = false
+logger.parquet_recordreader2.level = off
+
+logger.hive_rcfile.name = org.apache.hadoop.hive.ql.io.RCFile
+logger.hive_rcfile.additivity = false
+logger.hive_rcfile.level = error
+
+# Parquet related logging
+logger.parquet1.name = org.apache.parquet.CorruptStatistics
+logger.parquet1.level = error
+
+logger.parquet2.name = parquet.CorruptStatistics
+logger.parquet2.level = error
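
Several overrides above pair a level change with additivity = false, which stops an event accepted by the named logger from also flowing to the root logger's appenders. For reference, the programmatic equivalent of one such entry (a sketch only; note that getLoggerConfig falls back to the nearest ancestor config when no exact match exists):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.LoggerContext;
    import org.apache.logging.log4j.core.config.Configuration;
    import org.apache.logging.log4j.core.config.LoggerConfig;

    public class AdditivitySketch {
      public static void main(String[] args) {
        LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        Configuration config = ctx.getConfiguration();
        // Equivalent of: logger.hive.name = hive.log, additivity = false, level = off
        LoggerConfig lc = config.getLoggerConfig("hive.log");
        lc.setAdditive(false);
        lc.setLevel(Level.OFF);
        ctx.updateLoggers();  // apply the changes to live loggers
      }
    }
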
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 644d30451b61c..dbe1b1234da99 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -22,7 +22,7 @@ import java.io.File
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.fs.Path
-import org.apache.log4j.Level
+import org.apache.logging.log4j.Level
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -1440,7 +1440,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
"Persisting data source table `default`.`t` into Hive metastore in " +
"Spark SQL specific format, which is NOT compatible with Hive."
val actualMessages = logAppender.loggingEvents
- .map(_.getRenderedMessage)
+ .map(_.getMessage.getFormattedMessage)
.filter(_.contains("incompatible"))
assert(actualMessages.contains(expectedMsg))
assert(hiveClient.getTable("default", "t").schema
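
The getRenderedMessage change above tracks the Log4j 1 to 2 event-API rename: a Log4j 2 LogEvent exposes a Message object, and the rendered text comes from getFormattedMessage(). In Java terms (sketch class, for illustration only):

    import org.apache.logging.log4j.core.LogEvent;

    final class MessageTextSketch {
      // Log4j 1 equivalent: event.getRenderedMessage()
      static String renderedText(LogEvent event) {
        return event.getMessage().getFormattedMessage();
      }
    }
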
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/security/HiveHadoopDelegationTokenManagerSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/security/HiveHadoopDelegationTokenManagerSuite.scala
index f8f555197daef..a811886517c60 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/security/HiveHadoopDelegationTokenManagerSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/security/HiveHadoopDelegationTokenManagerSuite.scala
@@ -52,7 +52,7 @@ class HiveHadoopDelegationTokenManagerSuite extends SparkFunSuite {
throw new ClassNotFoundException(name)
}
- val prefixExcludeList = Seq("java", "scala", "com.sun.", "sun.")
+ val prefixExcludeList = Seq("java", "scala", "com.sun.", "sun.", "jdk.")
if (prefixExcludeList.exists(name.startsWith(_))) {
return currentLoader.loadClass(name)
}
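
The added "jdk." prefix above keeps JDK-internal classes (jdk.*, visible as class-name prefixes since Java 9) loading from the parent loader instead of failing in the test's isolating loader. A generic sketch of this prefix-based delegation pattern (hypothetical class, not the test code):

    // Delegates listed prefixes to the parent loader; everything else is refused.
    final class PrefixDelegatingLoader extends ClassLoader {
      private static final String[] PARENT_PREFIXES =
          {"java", "scala", "com.sun.", "sun.", "jdk."};

      PrefixDelegatingLoader(ClassLoader parent) {
        super(parent);
      }

      @Override
      protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        for (String prefix : PARENT_PREFIXES) {
          if (name.startsWith(prefix)) {
            return super.loadClass(name, resolve);  // parent-first for core classes
          }
        }
        throw new ClassNotFoundException(name);
      }
    }
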
diff --git a/streaming/src/test/resources/log4j2.properties b/streaming/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..08f43461b96ae
--- /dev/null
+++ b/streaming/src/test/resources/log4j2.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/unit-tests.log
+rootLogger.level = info
+rootLogger.appenderRef.file.ref = File
+
+appender.file.type = File
+appender.file.name = File
+appender.file.fileName = target/unit-tests.log
+appender.file.append = true
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+logger.jetty.name = org.sparkproject.jetty
+logger.jetty.level = warn