diff --git a/bin/hbase b/bin/hbase
index f89e5644890c..84a86bc211d7 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -43,7 +43,9 @@
#
# HBASE_CONF_DIR Alternate conf dir. Default is ${HBASE_HOME}/conf.
#
-# HBASE_ROOT_LOGGER The root appender. Default is INFO,console
+# HBASE_ROOT_LOGGER_LEVEL The root logger level. Default is INFO.
+#
+# HBASE_ROOT_LOGGER_APPENDER The root logger appender. Default is console.
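+# e.g. HBASE_ROOT_LOGGER_LEVEL=DEBUG HBASE_ROOT_LOGGER_APPENDER=console ./bin/hbase shell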
#
# JRUBY_HOME JRuby path: $JRUBY_HOME/lib/jruby.jar should exist.
# Defaults to the jar packaged with HBase.
@@ -294,7 +296,10 @@ fi
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
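+  # Skip the logging jars here; they are appended to CLASSPATH further below, only when the command being run needs logging.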
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
[ "${f}" != "htrace-core.jar$" ] && \
- [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+ [[ ! "${f}" =~ ^.*/log4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/slf4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/jcl-over-slf4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/jul-to-slf4j.*$ ]]; then
CLASSPATH="${CLASSPATH}:${f}"
fi
done
@@ -639,7 +644,10 @@ elif [ "$COMMAND" = "mapredcp" ] ; then
for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do
if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \
[ "${f}" != "htrace-core.jar$" ] && \
- [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then
+ [[ ! "${f}" =~ ^.*/log4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/slf4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/jcl-over-slf4j.*$ ]] && \
+ [[ ! "${f}" =~ ^.*/jul-to-slf4j.*$ ]]; then
echo -n ":${f}"
fi
done
@@ -756,7 +764,8 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.dir=$HBASE_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING"
-HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}"
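+# The single hbase.root.logger property is split into separate level and appender properties for the log4j2 configuration.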
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.level=${HBASE_ROOT_LOGGER_LEVEL:-INFO}"
+HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger.appender=${HBASE_ROOT_LOGGER_APPENDER:-console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH"
@@ -764,14 +773,34 @@ fi
# Enable security logging on the master and regionserver only
if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then
- HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,RFAS}"
+ HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${HBASE_SECURITY_LOGGER_LEVEL:-INFO}"
+ HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${HBASE_SECURITY_LOGGER_APPENDER:-RFAS}"
else
- HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger=${HBASE_SECURITY_LOGGER:-INFO,NullAppender}"
+ HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.level=${HBASE_SECURITY_LOGGER_LEVEL:-INFO}"
+ HBASE_OPTS="$HBASE_OPTS -Dhbase.security.logger.appender=${HBASE_SECURITY_LOGGER_APPENDER:-NullAppender}"
fi
HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX"
# by now if we're running a command it means we need logging
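+# Each of the loops below adds at most one matching jar to the classpath (the break stops after the first match).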
-for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do
+ if [ -f "${f}" ]; then
+ CLASSPATH="${CLASSPATH}:${f}"
+ break
+ fi
+done
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j*.jar; do
+ if [ -f "${f}" ]; then
+ CLASSPATH="${CLASSPATH}:${f}"
+ break
+ fi
+done
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/jcl-over-slf4j*.jar; do
+ if [ -f "${f}" ]; then
+ CLASSPATH="${CLASSPATH}:${f}"
+ break
+ fi
+done
+for f in ${HBASE_HOME}/lib/client-facing-thirdparty/jul-to-slf4j*.jar; do
if [ -f "${f}" ]; then
CLASSPATH="${CLASSPATH}:${f}"
break
diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh
index 11c13eb52300..6fafab0ccec0 100755
--- a/bin/hbase-daemon.sh
+++ b/bin/hbase-daemon.sh
@@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java
export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME
export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log
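+# Daemons default to the rolling file appenders (RFA/RFAS); the console appender is only the default for foreground commands in bin/hbase.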
-if [ -z "${HBASE_ROOT_LOGGER}" ]; then
-export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"}
+if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then
+export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"}
fi
-if [ -z "${HBASE_SECURITY_LOGGER}" ]; then
-export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"}
+if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then
+export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then
+export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"}
+fi
+
+if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then
+export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"}
fi
HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"}
diff --git a/bin/hbase.cmd b/bin/hbase.cmd
index fbeb1f8290f6..f6d57d662f48 100644
--- a/bin/hbase.cmd
+++ b/bin/hbase.cmd
@@ -36,7 +36,9 @@
@rem
@rem HBASE_CONF_DIR Alternate conf dir. Default is ${HBASE_HOME}/conf.
@rem
-@rem HBASE_ROOT_LOGGER The root appender. Default is INFO,console
+@rem HBASE_ROOT_LOGGER_LEVEL The root logger level. Default is INFO.
+@rem
+@rem HBASE_ROOT_LOGGER_APPENDER The root logger appender. Default is console.
@rem
@rem JRUBY_HOME JRuby path: $JRUBY_HOME\lib\jruby.jar should exist.
@rem Defaults to the jar packaged with HBase.
@@ -250,10 +252,14 @@ if "%servercommand%" == "true" (
if defined service_entry (
set HBASE_LOG_PREFIX=hbase-%hbase-command%-%COMPUTERNAME%
set HBASE_LOGFILE=!HBASE_LOG_PREFIX!.log
- if not defined HBASE_ROOT_LOGGER (
- set HBASE_ROOT_LOGGER=INFO,DRFA
+ if not defined HBASE_ROOT_LOGGER_LEVEL (
+ set HBASE_ROOT_LOGGER_LEVEL=INFO
+ )
+ if not defined HBASE_ROOT_LOGGER_APPENDER (
+ set HBASE_ROOT_LOGGER_APPENDER=DRFA
)
- set HBASE_SECURITY_LOGGER=INFO,DRFAS
+ set HBASE_SECURITY_LOGGER_LEVEL=INFO
+ set HBASE_SECURITY_LOGGER_APPENDER=DRFAS
set loggc=!HBASE_LOG_DIR!\!HBASE_LOG_PREFIX!.gc
set loglog=!HBASE_LOG_DIR!\!HBASE_LOGFILE!
@@ -317,26 +323,42 @@ set HBASE_OPTS=%HBASE_OPTS% -Dhbase.home.dir="%HBASE_HOME%"
set HBASE_OPTS=%HBASE_OPTS% -Dhbase.id.str="%HBASE_IDENT_STRING%"
set HBASE_OPTS=%HBASE_OPTS% -XX:OnOutOfMemoryError="taskkill /F /PID %p"
-if not defined HBASE_ROOT_LOGGER (
- set HBASE_ROOT_LOGGER=INFO,console
+if not defined HBASE_ROOT_LOGGER_LEVEL (
+ set HBASE_ROOT_LOGGER_LEVEL=INFO
+)
+
+if not defined HBASE_ROOT_LOGGER_APPENDER (
+ set HBASE_ROOT_LOGGER_APPENDER=console
)
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%"
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger.level="%HBASE_ROOT_LOGGER_LEVEL%" -Dhbase.root.logger.appender="%HBASE_ROOT_LOGGER_APPENDER%"
if defined JAVA_LIBRARY_PATH (
set HBASE_OPTS=%HBASE_OPTS% -Djava.library.path="%JAVA_LIBRARY_PATH%"
)
rem Enable security logging on the master and regionserver only
-if not defined HBASE_SECURITY_LOGGER (
- set HBASE_SECURITY_LOGGER=INFO,NullAppender
+if not defined HBASE_SECURITY_LOGGER_LEVEL (
+ set HBASE_SECURITY_LOGGER_LEVEL=INFO
+)
+
+if not defined HBASE_SECURITY_LOGGER_APPENDER (
+ set HBASE_SECURITY_LOGGER_APPENDER=NullAppender
if "%hbase-command%"=="master" (
- set HBASE_SECURITY_LOGGER=INFO,DRFAS
+ set HBASE_SECURITY_LOGGER_APPENDER=DRFAS
)
if "%hbase-command%"=="regionserver" (
- set HBASE_SECURITY_LOGGER=INFO,DRFAS
+ set HBASE_SECURITY_LOGGER_APPENDER=DRFAS
)
)
-set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%"
+
+set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger.level="%HBASE_SECURITY_LOGGER_LEVEL%" -Dhbase.security.logger.appender="%HBASE_SECURITY_LOGGER_APPENDER%"
set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX%
set java_arguments=%HEAP_SETTINGS% %HBASE_OPTS% -classpath "%CLASSPATH%" %CLASS% %hbase-command-arguments%
diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties
deleted file mode 100644
index 4d68d79db70d..000000000000
--- a/conf/log4j-hbtop.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=WARN,console
-log4j.threshold=WARN
-
-# console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n
-
-# ZooKeeper will still put stuff at WARN
-log4j.logger.org.apache.zookeeper=ERROR
diff --git a/conf/log4j.properties b/conf/log4j.properties
deleted file mode 100644
index 0af1da71752c..000000000000
--- a/conf/log4j.properties
+++ /dev/null
@@ -1,128 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.security.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-hbase.log.level=INFO
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-# Rolling File Appender properties
-hbase.log.maxfilesize=256MB
-hbase.log.maxbackupindex=20
-
-# Rolling File Appender
-log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file}
-
-log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize}
-log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex}
-
-log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-#
-# Security audit appender
-#
-hbase.security.log.file=SecurityAuth.audit
-hbase.security.log.maxfilesize=256MB
-hbase.security.log.maxbackupindex=20
-log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
-log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file}
-log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize}
-log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex}
-log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
-log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n
-log4j.category.SecurityLogger=${hbase.security.logger}
-log4j.additivity.SecurityLogger=false
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE
-
-#
-# Null Appender
-#
-log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n
-
-log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender
-log4j.appender.asyncconsole.target=System.err
-
-# Custom Logging levels
-
-log4j.logger.org.apache.zookeeper=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-log4j.logger.org.apache.hadoop.hbase=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level}
-# Make these two classes INFO-level. Make them DEBUG to see more zk debug.
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level}
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level}
-#log4j.logger.org.apache.hadoop.dfs=DEBUG
-# Set this class to log INFO only otherwise its OTT
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
-
-
-# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output)
-#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG
-
-# Uncomment the below if you want to remove logging of client region caching'
-# and scan of hbase:meta messages
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO
-
-# EventCounter
-# Add "EventCounter" to rootlogger if you want to use this
-# Uncomment the line below to add EventCounter information
-# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
-
-# Prevent metrics subsystem start/stop messages (HBASE-17722)
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-
-# Disable request log by default, you can enable this by changing the appender
-log4j.category.http.requests=INFO,NullAppender
-log4j.additivity.http.requests=false
diff --git a/conf/log4j2-hbtop.xml b/conf/log4j2-hbtop.xml
new file mode 100644
index 000000000000..de0fb5769c50
--- /dev/null
+++ b/conf/log4j2-hbtop.xml
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/conf/log4j2.xml b/conf/log4j2.xml
new file mode 100644
index 000000000000..19b952eb7644
--- /dev/null
+++ b/conf/log4j2.xml
@@ -0,0 +1,87 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml
index e8192ed68b34..00f6595db3a7 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -54,13 +54,18 @@
hbase-client
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
runtime
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-core
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
runtime
diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml
index cd8dfaa05948..b33aa9314b46 100644
--- a/hbase-archetypes/hbase-shaded-client-project/pom.xml
+++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml
@@ -60,13 +60,18 @@
hbase-shaded-client
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
runtime
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-core
+ runtime
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
runtime
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 805351767e37..094778644774 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -332,12 +332,16 @@
jul-to-slf4j
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+
+
+ org.apache.logging.log4j
+ log4j-core
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml
index bd65cb43f8e9..62828fa5afc9 100644
--- a/hbase-assembly/src/main/assembly/client.xml
+++ b/hbase-assembly/src/main/assembly/client.xml
@@ -61,10 +61,8 @@
org.apache.htrace:htrace-core4
org.apache.htrace:htrace-core
org.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
@@ -149,10 +147,8 @@
org.apache.htrace:htrace-core4
org.apache.htrace:htrace-core
org.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
index 1c172e9c31ff..ab2a4c5ede8a 100644
--- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
@@ -50,11 +50,9 @@
org.apache.hbase:hbase-metrics
org.apache.hbase:hbase-metrics-api
org.apache.hbase:hbase-procedure
- org.apache.hbase:hbase-protocol
org.apache.hbase:hbase-protocol-shaded
org.apache.hbase:hbase-replication
org.apache.hbase:hbase-rest
- org.apache.hbase:hbase-rsgroup
org.apache.hbase:hbase-server
org.apache.hbase:hbase-shell
org.apache.hbase:hbase-testing-util
@@ -111,8 +109,8 @@
org.apache.htrace:htrace-core4
org.apache.htrace:htrace-core
org.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
@@ -209,10 +207,8 @@
org.apache.htrace:htrace-core4
org.apache.htrace:htrace-core
org.apache.yetus:audience-annotations
- org.slf4j:slf4j-api
- org.slf4j:jcl-over-slf4j
- org.slf4j:jul-to-slf4j
- org.slf4j:slf4j-log4j12
+ org.slf4j:*
+ org.apache.logging.log4j:*
diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml
index 3d45e4c7d13e..910d24c6e0e6 100644
--- a/hbase-asyncfs/pom.xml
+++ b/hbase-asyncfs/pom.xml
@@ -149,13 +149,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
index 9b276aca0785..12ba93fb50b9 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java
@@ -98,11 +98,6 @@ protected static void startMiniDFSCluster(int servers) throws IOException {
createDirsAndSetProperties();
Configuration conf = UTIL.getConfiguration();
- // Error level to skip some warnings specific to the minicluster. See HBASE-4709
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class)
- .setLevel(org.apache.log4j.Level.ERROR);
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class)
- .setLevel(org.apache.log4j.Level.ERROR);
TraceUtil.initTracer(conf);
CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build();
diff --git a/hbase-backup/pom.xml b/hbase-backup/pom.xml
index 972ea8d02c99..3a35f6b9cab2 100644
--- a/hbase-backup/pom.xml
+++ b/hbase-backup/pom.xml
@@ -148,13 +148,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-balancer/pom.xml b/hbase-balancer/pom.xml
index 2fcbc2bc0c8c..c321af556b16 100644
--- a/hbase-balancer/pom.xml
+++ b/hbase-balancer/pom.xml
@@ -108,13 +108,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 16ffadd60fb4..07b185fda0e4 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -159,13 +159,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
@@ -192,6 +197,21 @@
+
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
index eb1877f189dc..62b7e5faf13b 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java
@@ -17,73 +17,83 @@
*/
package org.apache.hadoop.hbase.ipc;
-import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
import java.net.InetSocketAddress;
+import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Captor;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
-@RunWith(MockitoJUnitRunner.class)
@Category({ ClientTests.class, SmallTests.class })
public class TestFailedServersLog {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestFailedServersLog.class);
+ HBaseClassTestRule.forClass(TestFailedServersLog.class);
static final int TEST_PORT = 9999;
+
private InetSocketAddress addr;
- @Mock
private Appender mockAppender;
- @Captor
- private ArgumentCaptor<LoggingEvent> captorLoggingEvent;
-
@Before
public void setup() {
- LogManager.getRootLogger().addAppender(mockAppender);
+ mockAppender = mock(Appender.class);
+ when(mockAppender.getName()).thenReturn("mockAppender");
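+ // log4j2 skips appenders that are not started, so the mock must report isStarted() == true.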
+ when(mockAppender.isStarted()).thenReturn(true);
+ ((Logger) LogManager.getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
+
}
@After
public void teardown() {
- LogManager.getRootLogger().removeAppender(mockAppender);
+ ((Logger) LogManager.getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
}
@Test
public void testAddToFailedServersLogging() {
- Throwable nullException = new NullPointerException();
+ AtomicReference<Level> level = new AtomicReference<>();
+ AtomicReference<String> msg = new AtomicReference<String>();
+ doAnswer(new Answer<Void>() {
+ @Override
+ public Void answer(InvocationOnMock invocation) throws Throwable {
+ LogEvent logEvent = invocation.getArgument(0, LogEvent.class);
+ level.set(logEvent.getLevel());
+ msg.set(logEvent.getMessage().getFormattedMessage());
+ return null;
+ }
+ }).when(mockAppender).append(any(LogEvent.class));
+
+ Throwable nullException = new NullPointerException();
FailedServers fs = new FailedServers(new Configuration());
addr = new InetSocketAddress(TEST_PORT);
-
fs.addToFailedServers(addr, nullException);
- Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture());
- LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue();
- assertThat(loggingEvent.getLevel(), is(Level.DEBUG));
- assertEquals("Added failed server with address " + addr.toString() + " to list caused by "
- + nullException.toString(),
- loggingEvent.getRenderedMessage());
+ verify(mockAppender, times(1)).append(any(LogEvent.class));
+ assertEquals(Level.DEBUG, level.get());
+ assertEquals("Added failed server with address " + addr.toString() + " to list caused by " +
+ nullException.toString(), msg.get());
}
-
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 9fc510c365a1..aa6b613ab940 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -30,7 +30,6 @@
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
-
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
@@ -39,7 +38,6 @@
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -54,15 +52,14 @@
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Strings;
@@ -81,17 +78,12 @@ public class TestHBaseSaslRpcClient {
static final String DEFAULT_USER_NAME = "principal";
static final String DEFAULT_USER_PASSWORD = "password";
- private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class);
@Rule
public ExpectedException exception = ExpectedException.none();
- @BeforeClass
- public static void before() {
- Logger.getRootLogger().setLevel(Level.DEBUG);
- }
-
@Test
public void testSaslClientUsesGivenRpcProtection() throws Exception {
 Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME,
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index 18398430decf..b014d06950d1 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -231,13 +231,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
index 89931de7128f..d7af08190756 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java
@@ -24,9 +24,10 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -45,11 +46,11 @@ public class TestLog4jUtils {
@Test
public void test() {
Logger zk = LogManager.getLogger("org.apache.zookeeper");
- Level zkLevel = zk.getEffectiveLevel();
+ Level zkLevel = zk.getLevel();
Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper");
- Level hbaseZkLevel = hbaseZk.getEffectiveLevel();
+ Level hbaseZkLevel = hbaseZk.getLevel();
Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client");
- Level clientLevel = client.getEffectiveLevel();
+ Level clientLevel = client.getLevel();
Log4jUtils.disableZkAndClientLoggers();
assertEquals(Level.OFF, zk.getLevel());
assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName()));
@@ -58,9 +59,9 @@ public void test() {
assertEquals(Level.OFF, client.getLevel());
assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName()));
// restore the level
- zk.setLevel(zkLevel);
- hbaseZk.setLevel(hbaseZkLevel);
- client.setLevel(clientLevel);
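+ // The log4j2 Logger API is read-only; levels are restored through Configurator instead of setLevel().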
+ Configurator.setLevel(zk.getName(), zkLevel);
+ Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel);
+ Configurator.setLevel(client.getName(), clientLevel);
}
@Test
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 09558d47508c..bb173c4cec41 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -194,13 +194,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index e26cb276f6f9..e84d5ed591b7 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -271,13 +271,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml
index a687de8214ca..c5673a57fc45 100644
--- a/hbase-hadoop-compat/pom.xml
+++ b/hbase-hadoop-compat/pom.xml
@@ -1,5 +1,7 @@
-
+
- 4.0.0
-
- hbase-build-configuration
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ../hbase-build-configuration
-
+ 4.0.0
+
+ hbase-build-configuration
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ../hbase-build-configuration
+
- hbase-hadoop-compat
- Apache HBase - Hadoop Compatibility
-
+ hbase-hadoop-compat
+ Apache HBase - Hadoop Compatibility
+
Interfaces to be implemented in order to smooth
over hadoop version differences
-
-
+
+
maven-assembly-plugin
@@ -44,156 +46,161 @@
-
- org.apache.maven.plugins
- maven-source-plugin
-
-
- org.apache.maven.plugins
- maven-checkstyle-plugin
-
- true
-
-
-
- net.revelc.code
- warbucks-maven-plugin
-
-
-
+
+ org.apache.maven.plugins
+ maven-source-plugin
+
+
+ org.apache.maven.plugins
+ maven-checkstyle-plugin
+
+ true
+
+
+
+ net.revelc.code
+ warbucks-maven-plugin
+
+
+
-
-
- org.apache.hbase
- hbase-annotations
- test-jar
- test
-
-
- org.apache.hbase
- hbase-logging
- test-jar
- test
-
-
- org.apache.hbase
- hbase-common
-
-
- org.apache.hbase
- hbase-common
- test-jar
- test
-
-
- org.apache.hbase
- hbase-metrics
-
-
- org.apache.hbase
- hbase-metrics-api
-
-
- org.apache.hbase.thirdparty
- hbase-shaded-miscellaneous
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
-
-
- com.google.guava
- guava
-
-
-
-
- org.apache.hadoop
- hadoop-common
-
-
- org.slf4j
- slf4j-api
-
-
+
+
+ org.apache.hbase
+ hbase-annotations
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-logging
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-common
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-metrics
+
+
+ org.apache.hbase
+ hbase-metrics-api
+
+
+ org.apache.hbase.thirdparty
+ hbase-shaded-miscellaneous
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+
+
+ com.google.guava
+ guava
+
+
+
+
+ org.apache.hadoop
+ hadoop-common
+
+
+ org.slf4j
+ slf4j-api
+
+
- javax.activation
- javax.activation-api
- runtime
-
-
- org.apache.commons
- commons-lang3
-
-
- junit
- junit
- test
-
-
- org.slf4j
- jcl-over-slf4j
- test
-
-
- org.slf4j
- jul-to-slf4j
- test
-
-
- org.slf4j
- slf4j-log4j12
- test
-
-
- log4j
- log4j
- test
-
-
+ javax.activation
+ javax.activation-api
+ runtime
+
+
+ org.apache.commons
+ commons-lang3
+
+
+ junit
+ junit
+ test
+
+
+ org.slf4j
+ jcl-over-slf4j
+ test
+
+
+ org.slf4j
+ jul-to-slf4j
+ test
+
+
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
+
-
+
-
- skipHadoopCompatTests
-
-
- skipHadoopCompatTests
-
-
-
- true
- true
-
-
-
- eclipse-specific
-
-
- m2e.version
-
-
-
-
-
+
+ skipHadoopCompatTests
+
+
+ skipHadoopCompatTests
+
+
+
+ true
+ true
+
+
+
+ eclipse-specific
+
+
+ m2e.version
+
+
+
+
+
-
- org.eclipse.m2e
- lifecycle-mapping
-
-
-
-
-
-
-
-
-
-
-
-
+
+ org.eclipse.m2e
+ lifecycle-mapping
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
new file mode 100644
index 000000000000..26d72b6a1e37
--- /dev/null
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetrics.java
@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.source;
+
+import static org.apache.hadoop.metrics2.impl.MsInfo.ProcessName;
+import static org.apache.hadoop.metrics2.impl.MsInfo.SessionId;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcCount;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcNumInfoThresholdExceeded;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcNumWarnThresholdExceeded;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcTimeMillis;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcTimePercentage;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.GcTotalExtraSleepTime;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.JvmMetrics;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.LogError;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.LogFatal;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.LogInfo;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.LogWarn;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemHeapCommittedM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemHeapMaxM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemHeapUsedM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemMaxM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemNonHeapCommittedM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemNonHeapMaxM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.MemNonHeapUsedM;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsBlocked;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsNew;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsRunnable;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsTerminated;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsTimedWaiting;
+import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.ThreadsWaiting;
+
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import org.apache.hadoop.hbase.logging.EventCounter;
+import org.apache.hadoop.metrics2.MetricsCollector;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.metrics2.MetricsSource;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.Interns;
+import org.apache.hadoop.util.GcTimeMonitor;
+import org.apache.hadoop.util.JvmPauseMonitor;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+
+/**
+ * JVM and logging related metrics. Mostly used by various servers as a part of the metrics they
+ * export.
+ */
+@InterfaceAudience.Private
+public class JvmMetrics implements MetricsSource {
+
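+ // Enum-based singleton holder; init() and shutdown() are synchronized so the source is registered at most once.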
+ private enum Singleton {
+ INSTANCE;
+
+ JvmMetrics impl;
+
+ synchronized JvmMetrics init(String processName, String sessionId) {
+ if (impl == null) {
+ impl = create(processName, sessionId, DefaultMetricsSystem.instance());
+ }
+ return impl;
+ }
+
+ synchronized void shutdown() {
+ DefaultMetricsSystem.instance().unregisterSource(JvmMetrics.name());
+ impl = null;
+ }
+ }
+
+ private static final float M = 1024 * 1024;
+ private static final float MEMORY_MAX_UNLIMITED_MB = -1;
+
+ private final MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+ private final List<GarbageCollectorMXBean> gcBeans =
+ ManagementFactory.getGarbageCollectorMXBeans();
+ private final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
+ private final String processName, sessionId;
+ private JvmPauseMonitor pauseMonitor = null;
+ private final ConcurrentHashMap<String, MetricsInfo[]> gcInfoCache =
+ new ConcurrentHashMap<String, MetricsInfo[]>();
+ private GcTimeMonitor gcTimeMonitor = null;
+
+ private JvmMetrics(String processName, String sessionId) {
+ this.processName = processName;
+ this.sessionId = sessionId;
+ }
+
+ public void setPauseMonitor(final JvmPauseMonitor pauseMonitor) {
+ this.pauseMonitor = pauseMonitor;
+ }
+
+ public void setGcTimeMonitor(GcTimeMonitor gcTimeMonitor) {
+ Preconditions.checkNotNull(gcTimeMonitor);
+ this.gcTimeMonitor = gcTimeMonitor;
+ }
+
+ public static JvmMetrics create(String processName, String sessionId, MetricsSystem ms) {
+ return ms.register(JvmMetrics.name(), JvmMetrics.description(),
+ new JvmMetrics(processName, sessionId));
+ }
+
+ public static void reattach(MetricsSystem ms, JvmMetrics jvmMetrics) {
+ ms.register(JvmMetrics.name(), JvmMetrics.description(), jvmMetrics);
+ }
+
+ public static JvmMetrics initSingleton(String processName, String sessionId) {
+ return Singleton.INSTANCE.init(processName, sessionId);
+ }
+
+ /**
+ * Shutdown the JvmMetrics singleton. This is not necessary if the JVM itself is shutdown, but may
+ * be necessary for scenarios where JvmMetrics instance needs to be re-created while the JVM is
+ * still around. One such scenario is unit-testing.
+ */
+ public static void shutdownSingleton() {
+ Singleton.INSTANCE.shutdown();
+ }
+
+ @Override
+ public void getMetrics(MetricsCollector collector, boolean all) {
+ MetricsRecordBuilder rb = collector.addRecord(JvmMetrics).setContext("jvm")
+ .tag(ProcessName, processName).tag(SessionId, sessionId);
+ getMemoryUsage(rb);
+ getGcUsage(rb);
+ getThreadUsage(rb);
+ getEventCounters(rb);
+ }
+
+ private void getMemoryUsage(MetricsRecordBuilder rb) {
+ MemoryUsage memNonHeap = memoryMXBean.getNonHeapMemoryUsage();
+ MemoryUsage memHeap = memoryMXBean.getHeapMemoryUsage();
+ Runtime runtime = Runtime.getRuntime();
+ rb.addGauge(MemNonHeapUsedM, memNonHeap.getUsed() / M)
+ .addGauge(MemNonHeapCommittedM, memNonHeap.getCommitted() / M)
+ .addGauge(MemNonHeapMaxM, calculateMaxMemoryUsage(memNonHeap))
+ .addGauge(MemHeapUsedM, memHeap.getUsed() / M)
+ .addGauge(MemHeapCommittedM, memHeap.getCommitted() / M)
+ .addGauge(MemHeapMaxM, calculateMaxMemoryUsage(memHeap))
+ .addGauge(MemMaxM, runtime.maxMemory() / M);
+ }
+
+ private float calculateMaxMemoryUsage(MemoryUsage memHeap) {
+ long max = memHeap.getMax();
+
+ if (max == -1) {
+ return MEMORY_MAX_UNLIMITED_MB;
+ }
+
+ return max / M;
+ }
+
+ private void getGcUsage(MetricsRecordBuilder rb) {
+ long count = 0;
+ long timeMillis = 0;
+ for (GarbageCollectorMXBean gcBean : gcBeans) {
+ long c = gcBean.getCollectionCount();
+ long t = gcBean.getCollectionTime();
+ MetricsInfo[] gcInfo = getGcInfo(gcBean.getName());
+ rb.addCounter(gcInfo[0], c).addCounter(gcInfo[1], t);
+ count += c;
+ timeMillis += t;
+ }
+ rb.addCounter(GcCount, count).addCounter(GcTimeMillis, timeMillis);
+
+ if (pauseMonitor != null) {
+ rb.addCounter(GcNumWarnThresholdExceeded, pauseMonitor.getNumGcWarnThresholdExceeded());
+ rb.addCounter(GcNumInfoThresholdExceeded, pauseMonitor.getNumGcInfoThresholdExceeded());
+ rb.addCounter(GcTotalExtraSleepTime, pauseMonitor.getTotalGcExtraSleepTime());
+ }
+
+ if (gcTimeMonitor != null) {
+ rb.addGauge(GcTimePercentage, gcTimeMonitor.getLatestGcData().getGcTimePercentage());
+ }
+ }
+
+ private MetricsInfo[] getGcInfo(String gcName) {
+ MetricsInfo[] gcInfo = gcInfoCache.get(gcName);
+ if (gcInfo == null) {
+ gcInfo = new MetricsInfo[2];
+ gcInfo[0] = Interns.info("GcCount" + gcName, "GC Count for " + gcName);
+ gcInfo[1] = Interns.info("GcTimeMillis" + gcName, "GC Time for " + gcName);
+ MetricsInfo[] previousGcInfo = gcInfoCache.putIfAbsent(gcName, gcInfo);
+ if (previousGcInfo != null) {
+ return previousGcInfo;
+ }
+ }
+ return gcInfo;
+ }
+
+ private void getThreadUsage(MetricsRecordBuilder rb) {
+ int threadsNew = 0;
+ int threadsRunnable = 0;
+ int threadsBlocked = 0;
+ int threadsWaiting = 0;
+ int threadsTimedWaiting = 0;
+ int threadsTerminated = 0;
+ long threadIds[] = threadMXBean.getAllThreadIds();
+ for (ThreadInfo threadInfo : threadMXBean.getThreadInfo(threadIds, 0)) {
+ if (threadInfo == null) continue; // race protection
+ switch (threadInfo.getThreadState()) {
+ case NEW:
+ threadsNew++;
+ break;
+ case RUNNABLE:
+ threadsRunnable++;
+ break;
+ case BLOCKED:
+ threadsBlocked++;
+ break;
+ case WAITING:
+ threadsWaiting++;
+ break;
+ case TIMED_WAITING:
+ threadsTimedWaiting++;
+ break;
+ case TERMINATED:
+ threadsTerminated++;
+ break;
+ }
+ }
+ rb.addGauge(ThreadsNew, threadsNew).addGauge(ThreadsRunnable, threadsRunnable)
+ .addGauge(ThreadsBlocked, threadsBlocked).addGauge(ThreadsWaiting, threadsWaiting)
+ .addGauge(ThreadsTimedWaiting, threadsTimedWaiting)
+ .addGauge(ThreadsTerminated, threadsTerminated);
+ }
+
+ private void getEventCounters(MetricsRecordBuilder rb) {
+ rb.addCounter(LogFatal, EventCounter.getFatal()).addCounter(LogError, EventCounter.getError())
+ .addCounter(LogWarn, EventCounter.getWarn()).addCounter(LogInfo, EventCounter.getInfo());
+ }
+}
diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
new file mode 100644
index 000000000000..7252245099e3
--- /dev/null
+++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/source/JvmMetricsInfo.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.source;
+
+import java.util.StringJoiner;
+import org.apache.hadoop.metrics2.MetricsInfo;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * JVM and logging related metrics info instances
+ */
+@InterfaceAudience.Private
+public enum JvmMetricsInfo implements MetricsInfo {
+ JvmMetrics("JVM related metrics etc."), // record info
+ // metrics
+ MemNonHeapUsedM("Non-heap memory used in MB"),
+ MemNonHeapCommittedM("Non-heap memory committed in MB"),
+ MemNonHeapMaxM("Non-heap memory max in MB"),
+ MemHeapUsedM("Heap memory used in MB"),
+ MemHeapCommittedM("Heap memory committed in MB"),
+ MemHeapMaxM("Heap memory max in MB"),
+ MemMaxM("Max memory size in MB"),
+ GcCount("Total GC count"),
+ GcTimeMillis("Total GC time in milliseconds"),
+ ThreadsNew("Number of new threads"),
+ ThreadsRunnable("Number of runnable threads"),
+ ThreadsBlocked("Number of blocked threads"),
+ ThreadsWaiting("Number of waiting threads"),
+ ThreadsTimedWaiting("Number of timed waiting threads"),
+ ThreadsTerminated("Number of terminated threads"),
+ LogFatal("Total number of fatal log events"),
+ LogError("Total number of error log events"),
+ LogWarn("Total number of warning log events"),
+ LogInfo("Total number of info log events"),
+ GcNumWarnThresholdExceeded("Number of times that the GC warn threshold is exceeded"),
+ GcNumInfoThresholdExceeded("Number of times that the GC info threshold is exceeded"),
+ GcTotalExtraSleepTime("Total GC extra sleep time in milliseconds"),
+ GcTimePercentage("Percentage of time the JVM was paused in GC");
+
+ private final String desc;
+
+ JvmMetricsInfo(String desc) {
+ this.desc = desc;
+ }
+
+ @Override
+ public String description() {
+ return desc;
+ }
+
+ @Override
+ public String toString() {
+ return new StringJoiner(", ", this.getClass().getSimpleName() + "{", "}").add("name=" + name())
+ .add("description=" + desc).toString();
+ }
+}
diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml
index 2a1fd38a7b43..07542e024d70 100644
--- a/hbase-hbtop/pom.xml
+++ b/hbase-hbtop/pom.xml
@@ -92,13 +92,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml
index 7d1150a27d54..6081fb5c0697 100644
--- a/hbase-http/pom.xml
+++ b/hbase-http/pom.xml
@@ -269,13 +269,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index dede1f9d8fdf..93df21750473 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -43,7 +43,6 @@
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.Tool;
import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -305,8 +304,7 @@ private void process(String urlString) throws Exception {
/**
* A servlet implementation
*/
- @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
- @InterfaceStability.Unstable
+ @InterfaceAudience.Private
public static class Servlet extends HttpServlet {
private static final long serialVersionUID = 1L;
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
index 70ce5ec0b4c4..92dc20d35b59 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/util/LogMonitoring.java
@@ -31,8 +31,7 @@
import org.apache.yetus.audience.InterfaceAudience;
/**
- * Utility functions for reading the log4j logs that are
- * being written by HBase.
+ * Utility functions for reading the log4j logs that are being written by HBase.
*/
@InterfaceAudience.Private
public abstract class LogMonitoring {
@@ -54,13 +53,12 @@ public static void dumpTailOfLogs(
}
}
- private static void dumpTailOfLog(File f, PrintWriter out, long tailKb)
- throws IOException {
+ private static void dumpTailOfLog(File f, PrintWriter out, long tailKb) throws IOException {
FileInputStream fis = new FileInputStream(f);
BufferedReader r = null;
try {
FileChannel channel = fis.getChannel();
- channel.position(Math.max(0, channel.size() - tailKb*1024));
+ channel.position(Math.max(0, channel.size() - tailKb * 1024));
r = new BufferedReader(new InputStreamReader(fis));
r.readLine(); // skip the first partial line
String line;
diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
index 2c5d0c42b6da..d32636940ea1 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java
@@ -22,6 +22,7 @@
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+
import java.io.File;
import java.net.BindException;
import java.net.SocketException;
@@ -51,9 +52,10 @@
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.config.Configurator;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -296,7 +298,7 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect
if (!LogLevel.isValidProtocol(connectProtocol)) {
throw new Exception("Invalid client protocol " + connectProtocol);
}
- Level oldLevel = log.getEffectiveLevel();
+ Level oldLevel = log.getLevel();
assertNotEquals("Get default Log Level which shouldn't be ERROR.",
Level.ERROR, oldLevel);
@@ -334,7 +336,7 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect
}
// restore log level
- GenericTestUtils.setLogLevel(log, oldLevel);
+ Configurator.setLevel(log.getName(), oldLevel);
}
/**
@@ -366,7 +368,7 @@ private void setLevel(String protocol, String authority, String newLevel)
cli.run(setLevelArgs);
assertEquals("new level not equal to expected: ", newLevel.toUpperCase(),
- log.getEffectiveLevel().toString());
+ log.getLevel().toString());
}
/**
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index d3168f3be056..2bea3145a096 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -250,13 +250,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml
index d48ffca32248..e19585dcddd7 100644
--- a/hbase-logging/pom.xml
+++ b/hbase-logging/pom.xml
@@ -38,7 +38,7 @@
src/test/resources
- log4j.properties
+ log4j2.xml
@@ -80,13 +80,28 @@
org.slf4j
- slf4j-log4j12
+ jcl-over-slf4j
test
- log4j
- log4j
+ org.slf4j
+ jul-to-slf4j
+ test
+
+
+ org.apache.logging.log4j
+ log4j-api
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-core
provided
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/CounterAppender.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/CounterAppender.java
new file mode 100644
index 000000000000..ecaae44939cf
--- /dev/null
+++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/CounterAppender.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.logging;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A log appender for collecting the log event count.
+ */
+@org.apache.logging.log4j.core.config.plugins.Plugin(name = CounterAppender.PLUGIN_NAME,
+ category = org.apache.logging.log4j.core.Core.CATEGORY_NAME,
+ elementType = org.apache.logging.log4j.core.Appender.ELEMENT_TYPE, printObject = true)
+@InterfaceAudience.Private
+public class CounterAppender extends org.apache.logging.log4j.core.appender.AbstractAppender {
+
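+ // Name under which the plugin is registered; log4j2 configurations reference this appender as a <Counter> element.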
+ public static final String PLUGIN_NAME = "Counter";
+
+ @org.apache.logging.log4j.core.config.plugins.PluginFactory
+ public static CounterAppender createAppender(
+ @org.apache.logging.log4j.core.config.plugins.PluginAttribute(value = "name") String name) {
+ return new CounterAppender(name);
+ }
+
+ private CounterAppender(String name) {
+ super(name, null, null, true, org.apache.logging.log4j.core.config.Property.EMPTY_ARRAY);
+ }
+
+ @Override
+ public void append(org.apache.logging.log4j.core.LogEvent event) {
+ org.apache.logging.log4j.Level level = event.getLevel();
+ if (level == org.apache.logging.log4j.Level.INFO) {
+ EventCounter.info();
+ } else if (level == org.apache.logging.log4j.Level.WARN) {
+ EventCounter.warn();
+ } else if (level == org.apache.logging.log4j.Level.ERROR) {
+ EventCounter.error();
+ } else if (level == org.apache.logging.log4j.Level.FATAL) {
+ EventCounter.fatal();
+ }
+ }
+}
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/EventCounter.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/EventCounter.java
new file mode 100644
index 000000000000..302384f07d00
--- /dev/null
+++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/EventCounter.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.logging;
+
+import java.util.concurrent.atomic.LongAdder;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Collect the log event count.
+ */
+@InterfaceAudience.Private
+public class EventCounter {
+
+ private static final LongAdder INFO = new LongAdder();
+
+ private static final LongAdder WARN = new LongAdder();
+
+ private static final LongAdder ERROR = new LongAdder();
+
+ private static final LongAdder FATAL = new LongAdder();
+
+ static void info() {
+ INFO.increment();
+ }
+
+ static void warn() {
+ WARN.increment();
+ }
+
+ static void error() {
+ ERROR.increment();
+ }
+
+ static void fatal() {
+ FATAL.increment();
+ }
+
+ public static long getInfo() {
+ return INFO.sum();
+ }
+
+ public static long getWarn() {
+ return WARN.sum();
+ }
+
+ public static long getError() {
+ return ERROR.sum();
+ }
+
+ public static long getFatal() {
+ return FATAL.sum();
+ }
+}
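
(A minimal sketch, not part of the patch, of how the two classes above are meant to be used together in tests. It assumes a log4j2 test configuration that attaches the "Counter" appender -- CounterAppender.PLUGIN_NAME -- to a logger the test writes to; the appender then feeds EventCounter, whose sums can be asserted on.)

    // Hypothetical test snippet, assuming the Counter appender is wired up in log4j2.xml.
    long errorsBefore = org.apache.hadoop.hbase.logging.EventCounter.getError();
    org.slf4j.LoggerFactory.getLogger("org.apache.hadoop.hbase.Example").error("boom");
    // CounterAppender.append() saw one ERROR event and bumped the counter.
    assert org.apache.hadoop.hbase.logging.EventCounter.getError() == errorsBefore + 1;
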
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
index 28d29bf30131..b0711d7e8f1a 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
+++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java
@@ -19,16 +19,15 @@
import java.io.File;
import java.io.IOException;
-import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;
import org.apache.yetus.audience.InterfaceAudience;
/**
- * The actual class for operating on log4j.
+ * The actual class for operating on log4j2.
*
* This class will depend on log4j directly, so callers should not use this class directly to avoid
- * introducing log4j dependencies to downstream users. Please call the methods in
+ * introducing log4j2 dependencies to downstream users. Please call the methods in
* {@link Log4jUtils}, as they will call the methods here through reflection.
*/
@InterfaceAudience.Private
@@ -38,32 +37,53 @@ private InternalLog4jUtils() {
}
static void setLogLevel(String loggerName, String levelName) {
- org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
- org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase());
+ org.apache.logging.log4j.Level level =
+ org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase());
if (!level.toString().equalsIgnoreCase(levelName)) {
throw new IllegalArgumentException("Unsupported log level " + levelName);
}
- logger.setLevel(level);
+ org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level);
}
static String getEffectiveLevel(String loggerName) {
- org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName);
- return logger.getEffectiveLevel().toString();
+ org.apache.logging.log4j.Logger logger =
+ org.apache.logging.log4j.LogManager.getLogger(loggerName);
+ return logger.getLevel().name();
}
static Set<File> getActiveLogFiles() throws IOException {
Set<File> ret = new HashSet<>();
- org.apache.log4j.Appender a;
- @SuppressWarnings("unchecked")
- Enumeration<org.apache.log4j.Appender> e =
- org.apache.log4j.Logger.getRootLogger().getAllAppenders();
- while (e.hasMoreElements()) {
- a = e.nextElement();
- if (a instanceof org.apache.log4j.FileAppender) {
- org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a;
- String filename = fa.getFile();
- ret.add(new File(filename));
- }
+ org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+ if (!(logger instanceof org.apache.logging.log4j.core.Logger)) {
+ return ret;
+ }
+ org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
+ for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) {
+ if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName();
+ ret.add(new File(fileName));
+ } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName();
+ ret.add(new File(fileName));
+ } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName();
+ ret.add(new File(fileName));
+ } else
+ if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender)
+ .getFileName();
+ ret.add(new File(fileName));
+ } else
+ if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) {
+ String fileName =
+ ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender)
+ .getFileName();
+ ret.add(new File(fileName));
+ }
}
return ret;
}
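
(A minimal sketch, not part of the patch, of the caller-side pattern the test changes below follow: tests go through the Log4jUtils facade, which reaches InternalLog4jUtils via reflection, instead of referencing log4j or log4j2 classes themselves.)

    // Save, lower, and later restore a logger's level without a compile-time log4j2 dependency.
    String saved = Log4jUtils.getEffectiveLevel(ZooKeeper.class.getName());
    Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
    try {
      // ... run the noisy part of the test ...
    } finally {
      Log4jUtils.setLogLevel(ZooKeeper.class.getName(), saved);
    }
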
diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-logging/src/main/java/org/apache/log4j/Appender.java
similarity index 56%
rename from hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
rename to hbase-logging/src/main/java/org/apache/log4j/Appender.java
index 939b453c8d4b..5967bf9c6f98 100644
--- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/hbase-logging/src/main/java/org/apache/log4j/Appender.java
@@ -15,32 +15,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hbase;
+package org.apache.log4j;
import org.apache.yetus.audience.InterfaceAudience;
/**
- * Logger class that buffers before trying to log to the specified console.
+ * Just to make Hadoop work without log4j.
*/
@InterfaceAudience.Private
-public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender {
- private final org.apache.log4j.ConsoleAppender consoleAppender;
-
- public AsyncConsoleAppender() {
- super();
- consoleAppender = new org.apache.log4j.ConsoleAppender(
- new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
- this.addAppender(consoleAppender);
- }
-
- public void setTarget(String value) {
- consoleAppender.setTarget(value);
- }
-
- @Override
- public void activateOptions() {
- consoleAppender.activateOptions();
- super.activateOptions();
- }
+public interface Appender {
}
diff --git a/hbase-logging/src/main/java/org/apache/log4j/AsyncAppender.java b/hbase-logging/src/main/java/org/apache/log4j/AsyncAppender.java
new file mode 100644
index 000000000000..7c45f31dc0c5
--- /dev/null
+++ b/hbase-logging/src/main/java/org/apache/log4j/AsyncAppender.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.log4j;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Just to make Hadoop work without log4j.
+ */
+@InterfaceAudience.Private
+public class AsyncAppender implements Appender {
+
+}
diff --git a/hbase-logging/src/main/java/org/apache/log4j/Logger.java b/hbase-logging/src/main/java/org/apache/log4j/Logger.java
new file mode 100644
index 000000000000..407b68528e24
--- /dev/null
+++ b/hbase-logging/src/main/java/org/apache/log4j/Logger.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.log4j;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Just to make Hadoop work without log4j.
+ */
+@InterfaceAudience.Private
+public class Logger {
+
+}
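
(Illustration only: the empty org.apache.log4j.Appender, AsyncAppender and Logger classes above exist so that Hadoop code compiled against log4j 1.x still resolves those class names once the real log4j jar is gone; they carry no behaviour, and actual logging flows through slf4j to log4j2. A hypothetical example of code they keep loadable:)

    // Hypothetical legacy-style declaration; it links against the stub classes above.
    public class LegacyComponent {
      private org.apache.log4j.Logger legacyLogger;     // resolves to the no-op stub
      private org.apache.log4j.Appender legacyAppender; // likewise
    }
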
diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties
deleted file mode 100644
index c322699ced24..000000000000
--- a/hbase-logging/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
diff --git a/hbase-logging/src/test/resources/log4j2.xml b/hbase-logging/src/test/resources/log4j2.xml
new file mode 100644
index 000000000000..b2c7d4d52a0d
--- /dev/null
+++ b/hbase-logging/src/test/resources/log4j2.xml
@@ -0,0 +1,42 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 5bd03f4e394c..abcba001f10f 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -260,13 +260,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 63c1760626f0..7614b8376d07 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -25,24 +25,16 @@
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
-
import javax.crypto.spec.SecretKeySpec;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.ZooKeeper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability;
@@ -52,6 +44,7 @@
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@@ -61,6 +54,10 @@
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@@ -583,7 +580,7 @@ public void initTestTable() throws IOException {
@Override
protected int doWork() throws IOException {
if (!isVerbose) {
- LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
+ Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
}
if (numTables > 1) {
return parallelLoadTables();
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index ec8bb4d19582..0db0458b26ed 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -133,13 +133,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 9c2bcc2de7d9..0496cc3db793 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -141,13 +141,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index 13cfc739d86c..5cf027096bd5 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -130,13 +130,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index f56ca628f4b3..7575fd176144 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -138,13 +138,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 391f061e8485..433165e17f03 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -378,13 +378,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index f1c28473c262..1469197188bb 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -510,13 +510,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 30a7841bba7b..4153bc174a24 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -598,10 +598,8 @@ public MiniDFSCluster startMiniDFSCluster(int servers, final String racks[], St
EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
// Error level to skip some warnings specific to the minicluster. See HBASE-4709
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class).
- setLevel(org.apache.log4j.Level.ERROR);
- org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class).
- setLevel(org.apache.log4j.Level.ERROR);
+ Log4jUtils.setLogLevel(org.apache.hadoop.metrics2.util.MBeans.class.getName(), "ERROR");
+ Log4jUtils.setLogLevel(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class.getName(), "ERROR");
TraceUtil.initTracer(conf);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
index 57cfbeca6e29..028b8fd8c30f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
@@ -29,11 +29,10 @@
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately {
private static AsyncConnection CONN;
+ private static String LOG_LEVEL;
+
@BeforeClass
public static void setUp() throws Exception {
// disable the debug log to avoid flooding the output
- LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+ LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+ Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
UTIL.startMiniCluster(1);
Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@@ -79,6 +81,9 @@ public static void setUp() throws Exception {
@AfterClass
public static void tearDown() throws Exception {
+ if (LOG_LEVEL != null) {
+ Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+ }
CONN.close();
UTIL.shutdownMiniCluster();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index eaf1f9deb4c5..4818b6d9b968 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -33,14 +33,13 @@
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.metrics.BaseSource;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
@@ -65,6 +64,7 @@ public class TestMultiRespectsLimits {
CompatibilityFactory.getInstance(MetricsAssertHelper.class);
private final static byte[] FAMILY = Bytes.toBytes("D");
public static final int MAX_SIZE = 100;
+ private static String LOG_LEVEL;
@Rule
public TestName name = new TestName();
@@ -72,7 +72,8 @@ public class TestMultiRespectsLimits {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// disable the debug log to avoid flooding the output
- LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+ LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+ Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
MAX_SIZE);
@@ -82,6 +83,9 @@ public static void setUpBeforeClass() throws Exception {
@AfterClass
public static void tearDownAfterClass() throws Exception {
+ if (LOG_LEVEL != null) {
+ Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+ }
TEST_UTIL.shutdownMiniCluster();
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 941d921481d3..a45804a45159 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -30,6 +30,7 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.testclassification.RPCTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.After;
@@ -82,10 +83,8 @@ public void setUp() throws IOException { // Setup server for both protocols
this.conf = HBaseConfiguration.create();
this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
rpcServerImpl);
- org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
- .setLevel(org.apache.log4j.Level.ERROR);
- org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
- .setLevel(org.apache.log4j.Level.TRACE);
+ Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+ Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
// Create server side implementation
// Get RPC server for server side implementation
this.server = RpcServerFactory.createRpcServer(null, "testrpc",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
index 2d66106a3d2b..19228b4e8754 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
@@ -25,6 +25,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Logger;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
@@ -38,7 +41,7 @@ public class TestRpcServerTraceLogging {
public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
.forClass(TestRpcServerTraceLogging.class);
- static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+ private static final Logger rpcServerLog = (Logger) LogManager.getLogger(RpcServer.class);
static final String TRACE_LOG_MSG =
"This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
@@ -62,7 +65,7 @@ public static void setUp() {
@Test
public void testLoggingWithTraceOff() {
conf.setInt("hbase.ipc.trace.log.max.length", 250);
- rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+ rpcServerLog.setLevel(Level.DEBUG);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -72,7 +75,7 @@ public void testLoggingWithTraceOff() {
@Test
public void testLoggingWithTraceOn() {
conf.setInt("hbase.ipc.trace.log.max.length", 250);
- rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+ rpcServerLog.setLevel(Level.TRACE);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -82,7 +85,7 @@ public void testLoggingWithTraceOn() {
@Test
public void testLoggingWithTraceOnLargeMax() {
conf.setInt("hbase.ipc.trace.log.max.length", 2000);
- rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+ rpcServerLog.setLevel(Level.TRACE);
String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index dd1c085718b2..4da72708a6e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -38,10 +38,8 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.appender.WriterAppender;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
@@ -86,26 +84,14 @@ public void tearDown() throws Exception {
testUtil.shutdownMiniDFSCluster();
}
- /**
- * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
- * except that this implementation has a default appender to the root logger.
- * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
- * TODO: This class can be removed after we upgrade Hadoop dependency.
- */
- static class LogCapturer {
+ private static class LogCapturer {
private StringWriter sw = new StringWriter();
private WriterAppender appender;
- private org.apache.log4j.Logger logger;
+ private org.apache.logging.log4j.core.Logger logger;
- LogCapturer(org.apache.log4j.Logger logger) {
+ LogCapturer(org.apache.logging.log4j.core.Logger logger) {
this.logger = logger;
- Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
- if (defaultAppender == null) {
- defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
- }
- final Layout layout = (defaultAppender == null) ? new PatternLayout() :
- defaultAppender.getLayout();
- this.appender = new WriterAppender(layout, sw);
+ this.appender = WriterAppender.newBuilder().setName("test").setTarget(sw).build();
this.logger.addAppender(this.appender);
}
@@ -141,7 +127,8 @@ public void testReportForDutyBackoff() throws IOException, InterruptedException
master = cluster.addMaster();
master.start();
- LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+ LogCapturer capturer = new LogCapturer(
+ (org.apache.logging.log4j.core.Logger) LogManager.getLogger(HRegionServer.class));
// Set sleep interval relatively low so that exponential backoff is more demanding.
int msginterval = 100;
cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 13c7a6bc1039..b7b42f429513 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,9 +26,9 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
@@ -120,12 +120,9 @@ public PerfTestCompactionPolicies(
this.ratio = inRatio;
// Hide lots of logging so the system out is usable as a tab delimited file.
- org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
- setLevel(org.apache.log4j.Level.ERROR);
- org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
- setLevel(org.apache.log4j.Level.ERROR);
-
- org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+ Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+ Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+ Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
Configuration configuration = HBaseConfiguration.create();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index 9680502bf6c5..99138ab646c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -27,10 +27,13 @@
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
@@ -47,9 +50,10 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Appender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@@ -57,13 +61,10 @@
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
import org.mockito.ArgumentMatcher;
-import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
-@RunWith(MockitoJUnitRunner.class)
@Category({LargeTests.class})
public class TestCanaryTool {
@@ -78,22 +79,24 @@ public class TestCanaryTool {
@Rule
public TestName name = new TestName();
+ private Appender mockAppender;
+
@Before
public void setUp() throws Exception {
testingUtility = new HBaseTestingUtility();
testingUtility.startMiniCluster();
- LogManager.getRootLogger().addAppender(mockAppender);
+ mockAppender = mock(Appender.class);
+ when(mockAppender.getName()).thenReturn("mockAppender");
+ when(mockAppender.isStarted()).thenReturn(true);
+ ((Logger) LogManager.getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
}
@After
public void tearDown() throws Exception {
testingUtility.shutdownMiniCluster();
- LogManager.getRootLogger().removeAppender(mockAppender);
+ ((Logger) LogManager.getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
}
- @Mock
- Appender mockAppender;
-
@Test
public void testBasicZookeeperCanaryWorks() throws Exception {
final String[] args = { "-t", "10000", "-zookeeper" };
@@ -230,16 +233,16 @@ public void testReadTableTimeouts() throws Exception {
sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
}
// One table's timeout is set for 0 ms and thus, should lead to an error.
- verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
+ verify(mockAppender, times(1)).append(argThat(new ArgumentMatcher<LogEvent>() {
@Override
- public boolean matches(LoggingEvent argument) {
- return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
+ public boolean matches(LogEvent argument) {
+ return argument.getMessage().getFormattedMessage().contains("exceeded the configured read timeout.");
}
}));
- verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
+ verify(mockAppender, times(2)).append(argThat(new ArgumentMatcher<LogEvent>() {
@Override
- public boolean matches(LoggingEvent argument) {
- return argument.getRenderedMessage().contains("Configured read timeout");
+ public boolean matches(LogEvent argument) {
+ return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
}
}));
}
@@ -253,11 +256,11 @@ public void testWriteTableTimeout() throws Exception {
assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
- verify(mockAppender, times(1)).doAppend(argThat(
- new ArgumentMatcher<LoggingEvent>() {
+ verify(mockAppender, times(1)).append(argThat(
+ new ArgumentMatcher<LogEvent>() {
@Override
- public boolean matches(LoggingEvent argument) {
- return argument.getRenderedMessage().contains("Configured write timeout");
+ public boolean matches(LogEvent argument) {
+ return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
}
}));
}
@@ -266,10 +269,10 @@ public boolean matches(LoggingEvent argument) {
@Test
public void testRegionserverNoRegions() throws Exception {
runRegionserverCanary();
- verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
+ verify(mockAppender).append(argThat(new ArgumentMatcher<LogEvent>() {
@Override
- public boolean matches(LoggingEvent argument) {
- return argument.getRenderedMessage().contains("Regionserver not serving any regions");
+ public boolean matches(LogEvent argument) {
+ return argument.getMessage().getFormattedMessage().contains("Regionserver not serving any regions");
}
}));
}
@@ -280,10 +283,10 @@ public void testRegionserverWithRegions() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
testingUtility.createTable(tableName, new byte[][] { FAMILY });
runRegionserverCanary();
- verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
+ verify(mockAppender, never()).append(argThat(new ArgumentMatcher<LogEvent>() {
@Override
- public boolean matches(LoggingEvent argument) {
- return argument.getRenderedMessage().contains("Regionserver not serving any regions");
+ public boolean matches(LogEvent argument) {
+ return argument.getMessage().getFormattedMessage().contains("Regionserver not serving any regions");
}
}));
}
@@ -322,8 +325,7 @@ private void runRegionserverCanary() throws Exception {
}
private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
- Integer port =
- Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
+ Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
String hostPort = testingUtility.getZkCluster().getAddress().toString();
testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort);
ExecutorService executor = new ScheduledThreadPoolExecutor(2);
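
(A condensed sketch, not part of the patch, of the log4j2 mock-appender pattern TestCanaryTool now uses. Stubbing getName() and isStarted() matters: log4j-core refuses to deliver events to an appender that reports itself as not started, so an unstubbed Mockito mock would never see anything.)

    // Attach a Mockito mock as a log4j2 appender and verify what was logged.
    org.apache.logging.log4j.core.Appender appender =
        org.mockito.Mockito.mock(org.apache.logging.log4j.core.Appender.class);
    org.mockito.Mockito.when(appender.getName()).thenReturn("mockAppender");
    org.mockito.Mockito.when(appender.isStarted()).thenReturn(true);
    org.apache.logging.log4j.core.Logger logger = (org.apache.logging.log4j.core.Logger)
        org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase");
    logger.addAppender(appender);
    // ... exercise the code under test, then inspect LogEvent.getMessage().getFormattedMessage()
    logger.removeAppender(appender);
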
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index bd9b3f4139ff..e61e1637c555 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -46,12 +46,10 @@
org.apache.hbase
hbase-shaded-mapreduce
- ${project.version}
org.apache.hbase
hbase-shaded-client-byo-hadoop
- ${project.version}
@@ -60,8 +58,18 @@
provided
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-api
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-core
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
provided
diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
index 090e0682196c..18aaf716a8fe 100644
--- a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
+++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
@@ -1,118 +1,120 @@
-
- 4.0.0
-
- hbase-shaded
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ..
-
- hbase-shaded-client-byo-hadoop
- Apache HBase - Shaded - Client
-
-
-
- org.apache.maven.plugins
- maven-site-plugin
-
- true
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
-
-
-
-
+ xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ 4.0.0
+
+ hbase-shaded
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ..
+
+ hbase-shaded-client-byo-hadoop
+ Apache HBase - Shaded - Client
+
+
+
+ org.apache.maven.plugins
+ maven-site-plugin
+
+ true
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+
+
+
+
+ org.apache.hbase
+ hbase-client
+
+
+
+
+
+
+
+ hadoop-3.0
+
+
+ !hadoop.profile
+
+
+
- org.apache.hbase
- hbase-client
+ org.apache.hadoop
+ hadoop-auth
+ provided
-
-
-
-
-
-
- hadoop-3.0
-
- !hadoop.profile
-
-
-
- org.apache.hadoop
- hadoop-auth
- provided
-
-
- org.apache.hadoop
- hadoop-common
- provided
-
-
- org.codehaus.jackson
- jackson-jaxrs
- 1.9.13
- provided
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
-
-
- org.codehaus.jackson
- jackson-xc
- 1.9.13
- provided
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
-
-
-
-
+
+ org.apache.hadoop
+ hadoop-common
+ provided
+
+
+ org.codehaus.jackson
+ jackson-jaxrs
+ 1.9.13
+ provided
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+
+
+
+
+ org.codehaus.jackson
+ jackson-xc
+ 1.9.13
+ provided
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+
+
+
+
+
+
diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml
index c85d7f79b53a..9249d57157b2 100644
--- a/hbase-shaded/hbase-shaded-client/pom.xml
+++ b/hbase-shaded/hbase-shaded-client/pom.xml
@@ -1,92 +1,92 @@
-
- 4.0.0
-
- hbase-shaded
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ..
-
- hbase-shaded-client
- Apache HBase - Shaded - Client (with Hadoop bundled)
-
-
-
- org.apache.maven.plugins
- maven-site-plugin
-
- true
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
-
-
- aggregate-into-a-jar-with-relocated-third-parties
-
-
-
-
- javax.annotation:javax.annotation-api
- javax.activation:javax.activation-api
-
-
- org.apache.hbase:hbase-resource-bundle
- org.slf4j:*
- com.google.code.findbugs:*
- com.github.stephenc.findbugs:*
- com.github.spotbugs:*
- org.apache.htrace:*
- org.apache.yetus:*
- log4j:*
- commons-logging:*
-
-
-
-
-
-
-
-
-
-
- org.apache.hbase
- hbase-client
- ${project.version}
-
-
+ xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ 4.0.0
+
+ hbase-shaded
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ..
+
+ hbase-shaded-client
+ Apache HBase - Shaded - Client (with Hadoop bundled)
+
+
+
+ org.apache.maven.plugins
+ maven-site-plugin
+
+ true
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+
+ aggregate-into-a-jar-with-relocated-third-parties
+
+
+
+
+ javax.annotation:javax.annotation-api
+ javax.activation:javax.activation-api
+
+
+ org.apache.hbase:hbase-resource-bundle
+ org.slf4j:*
+ com.google.code.findbugs:*
+ com.github.stephenc.findbugs:*
+ com.github.spotbugs:*
+ org.apache.htrace:*
+ org.apache.yetus:*
+ log4j:*
+ org.apache.logging.log4j:*
+ commons-logging:*
+
+
+
+
+
+
+
+
+
+
+ org.apache.hbase
+ hbase-client
+
+
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
index 4fe7fe8a767a..fb48adb5d977 100644
--- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -1,6 +1,6 @@
+ xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
- 4.0.0
-
- hbase-shaded
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ..
-
- hbase-shaded-mapreduce
- Apache HBase - Shaded - MapReduce
-
-
-
- org.apache.maven.plugins
- maven-site-plugin
-
- true
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
- org.apache.maven.plugins
- maven-jar-plugin
-
-
-
-
- org/apache/hadoop/hbase/mapreduce/Driver
-
-
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
-
-
-
-
-
+ 4.0.0
+
+ hbase-shaded
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ..
+
+ hbase-shaded-mapreduce
+ Apache HBase - Shaded - MapReduce
+
+
+
+ org.apache.maven.plugins
+ maven-site-plugin
+
+ true
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+
+ org/apache/hadoop/hbase/mapreduce/Driver
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+
+
+
+
+
+ org.apache.hbase
+ hbase-mapreduce
+
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+ javax.ws.rs
+ javax.ws.rs-api
+
+
+ com.sun.jersey
+ jersey-server
+
+
+ com.sun.jersey
+ jersey-client
+
+
+ com.sun.jersey
+ jersey-core
+
+
+ com.sun.jersey
+ jersey-json
+
+
+ com.sun.jersey.contribs
+ jersey-guice
+
+
+
+ javax.servlet
+ javax.servlet-api
+
+
+ org.eclipse.jetty
+ jetty-http
+
+
+ org.eclipse.jetty
+ jetty-security
+
+
+ org.eclipse.jetty
+ jetty-server
+
+
+ org.eclipse.jetty
+ jetty-servlet
+
+
+ org.eclipse.jetty
+ jetty-util
+
+
+ org.eclipse.jetty
+ jetty-util-ajax
+
+
+ org.glassfish
+ javax.el
+
+
+ org.eclipse.jetty
+ jetty-webapp
+
+
+ org.glassfish.jersey.core
+ jersey-server
+
+
+ org.glassfish.jersey.containers
+ jersey-container-servlet-core
+
+
+
+ org.glassfish.web
+ javax.servlet.jsp
+
+
+ javax.servlet.jsp
+ javax.servlet.jsp-api
+
+
+
+
+
+
+
+
+
+ hadoop-3.0
+
+
+ !hadoop.profile
+
+
+
+ ${hadoop-three.version}
+
+
- org.apache.hbase
- hbase-mapreduce
-
-
-
- javax.xml.bind
- jaxb-api
-
-
- javax.ws.rs
- jsr311-api
-
-
-
- javax.ws.rs
- javax.ws.rs-api
-
-
- com.sun.jersey
- jersey-server
-
-
- com.sun.jersey
- jersey-client
-
-
- com.sun.jersey
- jersey-core
-
-
- com.sun.jersey
- jersey-json
-
-
- com.sun.jersey.contribs
- jersey-guice
-
-
-
- javax.servlet
- javax.servlet-api
-
-
- org.eclipse.jetty
- jetty-http
-
-
- org.eclipse.jetty
- jetty-security
-
-
- org.eclipse.jetty
- jetty-server
-
-
- org.eclipse.jetty
- jetty-servlet
-
-
- org.eclipse.jetty
- jetty-util
-
-
- org.eclipse.jetty
- jetty-util-ajax
-
-
- org.glassfish
- javax.el
-
-
- org.eclipse.jetty
- jetty-webapp
-
-
- org.glassfish.jersey.core
- jersey-server
-
-
- org.glassfish.jersey.containers
- jersey-container-servlet-core
-
-
-
- org.glassfish.web
- javax.servlet.jsp
-
-
- javax.servlet.jsp
- javax.servlet.jsp-api
-
-
+ org.apache.hadoop
+ hadoop-common
+ provided
-
-
-
-
-
-
- hadoop-3.0
-
- !hadoop.profile
-
-
- ${hadoop-three.version}
-
-
-
- org.apache.hadoop
- hadoop-common
- provided
-
-
- org.apache.hadoop
- hadoop-hdfs
- provided
-
-
- org.apache.hadoop
- hadoop-auth
- provided
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- provided
-
-
- com.google.guava
- guava
-
-
- javax.xml.bind
- jaxb-api
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
+
+ org.apache.hadoop
+ hadoop-hdfs
+ provided
+
+
+ org.apache.hadoop
+ hadoop-auth
+ provided
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ provided
+
+
+ com.google.guava
+ guava
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+
+ org.codehaus.jackson
+ jackson-jaxrs
+ 1.9.13
+ provided
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+
+
org.codehaus.jackson
- jackson-jaxrs
- 1.9.13
- provided
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
-
-
+ jackson-core-asl
+
+
+
+
+ org.codehaus.jackson
+ jackson-xc
+ 1.9.13
+ provided
+
+
org.codehaus.jackson
- jackson-xc
- 1.9.13
- provided
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
-
-
-
-
+ jackson-mapper-asl
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+
+
+
+
+
+
diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
index 4750c9c96795..45f0b67bc2fe 100644
--- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
@@ -1,68 +1,77 @@
-
- 4.0.0
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ 4.0.0
-
- org.apache.hbase
- hbase-build-configuration
- 3.0.0-SNAPSHOT
- ../../hbase-build-configuration
-
+
+ org.apache.hbase
+ hbase-build-configuration
+ 3.0.0-SNAPSHOT
+ ../../hbase-build-configuration
+
- hbase-shaded-testing-util-tester
- Apache HBase - Shaded - Testing Util Tester
- Ensures that hbase-shaded-testing-util works with hbase-shaded-client.
+ hbase-shaded-testing-util-tester
+ Apache HBase - Shaded - Testing Util Tester
+ Ensures that hbase-shaded-testing-util works with hbase-shaded-client.
-
-
- junit
- junit
- test
-
+
+
+ junit
+ junit
+ test
+
-
- org.slf4j
- slf4j-log4j12
- test
-
+
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
-
- org.apache.hbase
- hbase-shaded-client
- ${project.version}
-
-
- org.apache.hbase
- hbase-shaded-testing-util
- ${project.version}
- test
-
-
- org.codehaus.jackson
- jackson-mapper-asl
- 1.9.13
- test
-
-
+
+ org.apache.hbase
+ hbase-shaded-client
+
+
+ org.apache.hbase
+ hbase-shaded-testing-util
+ ${project.version}
+ test
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+ 1.9.13
+ test
+
+
diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml
index 7712690a9c9c..0bc30cac8f9c 100644
--- a/hbase-shaded/hbase-shaded-testing-util/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml
@@ -1,229 +1,231 @@
-
- 4.0.0
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ 4.0.0
-
- hbase-shaded
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ..
-
+
+ hbase-shaded
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ..
+
- hbase-shaded-testing-util
- Apache HBase - Shaded - Testing Util
+ hbase-shaded-testing-util
+ Apache HBase - Shaded - Testing Util
-
+
-
- org.apache.hadoop
- hadoop-common
- ${hadoop.version}
- test-jar
- compile
-
-
- javax.servlet.jsp
- jsp-api
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
- org.codehaus.jackson
- jackson-jaxrs
-
-
- org.codehaus.jackson
- jackson-xc
-
-
- javax.xml.bind
- jaxb-api
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
- org.apache.hadoop
- hadoop-hdfs
- test-jar
- compile
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-app
- ${hadoop.version}
- test-jar
- compile
-
-
- org.codehaus.jackson
- jackson-mapper-asl
-
-
- org.codehaus.jackson
- jackson-core-asl
-
-
- org.codehaus.jackson
- jackson-jaxrs
-
-
- org.codehaus.jackson
- jackson-xc
-
-
- javax.xml.bind
- jaxb-api
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-jobclient
- ${hadoop.version}
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-common
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-server
- test-jar
- compile
-
-
- javax.xml.bind
- jaxb-api
-
-
-
-
- org.apache.hbase
- hbase-asyncfs
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-zookeeper
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-hadoop-compat
- test-jar
- compile
-
-
- org.codehaus.jackson
- jackson-jaxrs
- 1.9.13
- compile
-
-
- org.apache.hbase
- hbase-testing-util
- ${project.version}
- compile
-
-
- javax.xml.bind
- jaxb-api
-
-
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-site-plugin
-
- true
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
-
-
- aggregate-into-a-jar-with-relocated-third-parties
-
-
-
-
- javax.annotation:javax.annotation-api
- javax.activation:javax.activation-api
-
-
- org.apache.hbase:hbase-resource-bundle
- org.slf4j:*
- com.google.code.findbugs:*
- com.github.stephenc.findbugs:*
- com.github.spotbugs:*
- org.apache.htrace:*
- org.apache.yetus:*
- log4j:*
- commons-logging:*
-
-
-
-
-
-
-
-
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+ test-jar
+ compile
+
+
+ javax.servlet.jsp
+ jsp-api
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+
+
+ org.codehaus.jackson
+ jackson-jaxrs
+
+
+ org.codehaus.jackson
+ jackson-xc
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ test-jar
+ compile
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-app
+ ${hadoop.version}
+ test-jar
+ compile
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+
+
+ org.codehaus.jackson
+ jackson-jaxrs
+
+
+ org.codehaus.jackson
+ jackson-xc
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-jobclient
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-server
+ test-jar
+ compile
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+
+
+ org.apache.hbase
+ hbase-asyncfs
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-zookeeper
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ test-jar
+ compile
+
+
+ org.codehaus.jackson
+ jackson-jaxrs
+ 1.9.13
+ compile
+
+
+ org.apache.hbase
+ hbase-testing-util
+ compile
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-site-plugin
+
+ true
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+
+ aggregate-into-a-jar-with-relocated-third-parties
+
+
+
+
+ javax.annotation:javax.annotation-api
+ javax.activation:javax.activation-api
+
+
+ org.apache.hbase:hbase-resource-bundle
+ org.slf4j:*
+ com.google.code.findbugs:*
+ com.github.stephenc.findbugs:*
+ com.github.spotbugs:*
+ org.apache.htrace:*
+ org.apache.yetus:*
+ log4j:*
+ org.apache.logging.log4j:*
+ commons-logging:*
+
+
+
+
+
+
+
+
diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
index 2d05ee2a037f..1ebdf5929b3b 100644
--- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
@@ -40,7 +40,6 @@
org.apache.hbase
hbase-shaded-client
- ${project.version}
@@ -49,8 +48,18 @@
provided
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-api
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-core
+ provided
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
provided
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index d4671ea07cd4..45304ea72d47 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -1,556 +1,560 @@
-
-
- 4.0.0
-
- hbase-build-configuration
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ../hbase-build-configuration
-
- hbase-shaded
- Apache HBase - Shaded
- Module of HBase with most deps shaded.
- pom
-
-
- true
-
- true
- true
- org.apache.hadoop.hbase.shaded
-
-
- hbase-shaded-client-byo-hadoop
- hbase-shaded-client
- hbase-shaded-mapreduce
- hbase-shaded-testing-util
- hbase-shaded-testing-util-tester
- hbase-shaded-check-invariants
- hbase-shaded-with-hadoop-check-invariants
-
-
-
- org.apache.hbase
- hbase-resource-bundle
- true
-
-
-
- log4j
- log4j
- true
-
-
- org.slf4j
- slf4j-log4j12
- true
-
-
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
-
- org.apache.maven.plugins
- maven-remote-resources-plugin
-
-
- aggregate-licenses
-
- process
-
-
-
- ${build.year}
- ${license.debug.print.included}
- ${license.bundles.dependencies}
- ${license.bundles.jquery}
- ${license.bundles.logo}
- ${license.bundles.bootstrap}
-
-
- ${project.groupId}:hbase-resource-bundle:${project.version}
-
-
- ${project.groupId}:hbase-resource-bundle:${project.version}
-
-
- supplemental-models.xml
-
-
-
-
-
-
-
-
-
-
- maven-assembly-plugin
-
- true
-
-
-
- org.apache.maven.plugins
- maven-shade-plugin
-
-
- aggregate-into-a-jar-with-relocated-third-parties
- package
-
- shade
-
-
- false
- false
- true
- false
-
-
-
- javax.annotation:javax.annotation-api
- javax.activation:javax.activation-api
-
- org.apache.hadoop:*
-
- org.apache.hbase:hbase-resource-bundle
- org.slf4j:*
- com.google.code.findbugs:*
- com.github.stephenc.findbugs:*
- com.github.spotbugs:*
- org.apache.htrace:*
- org.apache.yetus:*
- log4j:*
- commons-logging:*
-
-
-
-
-
- com.cedarsoftware
- ${shaded.prefix}.com.cedarsoftware
-
-
- com.codahale
- ${shaded.prefix}.com.codahale
-
-
- com.ctc
- ${shaded.prefix}.com.ctc
-
-
- com.dropwizard
- ${shaded.prefix}.com.dropwizard
-
-
- com.fasterxml
- ${shaded.prefix}.com.fasterxml
-
-
- com.github.benmanes.caffeine
- ${shaded.prefix}.com.github.benmanes.caffeine
-
-
- com.google
- ${shaded.prefix}.com.google
-
-
- com.jamesmurty
- ${shaded.prefix}.com.jamesmurty
-
-
- com.jcraft
- ${shaded.prefix}.com.jcraft
-
-
- com.lmax
- ${shaded.prefix}.com.lmax
-
-
- com.microsoft
- ${shaded.prefix}.com.microsoft
-
-
- com.nimbusds
- ${shaded.prefix}.com.nimbusds
-
-
- com.squareup
- ${shaded.prefix}.com.squareup
-
-
- com.thoughtworks
- ${shaded.prefix}.com.thoughtworks
-
-
- com.zaxxer
- ${shaded.prefix}.com.zaxxer
-
+
+
+ 4.0.0
+
+ hbase-build-configuration
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ../hbase-build-configuration
+
+ hbase-shaded
+ Apache HBase - Shaded
+ Module of HBase with most deps shaded.
+ pom
+
+
+ true
+
+ true
+ true
+ org.apache.hadoop.hbase.shaded
+
+
+ hbase-shaded-client-byo-hadoop
+ hbase-shaded-client
+ hbase-shaded-mapreduce
+ hbase-shaded-testing-util
+ hbase-shaded-testing-util-tester
+ hbase-shaded-check-invariants
+ hbase-shaded-with-hadoop-check-invariants
+
+
+
+ org.apache.hbase
+ hbase-resource-bundle
+ true
+
+
+
+ org.apache.logging.log4j
+ log4j-api
+ true
+
+
+ org.apache.logging.log4j
+ log4j-core
+ true
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ true
+
+
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+
+ org.apache.maven.plugins
+ maven-remote-resources-plugin
+
+
+ aggregate-licenses
+
+ process
+
+
+
+ ${build.year}
+ ${license.debug.print.included}
+ ${license.bundles.dependencies}
+ ${license.bundles.jquery}
+ ${license.bundles.logo}
+ ${license.bundles.bootstrap}
+
+
+ ${project.groupId}:hbase-resource-bundle:${project.version}
+
+
+ ${project.groupId}:hbase-resource-bundle:${project.version}
+
+
+ supplemental-models.xml
+
+
+
+
+
+
+
+
+
+
+ maven-assembly-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+
+
+ aggregate-into-a-jar-with-relocated-third-parties
+ package
+
+ shade
+
+
+ false
+ false
+ true
+ false
+
+
+
+ javax.annotation:javax.annotation-api
+ javax.activation:javax.activation-api
+
+ org.apache.hadoop:*
+
+ org.apache.hbase:hbase-resource-bundle
+ org.slf4j:*
+ com.google.code.findbugs:*
+ com.github.stephenc.findbugs:*
+ com.github.spotbugs:*
+ org.apache.htrace:*
+ org.apache.yetus:*
+ log4j:*
+ org.apache.logging.log4j:*
+ commons-logging:*
+
+
+
+
+
+ com.cedarsoftware
+ ${shaded.prefix}.com.cedarsoftware
+
+
+ com.codahale
+ ${shaded.prefix}.com.codahale
+
+
+ com.ctc
+ ${shaded.prefix}.com.ctc
+
+
+ com.dropwizard
+ ${shaded.prefix}.com.dropwizard
+
+
+ com.fasterxml
+ ${shaded.prefix}.com.fasterxml
+
+
+ com.github.benmanes.caffeine
+ ${shaded.prefix}.com.github.benmanes.caffeine
+
+
+ com.google
+ ${shaded.prefix}.com.google
+
+
+ com.jamesmurty
+ ${shaded.prefix}.com.jamesmurty
+
+
+ com.jcraft
+ ${shaded.prefix}.com.jcraft
+
+
+ com.lmax
+ ${shaded.prefix}.com.lmax
+
+
+ com.microsoft
+ ${shaded.prefix}.com.microsoft
+
+
+ com.nimbusds
+ ${shaded.prefix}.com.nimbusds
+
+
+ com.squareup
+ ${shaded.prefix}.com.squareup
+
+
+ com.thoughtworks
+ ${shaded.prefix}.com.thoughtworks
+
+
+ com.zaxxer
+ ${shaded.prefix}.com.zaxxer
+
+
+
+ org.xbill
+ ${shaded.prefix}.org.xbill
+
+
+
+ org.jboss.netty
+ ${shaded.prefix}.org.jboss.netty
+
+
+ io.netty
+ ${shaded.prefix}.io.netty
+
+
+
+ okio
+ ${shaded.prefix}.okio
+
+
+
+ org.checkerframework
+ ${shaded.prefix}.org.checkerframework
+
+
+ org.codehaus
+ ${shaded.prefix}.org.codehaus
+
+
+ org.eclipse
+ ${shaded.prefix}.org.eclipse
+
+
+ org.ehcache
+ ${shaded.prefix}.org.ehcache
+
+
+ org.jcodings
+ ${shaded.prefix}.org.jcodings
+
+
+ org.joni
+ ${shaded.prefix}.org.joni
+
+
+ org.mortbay
+ ${shaded.prefix}.org.mortbay
+
+
+ org.nustaq
+ ${shaded.prefix}.org.nustaq
+
+
+ org.terracotta
+ ${shaded.prefix}.org.terracotta
+
+
+ org.tukaani
+ ${shaded.prefix}.org.tukaani
+
+
+ org.xerial
+ ${shaded.prefix}.org.xerial
+
+
+ org.znerd
+ ${shaded.prefix}.org.znerd
+
+
+ org.aopalliance
+ ${shaded.prefix}.org.aopalliance
+
+
+ org.fusesource
+ ${shaded.prefix}.org.fusesource
+
+
+ org.iq80
+ ${shaded.prefix}.org.iq80
+
+
+ org.jamon
+ ${shaded.prefix}.org.jamon
+
+
+ org.jets3t
+ ${shaded.prefix}.org.jets3t
+
+
+
+ contribs.mx
+ ${shaded.prefix}.contribs.mx
+
+
+ org.objectweb
+ ${shaded.prefix}.org.objectweb
+
+
+
+ org.apache.avro
+ ${shaded.prefix}.org.apache.avro
+
+
+ org.apache.curator
+ ${shaded.prefix}.org.apache.curator
+
+
+ org.apache.directory
+ ${shaded.prefix}.org.apache.directory
+
+
+ org.apache.http
+ ${shaded.prefix}.org.apache.http
+
+
+ org.apache.jasper
+ ${shaded.prefix}.org.apache.jasper
+
+
+ org.apache.jute
+ ${shaded.prefix}.org.apache.jute
+
+
+ org.apache.kerby
+ ${shaded.prefix}.org.apache.kerby
+
+
+ org.apache.taglibs
+ ${shaded.prefix}.org.apache.taglibs
+
+
+ org.apache.zookeeper
+ ${shaded.prefix}.org.apache.zookeeper
+
+
+
+ org.apache.commons.validator
+ ${shaded.prefix}.org.apache.commons.validator
+
+
+ org.apache.commons.beanutils
+ ${shaded.prefix}.org.apache.commons.beanutils
+
+
+ org.apache.commons.cli
+ ${shaded.prefix}.org.apache.commons.cli
+
+
+ org.apache.commons.collections
+ ${shaded.prefix}.org.apache.commons.collections
+
+
+ org.apache.commons.configuration
+ ${shaded.prefix}.org.apache.commons.configuration
+
+
+ org.apache.commons.crypto
+ ${shaded.prefix}.org.apache.commons.crypto
+
+
+ org.apache.commons.csv
+ ${shaded.prefix}.org.apache.commons.csv
+
+
+ org.apache.commons.daemon
+ ${shaded.prefix}.org.apache.commons.daemon
+
+
+ org.apache.commons.io
+ ${shaded.prefix}.org.apache.commons.io
+
+
+ org.apache.commons.math
+ ${shaded.prefix}.org.apache.commons.math
+
+
+ org.apache.commons.math3
+ ${shaded.prefix}.org.apache.commons.math3
+
+
+ org.apache.commons.net
+ ${shaded.prefix}.org.apache.commons.net
+
+
+ org.apache.commons.lang
+ ${shaded.prefix}.org.apache.commons.lang
+
+
+ org.apache.commons.lang3
+ ${shaded.prefix}.org.apache.commons.lang3
+
+
+ org.apache.commons.el
+ ${shaded.prefix}.org.apache.commons.el
+
+
+ org.apache.commons.httpclient
+ ${shaded.prefix}.org.apache.commons.httpclient
+
+
+ org.apache.commons.compress
+ ${shaded.prefix}.org.apache.commons.compress
+
+
+ org.apache.commons.digester
+ ${shaded.prefix}.org.apache.commons.digester
+
+
+ org.apache.commons.codec
+ ${shaded.prefix}.org.apache.commons.codec
+
+
+ org.apache.commons.text
+ ${shaded.prefix}.org.apache.commons.text
+
-
-
- org.xbill
- ${shaded.prefix}.org.xbill
-
+
+
+ net/
+ ${shaded.prefix}.net.
+
-
-
- org.jboss.netty
- ${shaded.prefix}.org.jboss.netty
-
-
- io.netty
- ${shaded.prefix}.io.netty
-
-
-
-
- okio
- ${shaded.prefix}.okio
-
-
-
-
- org.checkerframework
- ${shaded.prefix}.org.checkerframework
-
-
- org.codehaus
- ${shaded.prefix}.org.codehaus
-
-
- org.eclipse
- ${shaded.prefix}.org.eclipse
-
-
- org.ehcache
- ${shaded.prefix}.org.ehcache
-
-
- org.jcodings
- ${shaded.prefix}.org.jcodings
-
-
- org.joni
- ${shaded.prefix}.org.joni
-
-
- org.mortbay
- ${shaded.prefix}.org.mortbay
-
-
- org.nustaq
- ${shaded.prefix}.org.nustaq
-
-
- org.terracotta
- ${shaded.prefix}.org.terracotta
-
-
- org.tukaani
- ${shaded.prefix}.org.tukaani
-
-
- org.xerial
- ${shaded.prefix}.org.xerial
-
-
- org.znerd
- ${shaded.prefix}.org.znerd
-
-
- org.aopalliance
- ${shaded.prefix}.org.aopalliance
-
-
- org.fusesource
- ${shaded.prefix}.org.fusesource
-
-
- org.iq80
- ${shaded.prefix}.org.iq80
-
-
- org.jamon
- ${shaded.prefix}.org.jamon
-
-
- org.jets3t
- ${shaded.prefix}.org.jets3t
-
-
-
- contribs.mx
- ${shaded.prefix}.contribs.mx
-
-
- org.objectweb
- ${shaded.prefix}.org.objectweb
-
-
-
-
-
- org.apache.avro
- ${shaded.prefix}.org.apache.avro
-
-
- org.apache.curator
- ${shaded.prefix}.org.apache.curator
-
-
- org.apache.directory
- ${shaded.prefix}.org.apache.directory
-
-
- org.apache.http
- ${shaded.prefix}.org.apache.http
-
-
- org.apache.jasper
- ${shaded.prefix}.org.apache.jasper
-
-
- org.apache.jute
- ${shaded.prefix}.org.apache.jute
-
-
- org.apache.kerby
- ${shaded.prefix}.org.apache.kerby
-
-
- org.apache.taglibs
- ${shaded.prefix}.org.apache.taglibs
-
-
- org.apache.zookeeper
- ${shaded.prefix}.org.apache.zookeeper
-
-
-
-
- org.apache.commons.validator
- ${shaded.prefix}.org.apache.commons.validator
-
-
- org.apache.commons.beanutils
- ${shaded.prefix}.org.apache.commons.beanutils
-
-
- org.apache.commons.cli
- ${shaded.prefix}.org.apache.commons.cli
-
-
- org.apache.commons.collections
- ${shaded.prefix}.org.apache.commons.collections
-
-
- org.apache.commons.configuration
- ${shaded.prefix}.org.apache.commons.configuration
-
-
- org.apache.commons.crypto
- ${shaded.prefix}.org.apache.commons.crypto
-
-
- org.apache.commons.csv
- ${shaded.prefix}.org.apache.commons.csv
-
-
- org.apache.commons.daemon
- ${shaded.prefix}.org.apache.commons.daemon
-
-
- org.apache.commons.io
- ${shaded.prefix}.org.apache.commons.io
-
-
- org.apache.commons.math
- ${shaded.prefix}.org.apache.commons.math
-
-
- org.apache.commons.math3
- ${shaded.prefix}.org.apache.commons.math3
-
-
- org.apache.commons.net
- ${shaded.prefix}.org.apache.commons.net
-
-
- org.apache.commons.lang
- ${shaded.prefix}.org.apache.commons.lang
-
-
- org.apache.commons.lang3
- ${shaded.prefix}.org.apache.commons.lang3
-
-
- org.apache.commons.el
- ${shaded.prefix}.org.apache.commons.el
-
-
- org.apache.commons.httpclient
- ${shaded.prefix}.org.apache.commons.httpclient
-
-
- org.apache.commons.compress
- ${shaded.prefix}.org.apache.commons.compress
-
-
- org.apache.commons.digester
- ${shaded.prefix}.org.apache.commons.digester
-
-
- org.apache.commons.codec
- ${shaded.prefix}.org.apache.commons.codec
-
-
- org.apache.commons.text
- ${shaded.prefix}.org.apache.commons.text
-
-
-
-
- net/
- ${shaded.prefix}.net.
-
-
-
-
+
+
-
-
- LICENSE.txt
- ASL2.0
+
+
+ LICENSE.txt
+ ASL2.0
- overview.html
-
-
+ overview.html
+
+
-
- false
- ${project.name}
-
-
-
-
-
+
+ false
+ ${project.name}
+
+
+
+
+
-
- dnsjava:dnsjava
-
- dig*
- jnamed*
- lookup*
- update*
-
-
-
+
+ dnsjava:dnsjava
+
+ dig*
+ jnamed*
+ lookup*
+ update*
+
+
+
- org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
-
- META-INF/ECLIPSEF.SF
- META-INF/ECLIPSEF.RSA
-
-
-
-
- commons-beanutils:commons-beanutils-core
-
- org/apache/commons/collections/*.class
-
-
-
-
- org.apache.hadoop:hadoop-yarn-common
-
- webapps/*
- webapps/**/*
-
-
-
- *:*
-
-
- *.proto
- **/*.proto
-
- LICENSE
- NOTICE
-
-
-
-
- org.apache.commons:commons-math3
-
- assets/org/apache/commons/math3/**/*
-
-
-
-
- org.apache.hadoop:*
-
- mapred-default.xml.orig
-
-
-
-
- org.eclipse.jetty:*
-
- about.html
- jetty-dir.css
-
-
-
- org.apache.kerby:*
-
- krb5-template.conf
- krb5_udp-template.conf
- ccache.txt
- keytab.txt
-
-
-
-
-
-
-
-
-
-
+ org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
+
+ META-INF/ECLIPSEF.SF
+ META-INF/ECLIPSEF.RSA
+
+
+
+
+ commons-beanutils:commons-beanutils-core
+
+ org/apache/commons/collections/*.class
+
+
+
+
+ org.apache.hadoop:hadoop-yarn-common
+
+ webapps/*
+ webapps/**/*
+
+
+
+ *:*
+
+
+ *.proto
+ **/*.proto
+
+ LICENSE
+ NOTICE
+
+
+
+
+ org.apache.commons:commons-math3
+
+ assets/org/apache/commons/math3/**/*
+
+
+
+
+ org.apache.hadoop:*
+
+ mapred-default.xml.orig
+
+
+
+
+ org.eclipse.jetty:*
+
+ about.html
+ jetty-dir.css
+
+
+
+ org.apache.kerby:*
+
+ krb5-template.conf
+ krb5_udp-template.conf
+ ccache.txt
+ keytab.txt
+
+
+
+
+
+
+
+
+
+
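Aside from reindentation, the functional changes in this file are the swap of the log4j 1.x/slf4j-log4j12 dependencies for the log4j2 ones and the extra shade-plugin exclusion for the new backend. Abbreviated to the logging-related entries, and with element names assumed, the exclusion set has roughly this shape:

  <artifactSet>
    <excludes>
      <!-- sketch: logging stays unbundled and unrelocated; the consuming application supplies the backend -->
      <exclude>org.slf4j:*</exclude>
      <exclude>log4j:*</exclude>
      <exclude>org.apache.logging.log4j:*</exclude>
      <exclude>commons-logging:*</exclude>
    </excludes>
  </artifactSet>

Excluding org.apache.logging.log4j:* keeps the new backend out of the relocated jars, just as log4j:* and org.slf4j:* already did for the old stack.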
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index b6d2e30388a8..571da04530b0 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -192,13 +192,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
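hbase-shell, and the hbase-thrift and hbase-zookeeper modules further down, make the same substitution for their test runtime. A sketch of the resulting test-scope block (element names assumed; versions come from dependencyManagement in the root pom):

  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-api</artifactId>
    <scope>test</scope>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <scope>test</scope>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-slf4j-impl</artifactId>
    <scope>test</scope>
  </dependency>

Test scope is sufficient because production code logs only through SLF4J; a concrete backend is needed only when tests run.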
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index 6be2bfb6c394..e7dc42a68454 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@
-
+
- 4.0.0
-
- hbase-build-configuration
- org.apache.hbase
- 3.0.0-SNAPSHOT
- ../hbase-build-configuration
-
- hbase-testing-util
- Apache HBase - Testing Util
- HBase Testing Utilities.
-
+ 4.0.0
+
+ hbase-build-configuration
+ org.apache.hbase
+ 3.0.0-SNAPSHOT
+ ../hbase-build-configuration
+
+ hbase-testing-util
+ Apache HBase - Testing Util
+ HBase Testing Utilities.
+
-
- org.apache.hbase
- hbase-logging
- test-jar
- test
-
-
- org.apache.hbase
- hbase-common
- jar
- compile
-
-
- org.apache.hbase
- hbase-common
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-annotations
- test-jar
- compile
-
-
- jdk.tools
- jdk.tools
-
-
-
-
- org.apache.hbase
- hbase-client
- jar
- compile
-
-
- org.apache.hbase
- hbase-zookeeper
- jar
- compile
-
-
- org.apache.hbase
- hbase-zookeeper
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-server
- jar
- compile
-
-
- org.apache.hbase
- hbase-server
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-asyncfs
- test-jar
- compile
-
-
- org.apache.hbase
- hbase-hadoop-compat
- jar
- compile
-
-
- org.apache.hbase
- hbase-hadoop-compat
- test-jar
- compile
-
-
- org.slf4j
- jcl-over-slf4j
- test
-
-
- org.slf4j
- jul-to-slf4j
- test
-
-
- org.slf4j
- slf4j-log4j12
- test
-
-
- log4j
- log4j
- test
-
-
+
+ org.apache.hbase
+ hbase-logging
+ test-jar
+ test
+
+
+ org.apache.hbase
+ hbase-common
+ jar
+ compile
+
+
+ org.apache.hbase
+ hbase-common
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-annotations
+ test-jar
+ compile
+
+
+ jdk.tools
+ jdk.tools
+
+
+
+
+ org.apache.hbase
+ hbase-client
+ jar
+ compile
+
+
+ org.apache.hbase
+ hbase-zookeeper
+ jar
+ compile
+
+
+ org.apache.hbase
+ hbase-zookeeper
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-server
+ jar
+ compile
+
+
+ org.apache.hbase
+ hbase-server
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-asyncfs
+ test-jar
+ compile
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ jar
+ compile
+
+
+ org.apache.hbase
+ hbase-hadoop-compat
+ test-jar
+ compile
+
+
+ org.slf4j
+ jcl-over-slf4j
+ test
+
+
+ org.slf4j
+ jul-to-slf4j
+ test
+
+
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
+ test
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ test
+
+
-
-
-
-
-
- hadoop-3.0
-
- !hadoop.profile
-
-
-
- org.apache.hadoop
- hadoop-common
-
-
- javax.xml.bind
- jaxb-api
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
- org.apache.hadoop
- hadoop-minicluster
- compile
-
-
- com.google.guava
- guava
-
-
- javax.ws.rs
- jsr311-api
-
-
-
-
- org.apache.hadoop
- hadoop-minikdc
-
-
-
-
+
+
+
+
+
+ hadoop-3.0
+
+
+ !hadoop.profile
+
+
+
+
+ org.apache.hadoop
+ hadoop-common
+
+
+ javax.xml.bind
+ jaxb-api
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+
+ org.apache.hadoop
+ hadoop-minicluster
+ compile
+
+
+ com.google.guava
+ guava
+
+
+ javax.ws.rs
+ jsr311-api
+
+
+
+
+ org.apache.hadoop
+ hadoop-minikdc
+
+
+
+
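hbase-testing-util keeps the two SLF4J bridges it already had and only swaps the backend binding. The intended routing, sketched with comments (element names assumed):

  <!-- commons-logging (JCL) calls are routed into SLF4J -->
  <dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>jcl-over-slf4j</artifactId>
    <scope>test</scope>
  </dependency>
  <!-- java.util.logging (JUL) calls are routed into SLF4J -->
  <dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>jul-to-slf4j</artifactId>
    <scope>test</scope>
  </dependency>
  <!-- SLF4J is bound to the log4j2 backend; replaces slf4j-log4j12, which bound it to log4j 1.x -->
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-slf4j-impl</artifactId>
    <scope>test</scope>
  </dependency>

log4j-api and log4j-core are added alongside the binding, as in the other test modules.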
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index a3e88e50d089..376b7e8f8f23 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -269,13 +269,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 966ef5b14324..6270d0ebd9a5 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -166,13 +166,18 @@
test
- org.slf4j
- slf4j-log4j12
+ org.apache.logging.log4j
+ log4j-api
+ test
+
+
+ org.apache.logging.log4j
+ log4j-core
test
- log4j
- log4j
+ org.apache.logging.log4j
+ log4j-slf4j-impl
test
diff --git a/pom.xml b/pom.xml
index 7814b66e46b8..592df510ee65 100755
--- a/pom.xml
+++ b/pom.xml
@@ -832,7 +832,7 @@
- log4j.properties
+ log4j2.xml
@@ -1144,6 +1144,43 @@
+
+ banned-log4j
+
+ enforce
+
+
+
+
+
+ log4j:log4j
+
+
+ We do not allow log4j dependencies because we now use log4j2
+
+ true
+
+
+
+
+
+ banned-slf4j-log4j12
+
+ enforce
+
+
+
+
+
+ org.slf4j:slf4j-log4j12
+
+
+ We do not allow the slf4j-log4j12 dependency because we now use log4j-slf4j-impl
+
+
+
+
+
check-aggregate-license
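A sketch of what one of the new enforcer executions expands to, using the usual maven-enforcer-plugin element names (the id, exclude and message are the ones shown above; the surrounding plugin declaration and the searchTransitive element are assumed):

  <execution>
    <id>banned-log4j</id>
    <goals>
      <goal>enforce</goal>
    </goals>
    <configuration>
      <rules>
        <bannedDependencies>
          <excludes>
            <exclude>log4j:log4j</exclude>
          </excludes>
          <message>We do not allow log4j dependencies because we now use log4j2</message>
          <searchTransitive>true</searchTransitive>
        </bannedDependencies>
      </rules>
    </configuration>
  </execution>

The banned-slf4j-log4j12 execution follows the same pattern for org.slf4j:slf4j-log4j12, and the import-control rules in the next hunk apply the same policy at the source level, pointing developers to Log4jUtils in hbase-logging instead of direct log4j2 usage.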
@@ -1193,16 +1230,17 @@
Use SLF4j for logging
org.apache.commons.logging.**
+ org.apache.log4j.**
false
512
- Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+ Do not use log4j2 directly in code; see Log4jUtils in hbase-logging for more details.
- org.apache.log4j.**
+ org.apache.logging.log4j.**
@@ -1553,7 +1591,7 @@
4.13
1.3
4.2.0-incubating
- 1.2.17
+ 2.13.2
2.28.2
2.5.0
@@ -1951,8 +1989,8 @@
org.slf4j
slf4j-api
${slf4j.version}
-
- org.slf4j
- slf4j-log4j12
- ${slf4j.version}
-
org.slf4j
jcl-over-slf4j
@@ -1985,9 +2019,19 @@
${slf4j.version}
- log4j
- log4j
- ${log4j.version}
+ org.apache.logging.log4j
+ log4j-api
+ ${log4j2.version}
+
+
+ org.apache.logging.log4j
+ log4j-core
+ ${log4j2.version}
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ ${log4j2.version}
@@ -1995,8 +2039,6 @@
avro
${avro.version}
-
com.github.ben-manes.caffeine
caffeine
@@ -2757,9 +2799,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -2837,9 +2879,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -2886,9 +2928,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -2923,9 +2965,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -2986,9 +3028,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -3015,6 +3057,14 @@
com.google.code.findbugs
jsr305
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ log4j
+ log4j
+
@@ -3075,9 +3125,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
@@ -3091,9 +3141,9 @@
slf4j-log4j12
- log4j
- log4j
-
+ log4j
+ log4j
+
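Since the build now references a log4j2.xml instead of log4j.properties (first hunk of this pom.xml diff), a minimal console-only configuration of the kind such a file typically contains might look like the sketch below; the actual file shipped with HBase is more elaborate, so treat this purely as an illustration:

  <Configuration status="WARN">
    <Appenders>
      <!-- sketch: a single console appender; real configs usually add rolling file appenders too -->
      <Console name="Console" target="SYSTEM_ERR">
        <PatternLayout pattern="%d{ISO8601} %-5p [%t] %c{2}: %m%n"/>
      </Console>
    </Appenders>
    <Loggers>
      <Root level="INFO">
        <AppenderRef ref="Console"/>
      </Root>
    </Loggers>
  </Configuration>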