From a31bb1ad14dad53d91b4c87c4c18fa0a235abb5a Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Sat, 20 Mar 2021 09:21:25 +0800 Subject: [PATCH] HBASE-26802 Backport the log4j2 changes to branch-2 --- bin/hbase | 20 +- bin/hbase-daemon.sh | 16 +- bin/hbase.cmd | 2 + conf/hbase-env.cmd | 5 +- conf/hbase-env.sh | 4 +- conf/log4j-hbtop.properties | 27 - conf/log4j.properties | 139 --- conf/log4j2-hbtop.properties | 35 + conf/log4j2.properties | 137 +++ hbase-archetypes/hbase-client-project/pom.xml | 18 +- .../src/main/resources/log4j.properties | 121 -- .../src/main/resources/log4j2.properties | 137 +++ .../hbase-shaded-client-project/pom.xml | 18 +- .../src/main/resources/log4j.properties | 121 -- .../src/main/resources/log4j2.properties | 137 +++ hbase-assembly/pom.xml | 16 +- hbase-assembly/src/main/assembly/client.xml | 10 + .../src/main/assembly/hadoop-three-compat.xml | 12 +- hbase-asyncfs/pom.xml | 18 +- .../hbase/io/asyncfs/AsyncFSTestBase.java | 5 - hbase-balancer/pom.xml | 168 +++ hbase-client/pom.xml | 13 +- .../hbase/ipc/TestFailedServersLog.java | 67 +- .../security/TestHBaseSaslRpcClient.java | 14 +- hbase-common/pom.xml | 13 +- .../hadoop/hbase/logging/TestJul2Slf4j.java | 48 +- .../hadoop/hbase/logging/TestLog4jUtils.java | 39 +- .../hbase-compression-aircompressor/pom.xml | 13 +- .../hbase-compression-lz4/pom.xml | 13 +- .../hbase-compression-snappy/pom.xml | 13 +- .../hbase-compression-xz/pom.xml | 13 +- .../hbase-compression-zstd/pom.xml | 13 +- hbase-endpoint/pom.xml | 18 +- hbase-examples/pom.xml | 18 +- hbase-hadoop-compat/pom.xml | 255 ++-- hbase-hadoop2-compat/pom.xml | 13 +- hbase-hbtop/pom.xml | 13 +- hbase-http/pom.xml | 13 +- .../hadoop/hbase/http/log/LogLevel.java | 4 +- .../hadoop/hbase/http/log/TestLogLevel.java | 213 ++-- hbase-it/pom.xml | 18 +- hbase-logging/pom.xml | 23 +- .../hadoop/hbase/AsyncConsoleAppender.java | 46 - .../hbase/logging/InternalLog4jUtils.java | 58 +- .../java/org/apache/log4j/FileAppender.java | 288 +++++ .../src/test/resources/log4j.properties | 68 -- .../src/test/resources/log4j2.properties | 68 ++ hbase-mapreduce/pom.xml | 18 +- .../hadoop/hbase/util/LoadTestTool.java | 17 +- hbase-metrics-api/pom.xml | 13 +- hbase-metrics/pom.xml | 13 +- hbase-procedure/pom.xml | 13 +- hbase-replication/pom.xml | 13 +- hbase-rest/pom.xml | 18 +- hbase-rsgroup/pom.xml | 18 +- hbase-server/pom.xml | 18 +- .../hadoop/hbase/HBaseTestingUtility.java | 1 - .../TestAsyncTableBatchRetryImmediately.java | 11 +- .../hbase/client/TestMultiRespectsLimits.java | 13 +- .../hadoop/hbase/ipc/TestProtoBufRpc.java | 7 +- .../hbase/ipc/TestRpcServerTraceLogging.java | 24 +- .../regionserver/TestMultiLogThreshold.java | 64 +- .../TestRegionServerReportForDuty.java | 34 +- .../PerfTestCompactionPolicies.java | 26 +- .../hadoop/hbase/tool/TestCanaryTool.java | 162 +-- .../hbase-shaded-check-invariants/pom.xml | 19 +- .../hbase-shaded-client-byo-hadoop/pom.xml | 294 ++--- hbase-shaded/hbase-shaded-client/pom.xml | 152 ++- hbase-shaded/hbase-shaded-mapreduce/pom.xml | 614 ++++------ .../hbase-shaded-testing-util-tester/pom.xml | 57 +- .../hbase-shaded-testing-util/pom.xml | 332 +++-- .../pom.xml | 15 +- hbase-shaded/pom.xml | 1073 +++++++++-------- hbase-shell/pom.xml | 18 +- hbase-testing-util/pom.xml | 531 ++++---- hbase-thrift/pom.xml | 18 +- hbase-zookeeper/pom.xml | 13 +- pom.xml | 116 +- 78 files changed, 3514 insertions(+), 2762 deletions(-) delete mode 100644 conf/log4j-hbtop.properties delete mode 100644 conf/log4j.properties create mode 100644 
conf/log4j2-hbtop.properties create mode 100644 conf/log4j2.properties delete mode 100644 hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties create mode 100644 hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties delete mode 100644 hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties create mode 100644 hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties create mode 100644 hbase-balancer/pom.xml delete mode 100644 hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java create mode 100644 hbase-logging/src/test/java/org/apache/log4j/FileAppender.java delete mode 100644 hbase-logging/src/test/resources/log4j.properties create mode 100644 hbase-logging/src/test/resources/log4j2.properties diff --git a/bin/hbase b/bin/hbase index 75aa81b7c3a9..c9083b5ea9db 100755 --- a/bin/hbase +++ b/bin/hbase @@ -305,10 +305,13 @@ else # make it easier to check for shaded/not later on. shaded_jar="" fi +# here we will add slf4j-api, commons-logging, jul-to-slf4j, jcl-over-slf4j +# to classpath, as they are all logging bridges. Only exclude log4j* so we +# will not actually log anything out. Add it later if necessary for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ - [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ "${f}" != "htrace-core.jar$" ]] && \ + [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then CLASSPATH="${CLASSPATH}:${f}" fi done @@ -671,7 +674,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ ! 
"${f}" =~ ^.*/log4j.*$ ]]; then echo -n ":${f}" fi done @@ -720,8 +723,8 @@ elif [ "$COMMAND" = "hbtop" ] ; then done fi - if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then - HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties" + if [ -f "${HBASE_HOME}/conf/log4j2-hbtop.properties" ] ; then + HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j2.configurationFile=file:${HBASE_HOME}/conf/log4j2-hbtop.properties" fi HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}" else @@ -796,10 +799,6 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE" HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME" HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING" HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}" -if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then - HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH" - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH" -fi # Enable security logging on the master and regionserver only if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then @@ -810,10 +809,9 @@ fi HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX" # by now if we're running a command it means we need logging -for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do +for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do if [ -f "${f}" ]; then CLASSPATH="${CLASSPATH}:${f}" - break fi done diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh index 11c13eb52300..6fafab0ccec0 100755 --- a/bin/hbase-daemon.sh +++ b/bin/hbase-daemon.sh @@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log -if [ -z "${HBASE_ROOT_LOGGER}" ]; then -export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"} +if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then +export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"} fi -if [ -z "${HBASE_SECURITY_LOGGER}" ]; then -export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"} +if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then +export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"} +fi + +if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then +export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"} +fi + +if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then +export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"} fi HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"} diff --git a/bin/hbase.cmd b/bin/hbase.cmd index 3b569099090f..240b63c7ec71 100644 --- a/bin/hbase.cmd +++ b/bin/hbase.cmd @@ -332,6 +332,7 @@ set HBASE_OPTS=%HBASE_OPTS% -Djava.util.logging.config.class="org.apache.hadoop. if not defined HBASE_ROOT_LOGGER ( set HBASE_ROOT_LOGGER=INFO,console ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%" if defined JAVA_LIBRARY_PATH ( @@ -348,6 +349,7 @@ if not defined HBASE_SECURITY_LOGGER ( set HBASE_SECURITY_LOGGER=INFO,DRFAS ) ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%" set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX% diff --git a/conf/hbase-env.cmd b/conf/hbase-env.cmd index 4beebf646dee..84519d5606d2 100644 --- a/conf/hbase-env.cmd +++ b/conf/hbase-env.cmd @@ -32,7 +32,7 @@ @rem set HBASE_OFFHEAPSIZE=1000 @rem For example, to allocate 8G of offheap, to 8G: -@rem etHBASE_OFFHEAPSIZE=8G +@rem set HBASE_OFFHEAPSIZE=8G @rem Extra Java runtime options. 
@rem Below are what we set by default. May only work with SUN JVM. @@ -82,6 +82,9 @@ set HBASE_OPTS=%HBASE_OPTS% "-XX:+UseConcMarkSweepGC" "-Djava.net.preferIPv4Stac @rem Tell HBase whether it should manage it's own instance of ZooKeeper or not. @rem set HBASE_MANAGES_ZK=true +@rem Tell HBase the logger level and appenders +@rem set HBASE_ROOT_LOGGER=INFO,DRFA + @rem Uncomment to enable trace, you can change the options to use other exporters such as jaeger or @rem zipkin. See https://github.com/open-telemetry/opentelemetry-java-instrumentation on how to @rem configure exporters and other components through system properties. diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh index ee71a0ab56dc..e049fd6d853d 100644 --- a/conf/hbase-env.sh +++ b/conf/hbase-env.sh @@ -126,11 +126,11 @@ # export HBASE_MANAGES_ZK=true # The default log rolling policy is RFA, where the log file is rolled as per the size defined for the -# RFA appender. Please refer to the log4j.properties file to see more details on this appender. +# RFA appender. Please refer to the log4j2.properties file to see more details on this appender. # In case one needs to do log rolling on a date change, one should set the environment property # HBASE_ROOT_LOGGER to ",DRFA". # For example: -# HBASE_ROOT_LOGGER=INFO,DRFA +# export HBASE_ROOT_LOGGER=INFO,DRFA # The reason for changing default to RFA is to avoid the boundary case of filling out disk space as # DRFA doesn't put any cap on the log size. Please refer to HBase-5655 for more context. diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties deleted file mode 100644 index 4d68d79db70d..000000000000 --- a/conf/log4j-hbtop.properties +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=WARN,console -log4j.threshold=WARN - -# console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# ZooKeeper will still put stuff at WARN -log4j.logger.org.apache.zookeeper=ERROR diff --git a/conf/log4j.properties b/conf/log4j.properties deleted file mode 100644 index 2282fa5d4a35..000000000000 --- a/conf/log4j.properties +++ /dev/null @@ -1,139 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log -hbase.log.level=INFO - -# Define the root logger to the system property "hbase.root.logger". -log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender -log4j.appender.asyncconsole.target=System.err - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=${hbase.log.level} -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level} -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. 
-log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level} -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN - -# Disable request log by default, you can enable this by changing the appender -log4j.category.http.requests=INFO,NullAppender -log4j.additivity.http.requests=false -# Replace the above with this configuration if you want an http access.log -#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender -#log4j.appender.accessRFA.File=/var/log/hbase/access.log -#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout -#log4j.appender.accessRFA.layout.ConversionPattern=%m%n -#log4j.appender.accessRFA.MaxFileSize=200MB -#log4j.appender.accessRFA.MaxBackupIndex=10 -# route http.requests to the accessRFA appender -#log4j.logger.http.requests=INFO,accessRFA -# disable http.requests.* entries going up to the root logger -#log4j.additivity.http.requests=false diff --git a/conf/log4j2-hbtop.properties b/conf/log4j2-hbtop.properties new file mode 100644 index 000000000000..de2f97641da7 --- /dev/null +++ b/conf/log4j2-hbtop.properties @@ -0,0 +1,35 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# console +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %m%n + +rootLogger = WARN,console + +# ZooKeeper will still put stuff at WARN +logger.zookeeper.name = org.apache.zookeeper +logger.zookeeper.level = ERROR + diff --git a/conf/log4j2.properties b/conf/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/conf/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = 
${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = ${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. 
+# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml index e8eea1b8af36..40cfab27f123 100644 --- a/hbase-archetypes/hbase-client-project/pom.xml +++ b/hbase-archetypes/hbase-client-project/pom.xml @@ -64,13 +64,23 @@ runtime - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + runtime + + + org.apache.logging.log4j + log4j-core + runtime + + + org.apache.logging.log4j + log4j-slf4j-impl runtime - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api runtime diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea6..000000000000 --- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". 
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml index ad163b422aba..73305ea12de7 100644 --- a/hbase-archetypes/hbase-shaded-client-project/pom.xml +++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml @@ -70,13 +70,23 @@ runtime - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + runtime + + + org.apache.logging.log4j + log4j-core + runtime + + + org.apache.logging.log4j + log4j-slf4j-impl runtime - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api runtime diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea6..000000000000 --- a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". 
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index e6898ea63f46..eee87f7b6a55 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -352,12 +352,16 @@ jul-to-slf4j - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl @@ -365,6 +369,10 @@ opentelemetry-javaagent all + + org.apache.logging.log4j + log4j-1.2-api + diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml index 41aeea577eb1..9a7f55248610 100644 --- a/hbase-assembly/src/main/assembly/client.xml +++ b/hbase-assembly/src/main/assembly/client.xml @@ -62,12 +62,17 @@ org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations +<<<<<<< HEAD org.slf4j:slf4j-api org.slf4j:jcl-over-slf4j org.slf4j:jul-to-slf4j org.slf4j:slf4j-log4j12 org.slf4j:slf4j-reload4j io.opentelemetry.javaagent:* +======= + org.slf4j:* + org.apache.logging.log4j:* +>>>>>>> ba3610d097... HBASE-19577 Use log4j2 instead of log4j for logging (#1708) @@ -152,6 +157,7 @@ org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations +<<<<<<< HEAD org.slf4j:slf4j-api org.slf4j:jcl-over-slf4j org.slf4j:jul-to-slf4j @@ -163,6 +169,10 @@ lib/trace io.opentelemetry.javaagent:* +======= + org.slf4j:* + org.apache.logging.log4j:* +>>>>>>> ba3610d097... 
HBASE-19577 Use log4j2 instead of log4j for logging (#1708) diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml index d24f58224277..d5e32ac9f6a4 100644 --- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml +++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml @@ -49,11 +49,9 @@ org.apache.hbase:hbase-metrics org.apache.hbase:hbase-metrics-api org.apache.hbase:hbase-procedure - org.apache.hbase:hbase-protocol org.apache.hbase:hbase-protocol-shaded org.apache.hbase:hbase-replication org.apache.hbase:hbase-rest - org.apache.hbase:hbase-rsgroup org.apache.hbase:hbase-server org.apache.hbase:hbase-shell org.apache.hbase:hbase-testing-util @@ -111,10 +109,15 @@ org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations +<<<<<<< HEAD org.slf4j:slf4j-api org.slf4j:slf4j-log4j12 org.slf4j:slf4j-reload4j io.opentelemetry.javaagent:* +======= + org.slf4j:* + org.apache.logging.log4j:* +>>>>>>> ba3610d097... HBASE-19577 Use log4j2 instead of log4j for logging (#1708) @@ -211,11 +214,16 @@ org.apache.htrace:htrace-core4 org.apache.htrace:htrace-core org.apache.yetus:audience-annotations +<<<<<<< HEAD org.slf4j:slf4j-api org.slf4j:jcl-over-slf4j org.slf4j:jul-to-slf4j org.slf4j:slf4j-reload4j io.opentelemetry:* +======= + org.slf4j:* + org.apache.logging.log4j:* +>>>>>>> ba3610d097... HBASE-19577 Use log4j2 instead of log4j for logging (#1708) diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml index 2cb984012b63..26cab77a20fc 100644 --- a/hbase-asyncfs/pom.xml +++ b/hbase-asyncfs/pom.xml @@ -149,13 +149,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java index fc148e8de796..e1bc83ca684c 100644 --- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java +++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java @@ -96,11 +96,6 @@ protected static void startMiniDFSCluster(int servers) throws IOException { createDirsAndSetProperties(); Configuration conf = UTIL.getConfiguration(); - // Error level to skip some warnings specific to the minicluster. 
See HBASE-4709 - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class) - .setLevel(org.apache.log4j.Level.ERROR); - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class) - .setLevel(org.apache.log4j.Level.ERROR); CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build(); CLUSTER.waitClusterUp(); diff --git a/hbase-balancer/pom.xml b/hbase-balancer/pom.xml new file mode 100644 index 000000000000..c321af556b16 --- /dev/null +++ b/hbase-balancer/pom.xml @@ -0,0 +1,168 @@ + + + + 4.0.0 + + hbase-build-configuration + org.apache.hbase + 3.0.0-SNAPSHOT + ../hbase-build-configuration + + + hbase-balancer + Apache HBase - Balancer + HBase Balancer Support + + + + + + + org.apache.maven.plugins + maven-source-plugin + + + + maven-assembly-plugin + + true + + + + net.revelc.code + warbucks-maven-plugin + + + org.apache.maven.plugins + maven-checkstyle-plugin + + true + + + + + + + + org.apache.hbase + hbase-common + test-jar + test + + + org.apache.hbase + hbase-annotations + test-jar + test + + + org.apache.hbase + hbase-common + + + org.apache.hbase + hbase-client + + + org.slf4j + slf4j-api + + + com.github.stephenc.findbugs + findbugs-annotations + compile + true + + + junit + junit + test + + + org.slf4j + jcl-over-slf4j + test + + + org.slf4j + jul-to-slf4j + test + + + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + + + + + hadoop-3.0 + + !hadoop.profile + + + + org.apache.hadoop + hadoop-common + + + + + eclipse-specific + + + m2e.version + + + + + + + + org.eclipse.m2e + lifecycle-mapping + + + + + + + + + + + + + diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index f47349787144..20b2ef7ceb55 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -177,13 +177,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java index fa44022f8d09..dc94e91f4fde 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java @@ -17,73 +17,82 @@ */ package org.apache.hadoop.hbase.ipc; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import 
org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; -@RunWith(MockitoJUnitRunner.class) @Category({ ClientTests.class, SmallTests.class }) public class TestFailedServersLog { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFailedServersLog.class); + HBaseClassTestRule.forClass(TestFailedServersLog.class); static final int TEST_PORT = 9999; - private Address addr; - @Mock - private Appender mockAppender; + private Address addr; - @Captor - private ArgumentCaptor captorLoggingEvent; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setup() { - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).addAppender(mockAppender); + } @After public void teardown() { - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).removeAppender(mockAppender); } @Test public void testAddToFailedServersLogging() { - Throwable nullException = new NullPointerException(); + AtomicReference level = new AtomicReference<>(); + AtomicReference msg = new AtomicReference(); + doAnswer(new Answer() { + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); + + Throwable nullException = new NullPointerException(); FailedServers fs = new FailedServers(new Configuration()); addr = Address.fromParts("localhost", TEST_PORT); fs.addToFailedServers(addr, nullException); - Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture()); - LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.DEBUG)); - assertEquals("Added failed server with address " + addr.toString() + " to list caused by " - + nullException.toString(), - loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get()); + assertEquals("Added failed server with address " + addr.toString() + " to list caused by " + + nullException.toString(), msg.get()); } - } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 2252c215fa68..538a9b91c3c5 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.net.InetAddress; import 
java.util.Map; - import javax.security.auth.callback.Callback; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; @@ -39,7 +38,6 @@ import javax.security.sasl.RealmCallback; import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -55,16 +53,15 @@ import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.Assert; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Strings; @@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient { static final String DEFAULT_USER_NAME = "principal"; static final String DEFAULT_USER_PASSWORD = "password"; - private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class); @Rule public ExpectedException exception = ExpectedException.none(); - @BeforeClass - public static void before() { - Logger.getRootLogger().setLevel(Level.DEBUG); - } - @Test public void testSaslClientUsesGivenRpcProtection() throws Exception { Token token = createTokenMockWithCredentials(DEFAULT_USER_NAME, diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index f983c4c143a5..ce242286f4a9 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -235,13 +235,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java index cf654f583b89..f67ce616e2e2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java @@ -17,27 +17,26 @@ */ package org.apache.hadoop.hbase.logging; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.mockito.ArgumentCaptor; +import 
org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; /** * This should be in the hbase-logging module but the {@link HBaseClassTestRule} is in hbase-common @@ -56,27 +55,42 @@ public class TestJul2Slf4j { private String loggerName = getClass().getName(); - private Appender mockAppender; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setUp() { - mockAppender = mock(Appender.class); - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).addAppender(mockAppender); } @After public void tearDown() { - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).removeAppender(mockAppender); } @Test public void test() throws IOException { + AtomicReference level = new AtomicReference<>(); + AtomicReference msg = new AtomicReference(); + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); java.util.logging.Logger logger = java.util.logging.Logger.getLogger(loggerName); logger.info(loggerName); - ArgumentCaptor captor = ArgumentCaptor.forClass(LoggingEvent.class); - verify(mockAppender, times(1)).doAppend(captor.capture()); - LoggingEvent loggingEvent = captor.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.INFO)); - assertEquals(loggerName, loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.INFO, level.get()); + assertEquals(loggerName, msg.get()); } -} +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java index 89931de7128f..806107b55c66 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java @@ -24,9 +24,6 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -44,23 +41,29 @@ public class TestLog4jUtils { @Test public void test() { - Logger zk = LogManager.getLogger("org.apache.zookeeper"); - Level zkLevel = zk.getEffectiveLevel(); - Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); - Level hbaseZkLevel = hbaseZk.getEffectiveLevel(); - Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client"); - Level clientLevel = client.getEffectiveLevel(); + org.apache.logging.log4j.Logger zk = + 
org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper"); + org.apache.logging.log4j.Level zkLevel = zk.getLevel(); + org.apache.logging.log4j.Logger hbaseZk = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); + org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel(); + org.apache.logging.log4j.Logger client = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client"); + org.apache.logging.log4j.Level clientLevel = client.getLevel(); Log4jUtils.disableZkAndClientLoggers(); - assertEquals(Level.OFF, zk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName())); - assertEquals(Level.OFF, hbaseZk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName())); - assertEquals(Level.OFF, client.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(zk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(hbaseZk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(client.getName())); // restore the level - zk.setLevel(zkLevel); - hbaseZk.setLevel(hbaseZkLevel); - client.setLevel(clientLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel); } @Test diff --git a/hbase-compression/hbase-compression-aircompressor/pom.xml b/hbase-compression/hbase-compression-aircompressor/pom.xml index ccb4f272fc72..6fc5282674dc 100644 --- a/hbase-compression/hbase-compression-aircompressor/pom.xml +++ b/hbase-compression/hbase-compression-aircompressor/pom.xml @@ -135,13 +135,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-compression/hbase-compression-lz4/pom.xml b/hbase-compression/hbase-compression-lz4/pom.xml index 6489aee169c9..92b546a9af70 100644 --- a/hbase-compression/hbase-compression-lz4/pom.xml +++ b/hbase-compression/hbase-compression-lz4/pom.xml @@ -124,13 +124,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-compression/hbase-compression-snappy/pom.xml b/hbase-compression/hbase-compression-snappy/pom.xml index 6ccf48ac7704..1e66ccd41ae8 100644 --- a/hbase-compression/hbase-compression-snappy/pom.xml +++ b/hbase-compression/hbase-compression-snappy/pom.xml @@ -124,13 +124,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-compression/hbase-compression-xz/pom.xml b/hbase-compression/hbase-compression-xz/pom.xml index 425366f7d163..1884a5c74547 
100644 --- a/hbase-compression/hbase-compression-xz/pom.xml +++ b/hbase-compression/hbase-compression-xz/pom.xml @@ -108,13 +108,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-compression/hbase-compression-zstd/pom.xml b/hbase-compression/hbase-compression-zstd/pom.xml index 8afe65677fde..4f13758e54ec 100644 --- a/hbase-compression/hbase-compression-zstd/pom.xml +++ b/hbase-compression/hbase-compression-zstd/pom.xml @@ -124,13 +124,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index 42ed82d29bb4..ccdb823f26a9 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -228,13 +228,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index f9600c8048ea..31c526dc9bbf 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -225,13 +225,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index ee74fd91d867..4bc74fe8836c 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -1,5 +1,7 @@ - + - 4.0.0 - - hbase-build-configuration - org.apache.hbase - 2.6.0-SNAPSHOT - ../hbase-build-configuration - + 4.0.0 + + hbase-build-configuration + org.apache.hbase + 2.6.0-SNAPSHOT + ../hbase-build-configuration + - hbase-hadoop-compat - Apache HBase - Hadoop Compatibility - + hbase-hadoop-compat + Apache HBase - Hadoop Compatibility + Interfaces to be implemented in order to smooth over hadoop version differences - + - - + + maven-assembly-plugin @@ -43,117 +45,126 @@ true - - - org.apache.maven.plugins - maven-source-plugin - - - net.revelc.code - warbucks-maven-plugin - - - - - - - org.apache.hbase - hbase-annotations - test-jar - test - - - org.apache.hbase - hbase-logging - test-jar - test - - - org.apache.hbase - hbase-common - test-jar - test - - - org.apache.hbase.thirdparty - hbase-shaded-miscellaneous - - - - org.slf4j - slf4j-api - - - org.apache.hbase - hbase-metrics-api - - - junit - junit - test - - - org.slf4j - jcl-over-slf4j - test - - - org.slf4j - jul-to-slf4j - test - - - org.slf4j - slf4j-reload4j - test - - - ch.qos.reload4j - reload4j - test - - + + + org.apache.maven.plugins + maven-source-plugin + + + net.revelc.code + warbucks-maven-plugin + + + - + + + org.apache.hbase + hbase-annotations + test-jar + test + + + org.apache.hbase + hbase-logging + test-jar + test + + + org.apache.hbase + hbase-common + test-jar + test + + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + + + + org.slf4j + slf4j-api + + + org.apache.hbase + hbase-metrics-api + + + junit + junit + test + + + org.slf4j + jcl-over-slf4j + test + + + org.slf4j + jul-to-slf4j + test + + + 
org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api + test + + + - - skipHadoopCompatTests - - - skipHadoopCompatTests - - - - true - true - - - - eclipse-specific - - - m2e.version - - - - - + + skipHadoopCompatTests + + + skipHadoopCompatTests + + + + true + true + + + + eclipse-specific + + + m2e.version + + + + + - - org.eclipse.m2e - lifecycle-mapping - - - - - - - - - - - - + + org.eclipse.m2e + lifecycle-mapping + + + + + + + + + + + + diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml index 54174481f0fe..3fc7503add20 100644 --- a/hbase-hadoop2-compat/pom.xml +++ b/hbase-hadoop2-compat/pom.xml @@ -180,13 +180,18 @@ limitations under the License. test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml index a1e62180c022..20a40faf8b33 100644 --- a/hbase-hbtop/pom.xml +++ b/hbase-hbtop/pom.xml @@ -92,13 +92,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml index bc91be32b6ae..6088f3804c69 100644 --- a/hbase-http/pom.xml +++ b/hbase-http/pom.xml @@ -246,13 +246,18 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-slf4j-impl test diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java index 819581735a89..611316d9ec67 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java @@ -45,7 +45,6 @@ import org.apache.hadoop.util.ServletUtil; import org.apache.hadoop.util.Tool; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -307,8 +306,7 @@ private void process(String urlString) throws Exception { /** * A servlet implementation */ - @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) - @InterfaceStability.Unstable + @InterfaceAudience.Private public static class Servlet extends HttpServlet { private static final long serialVersionUID = 1L; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java index b52129ccdbf3..7019b207ec61 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; + import java.io.File; import java.io.IOException; import java.net.BindException; @@ -53,9 +54,6 @@ import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Level; -import 
org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -65,11 +63,11 @@ /** * Test LogLevel. */ -@Category({MiscTests.class, SmallTests.class}) +@Category({ MiscTests.class, SmallTests.class }) public class TestLogLevel { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLogLevel.class); + HBaseClassTestRule.forClass(TestLogLevel.class); private static String keystoresDir; private static String sslConfDir; @@ -79,9 +77,10 @@ public class TestLogLevel { private static final String logName = TestLogLevel.class.getName(); private static final String protectedPrefix = "protected"; private static final String protectedLogName = protectedPrefix + "." + logName; - private static final Logger log = LogManager.getLogger(logName); + private static final org.apache.logging.log4j.Logger log = + org.apache.logging.log4j.LogManager.getLogger(logName); private final static String PRINCIPAL = "loglevel.principal"; - private final static String KEYTAB = "loglevel.keytab"; + private final static String KEYTAB = "loglevel.keytab"; private static MiniKdc kdc; @@ -111,8 +110,7 @@ public static void setUp() throws Exception { } /** - * Sets up {@link MiniKdc} for testing security. - * Copied from HBaseTestingUtility#setupMiniKdc(). + * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc(). */ static private MiniKdc setupMiniKdc() throws Exception { Properties conf = MiniKdc.createConf(); @@ -130,7 +128,7 @@ static private MiniKdc setupMiniKdc() throws Exception { kdc = new MiniKdc(conf, dir); kdc.start(); } catch (BindException e) { - FileUtils.deleteDirectory(dir); // clean directory + FileUtils.deleteDirectory(dir); // clean directory numTries++; if (numTries == 3) { log.error("Failed setting up MiniKDC. Tried " + numTries + " times."); @@ -156,15 +154,15 @@ static private void setupSSL(File base) throws Exception { } /** - * Get the SSL configuration. - * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop. + * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in + * Hadoop. * @return {@link Configuration} instance with ssl configs loaded. 
* @param conf to pull client/server SSL settings filename from */ - private static Configuration getSslConfig(Configuration conf){ + private static Configuration getSslConfig(Configuration conf) { Configuration sslConf = new Configuration(false); String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY); - String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); + String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); sslConf.addResource(sslServerConfFile); sslConf.addResource(sslClientConfFile); sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile); @@ -189,36 +187,29 @@ public static void tearDown() { public void testCommandOptions() throws Exception { final String className = this.getClass().getName(); - assertFalse(validateCommand(new String[] {"-foo" })); + assertFalse(validateCommand(new String[] { "-foo" })); // fail due to insufficient number of arguments assertFalse(validateCommand(new String[] {})); - assertFalse(validateCommand(new String[] {"-getlevel" })); - assertFalse(validateCommand(new String[] {"-setlevel" })); - assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" })); + assertFalse(validateCommand(new String[] { "-getlevel" })); + assertFalse(validateCommand(new String[] { "-setlevel" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" })); // valid command arguments - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); // fail due to the extra argument - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "blah" })); - assertFalse(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080", - className })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" })); + assertFalse( + validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel", + "foo.bar:8080", className })); } /** * Check to see if a command can be accepted. - * * @param args a String array of arguments * @return true if the command can be accepted, false if not. */ @@ -237,40 +228,32 @@ private boolean validateCommand(String[] args) { } /** - * Creates and starts a Jetty server binding at an ephemeral port to run - * LogLevel servlet. + * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet. 
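As an aside, the -getlevel/-setlevel option shapes exercised by testCommandOptions above correspond to client invocations roughly like the sketch below. The host:port and logger name are made up, and the sketch assumes LogLevel.CLI and its Configuration constructor are visible to the caller, as they are to this test; it is an illustration, not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.http.log.LogLevel;

public class LogLevelClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical region server web UI address and logger name.
    String authority = "rs-host.example.com:16030";
    String logger = "org.apache.hadoop.hbase.regionserver.HRegionServer";
    LogLevel.CLI cli = new LogLevel.CLI(new Configuration());
    // Query the current level, then raise it to DEBUG over plain HTTP.
    cli.run(new String[] { "-getlevel", authority, logger, "-protocol", "http" });
    cli.run(new String[] { "-setlevel", authority, logger, "DEBUG", "-protocol", "http" });
  }
}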
* @param protocol "http" or "https" * @param isSpnego true if SPNEGO is enabled * @return a created HttpServer object * @throws Exception if unable to create or start a Jetty server */ - private HttpServer createServer(String protocol, boolean isSpnego) - throws Exception { - HttpServer.Builder builder = new HttpServer.Builder() - .setName("..") - .addEndpoint(new URI(protocol + "://localhost:0")) - .setFindPort(true) - .setConf(serverConf); + private HttpServer createServer(String protocol, boolean isSpnego) throws Exception { + HttpServer.Builder builder = new HttpServer.Builder().setName("..") + .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf); if (isSpnego) { // Set up server Kerberos credentials. // Since the server may fall back to simple authentication, // use ACL to make sure the connection is Kerberos/SPNEGO authenticated. - builder.setSecurityEnabled(true) - .setUsernameConfKey(PRINCIPAL) - .setKeytabConfKey(KEYTAB) - .setACL(new AccessControlList("client")); + builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB) + .setACL(new AccessControlList("client")); } // if using HTTPS, configure keystore/truststore properties. if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) { - builder = builder. - keyPassword(sslConf.get("ssl.server.keystore.keypassword")) - .keyStore(sslConf.get("ssl.server.keystore.location"), - sslConf.get("ssl.server.keystore.password"), - sslConf.get("ssl.server.keystore.type", "jks")) - .trustStore(sslConf.get("ssl.server.truststore.location"), - sslConf.get("ssl.server.truststore.password"), - sslConf.get("ssl.server.truststore.type", "jks")); + builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword")) + .keyStore(sslConf.get("ssl.server.keystore.location"), + sslConf.get("ssl.server.keystore.password"), + sslConf.get("ssl.server.keystore.type", "jks")) + .trustStore(sslConf.get("ssl.server.truststore.location"), + sslConf.get("ssl.server.truststore.password"), + sslConf.get("ssl.server.truststore.type", "jks")); } HttpServer server = builder.build(); @@ -279,38 +262,38 @@ private HttpServer createServer(String protocol, boolean isSpnego) } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, Level.DEBUG.toString()); + final boolean isSpnego) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + org.apache.logging.log4j.Level.DEBUG.toString()); } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String newLevel) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, newLevel); + final boolean isSpnego, final String newLevel) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + newLevel); } /** * Run both client and server using the given protocol. - * * @param bindProtocol specify either http or https for server * @param connectProtocol specify either http or https for client * @param isSpnego true if SPNEGO is enabled * @throws Exception if client can't accesss server. 
*/ private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String loggerName, final String newLevel) - throws Exception { + final boolean isSpnego, final String loggerName, final String newLevel) throws Exception { if (!LogLevel.isValidProtocol(bindProtocol)) { throw new Exception("Invalid server protocol " + bindProtocol); } if (!LogLevel.isValidProtocol(connectProtocol)) { throw new Exception("Invalid client protocol " + connectProtocol); } - Logger log = LogManager.getLogger(loggerName); - Level oldLevel = log.getLevel(); + org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(loggerName); + org.apache.logging.log4j.Level oldLevel = log.getLevel(); assertNotEquals("Get default Log Level which shouldn't be ERROR.", - Level.ERROR, oldLevel); + org.apache.logging.log4j.Level.ERROR, oldLevel); // configs needed for SPNEGO at server side if (isSpnego) { @@ -331,8 +314,8 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect String keytabFilePath = keyTabFile.getAbsolutePath(); - UserGroupInformation clientUGI = UserGroupInformation. - loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); + UserGroupInformation clientUGI = + UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); try { clientUGI.doAs((PrivilegedExceptionAction) () -> { // client command line @@ -346,41 +329,36 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect } // restore log level - GenericTestUtils.setLogLevel(log, oldLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel); } /** - * Run LogLevel command line to start a client to get log level of this test - * class. - * + * Run LogLevel command line to start a client to get log level of this test class. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to connect */ private void getLevel(String protocol, String authority, String logName) throws Exception { - String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol}; + String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf); cli.run(getLevelArgs); } /** - * Run LogLevel command line to start a client to set log level of this test - * class to debug. - * + * Run LogLevel command line to start a client to set log level of this test class to debug. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to run or log level does not change as expected */ - private void setLevel(String protocol, String authority, String logName, String newLevel) - throws Exception { - String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol}; + private void setLevel(String protocol, String authority, String logName, String newLevel) throws Exception { + String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? 
sslConf : clientConf); cli.run(setLevelArgs); - Logger log = LogManager.getLogger(logName); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getLogger(logName); assertEquals("new level not equal to expected: ", newLevel.toUpperCase(), - log.getEffectiveLevel().toString()); + logger.getLevel().toString()); } @Test @@ -397,7 +375,6 @@ public void testSettingProtectedLogLevel() throws Exception { /** * Test setting log level to "Info". - * * @throws Exception if client can't set log level to INFO. */ @Test @@ -407,7 +384,6 @@ public void testInfoLogLevel() throws Exception { /** * Test setting log level to "Error". - * * @throws Exception if client can't set log level to ERROR. */ @Test @@ -417,18 +393,15 @@ public void testErrorLogLevel() throws Exception { /** * Server runs HTTP, no SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttp() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - false); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -436,18 +409,15 @@ public void testLogLevelByHttp() throws Exception { /** * Server runs HTTP + SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttpWithSpnego() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - true); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -455,19 +425,15 @@ public void testLogLevelByHttpWithSpnego() throws Exception { /** * Server runs HTTPS, no SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. 
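The migrated setLevel() and testDynamicLogLevel() above no longer call log4j1's Logger.setLevel(); the level change now goes through log4j2's Configurator and is read back from the Logger. A minimal sketch of that round trip, with a made-up logger name and not taken from the patch itself:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.config.Configurator;

public class LevelChangeSketch {
  public static void main(String[] args) {
    String name = "org.example.SomeClass"; // hypothetical logger name
    Logger logger = LogManager.getLogger(name);
    Level before = logger.getLevel(); // effective level under the log4j2 core implementation
    Configurator.setLevel(name, Level.DEBUG); // replaces log4j1 Logger#setLevel
    System.out.println("level is now " + LogManager.getLogger(name).getLevel()); // DEBUG
    Configurator.setLevel(name, before); // restore, as the test does afterwards
  }
}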
*/ @Test public void testLogLevelByHttps() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - false); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - false); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } @@ -475,32 +441,27 @@ public void testLogLevelByHttps() throws Exception { /** * Server runs HTTPS + SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. */ @Test public void testLogLevelByHttpsWithSpnego() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - true); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - true); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); - } catch (SocketException e) { + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); + } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } } /** - * Assert that a throwable or one of its causes should contain the substr in its message. - * - * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util - * method which asserts t.toString() contains the substr. As the original throwable may have been - * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. - * After stop supporting Hadoop2, this method can be removed and assertion in tests can use - * t.getCause() directly, similar to HADOOP-15280. + * Assert that a throwable or one of its causes should contain the substr in its message. Ideally + * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method + * which asserts t.toString() contains the substr. As the original throwable may have been wrapped + * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After stop + * supporting Hadoop2, this method can be removed and assertion in tests can use t.getCause() + * directly, similar to HADOOP-15280. 
*/ private static void exceptionShouldContains(String substr, Throwable throwable) { Throwable t = throwable; @@ -512,6 +473,6 @@ private static void exceptionShouldContains(String substr, Throwable throwable) t = t.getCause(); } throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" + - StringUtils.stringifyException(throwable), throwable); + StringUtils.stringifyException(throwable), throwable); } -} +} \ No newline at end of file diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml index eddc3a40be6c..d9b580c85511 100644 --- a/hbase-it/pom.xml +++ b/hbase-it/pom.xml @@ -268,13 +268,23 @@ test - org.slf4j - slf4j-reload4j + org.apache.logging.log4j + log4j-api + test + + + org.apache.logging.log4j + log4j-core + test + + + org.apache.logging.log4j + log4j-slf4j-impl test - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-1.2-api test diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml index 91703bff9a58..fc183ef2cf23 100644 --- a/hbase-logging/pom.xml +++ b/hbase-logging/pom.xml @@ -38,7 +38,7 @@ src/test/resources - log4j.properties + log4j2.xml @@ -80,7 +80,7 @@ org.slf4j - slf4j-reload4j + jcl-over-slf4j test @@ -89,9 +89,24 @@ provided - ch.qos.reload4j - reload4j + org.apache.logging.log4j + log4j-api provided + + org.apache.logging.log4j + log4j-core + provided + + + org.apache.logging.log4j + log4j-slf4j-impl + test + + + org.apache.logging.log4j + log4j-1.2-api + test + diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java deleted file mode 100644 index 939b453c8d4b..000000000000 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import org.apache.yetus.audience.InterfaceAudience; - -/** - * Logger class that buffers before trying to log to the specified console. 
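For reference, the appender-mocking pattern introduced earlier in this patch (TestFailedServersLog, TestJul2Slf4j) can be reduced to the sketch below: attach a mocked log4j2 core Appender to the target logger and capture the formatted message from the LogEvent. The logger name and message are made up, Mockito is assumed on the classpath as it is for those tests, and the explicit Configurator call stands in for the level that the test log4j2.properties normally provides.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.concurrent.atomic.AtomicReference;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.config.Configurator;

public class LogCaptureSketch {
  public static void main(String[] args) {
    String name = "org.example.SomeClass"; // hypothetical logger under test
    Appender appender = mock(Appender.class);
    when(appender.getName()).thenReturn("mockAppender");
    when(appender.isStarted()).thenReturn(true); // log4j2 skips appenders that are not started
    AtomicReference<String> captured = new AtomicReference<>();
    doAnswer(invocation -> {
      LogEvent event = invocation.getArgument(0, LogEvent.class);
      captured.set(event.getMessage().getFormattedMessage());
      return null;
    }).when(appender).append(any(LogEvent.class));

    Configurator.setLevel(name, Level.INFO); // make sure INFO events reach the appender
    Logger logger = (Logger) LogManager.getLogger(name); // cast to the core Logger to add appenders
    logger.addAppender(appender);
    try {
      logger.info("hello");
    } finally {
      logger.removeAppender(appender);
    }
    System.out.println("captured: " + captured.get());
  }
}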
- */ -@InterfaceAudience.Private -public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender { - private final org.apache.log4j.ConsoleAppender consoleAppender; - - public AsyncConsoleAppender() { - super(); - consoleAppender = new org.apache.log4j.ConsoleAppender( - new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n")); - this.addAppender(consoleAppender); - } - - public void setTarget(String value) { - consoleAppender.setTarget(value); - } - - @Override - public void activateOptions() { - consoleAppender.activateOptions(); - super.activateOptions(); - } - -} diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java index 28d29bf30131..b0711d7e8f1a 100644 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java +++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java @@ -19,16 +19,15 @@ import java.io.File; import java.io.IOException; -import java.util.Enumeration; import java.util.HashSet; import java.util.Set; import org.apache.yetus.audience.InterfaceAudience; /** - * The actual class for operating on log4j. + * The actual class for operating on log4j2. *

* This class will depend on log4j directly, so callers should not use this class directly to avoid - * introducing log4j dependencies to downstream users. Please call the methods in + * introducing log4j2 dependencies to downstream users. Please call the methods in * {@link Log4jUtils}, as they will call the methods here through reflection. */ @InterfaceAudience.Private @@ -38,32 +37,53 @@ private InternalLog4jUtils() { } static void setLogLevel(String loggerName, String levelName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase()); + org.apache.logging.log4j.Level level = + org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase()); if (!level.toString().equalsIgnoreCase(levelName)) { throw new IllegalArgumentException("Unsupported log level " + levelName); } - logger.setLevel(level); + org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level); } static String getEffectiveLevel(String loggerName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - return logger.getEffectiveLevel().toString(); + org.apache.logging.log4j.Logger logger = + org.apache.logging.log4j.LogManager.getLogger(loggerName); + return logger.getLevel().name(); } static Set getActiveLogFiles() throws IOException { Set ret = new HashSet<>(); - org.apache.log4j.Appender a; - @SuppressWarnings("unchecked") - Enumeration e = - org.apache.log4j.Logger.getRootLogger().getAllAppenders(); - while (e.hasMoreElements()) { - a = e.nextElement(); - if (a instanceof org.apache.log4j.FileAppender) { - org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a; - String filename = fa.getFile(); - ret.add(new File(filename)); - } + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + if (!(logger instanceof org.apache.logging.log4j.core.Logger)) { + return ret; + } + org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger; + for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) { + if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.AbstractFileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } } return ret; } diff --git a/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java new file mode 100644 
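The rewritten getActiveLogFiles() above boils down to walking the appenders attached to the log4j2 root logger and collecting the file names of the file-based ones. A condensed sketch of that idea (it only checks two of the appender types the patch handles, and is not the patch code itself):

import java.io.File;
import java.util.HashSet;
import java.util.Set;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Logger;
import org.apache.logging.log4j.core.appender.FileAppender;
import org.apache.logging.log4j.core.appender.RollingFileAppender;

public class ActiveLogFilesSketch {
  static Set<File> activeLogFiles() {
    Set<File> files = new HashSet<>();
    org.apache.logging.log4j.Logger root = LogManager.getRootLogger();
    if (!(root instanceof Logger)) {
      return files; // not the log4j2 core implementation, nothing to inspect
    }
    for (Appender a : ((Logger) root).getAppenders().values()) {
      if (a instanceof FileAppender) {
        files.add(new File(((FileAppender) a).getFileName()));
      } else if (a instanceof RollingFileAppender) {
        files.add(new File(((RollingFileAppender) a).getFileName()));
      }
    }
    return files;
  }
}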
index 000000000000..7b3876ce0833 --- /dev/null +++ b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.log4j; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.io.Writer; + +/** + * Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs + * this class but the log4j-1.2-api bridge does not provide it, which causes the UTs in the + * hbase-mapreduce module to fail if we start a separate MR cluster. + */ +public class FileAppender extends WriterAppender { + + /** + * Controls file truncation. The default value for this variable is true, meaning + * that by default a FileAppender will append to an existing file and not truncate + * it. + *

+ * This option is meaningful only if the FileAppender opens the file. + */ + protected boolean fileAppend = true; + + /** + * The name of the log file. + */ + protected String fileName = null; + + /** + * Do we do bufferedIO? + */ + protected boolean bufferedIO = false; + + /** + * Determines the size of IO buffer be. Default is 8K. + */ + protected int bufferSize = 8 * 1024; + + /** + * The default constructor does not do anything. + */ + public FileAppender() { + } + + /** + * Instantiate a FileAppender and open the file designated by fileName. + * The opened filename will become the output destination for this appender. + *

+ * If the append parameter is true, the file will be appended to. Otherwise, the file + * designated by fileName will be truncated before being opened. + *

+ * If the bufferedIO parameter is true, then buffered IO will be used to + * write to the output file. + */ + public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO, + int bufferSize) throws IOException { + this.layout = layout; + this.setFile(fileName, append, bufferedIO, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by fileName. The opened + * filename will become the output destination for this appender. + *

+ * If the append parameter is true, the file will be appended to. Otherwise, the file + * designated by fileName will be truncated before being opened. + */ + public FileAppender(Layout layout, String fileName, boolean append) throws IOException { + this.layout = layout; + this.setFile(fileName, append, false, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by filename. The opened + * filename will become the output destination for this appender. + *

+ * The file will be appended to. + */ + public FileAppender(Layout layout, String fileName) throws IOException { + this(layout, fileName, true); + } + + /** + * The File property takes a string value which should be the name of the file to append + * to. + *

+ * Note that the special values "System.out" or "System.err" are no + * longer honored. + *

+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setFile(String file) { + // Trim spaces from both ends. The users probably does not want + // trailing spaces in file names. + String val = file.trim(); + fileName = val; + } + + /** + * Returns the value of the Append option. + */ + public boolean getAppend() { + return fileAppend; + } + + /** Returns the value of the File option. */ + public String getFile() { + return fileName; + } + + /** + * If the value of File is not null, then {@link #setFile} is called with the + * values of File and Append properties. + * @since 0.8.1 + */ + @Override + public void activateOptions() { + if (fileName != null) { + try { + setFile(fileName, fileAppend, bufferedIO, bufferSize); + } catch (java.io.IOException e) { + errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e, + org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE); + } + } + } + + /** + * Closes the previously opened file. + */ + protected void closeFile() { + if (this.qw != null) { + try { + this.qw.close(); + } catch (java.io.IOException e) { + if (e instanceof InterruptedIOException) { + Thread.currentThread().interrupt(); + } + // Exceptionally, it does not make sense to delegate to an + // ErrorHandler. Since a closed appender is basically dead. + } + } + } + + /** + * Get the value of the BufferedIO option. + *

+ * BufferedIO will significantly increase performance on heavily loaded systems. + */ + public boolean getBufferedIO() { + return this.bufferedIO; + } + + /** + * Get the size of the IO buffer. + */ + public int getBufferSize() { + return this.bufferSize; + } + + /** + * The Append option takes a boolean value. It is set to true by default. If + * true, then File will be opened in append mode by {@link #setFile setFile} (see + * above). Otherwise, {@link #setFile setFile} will open File in truncate mode. + *

+ * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setAppend(boolean flag) { + fileAppend = flag; + } + + /** + * The BufferedIO option takes a boolean value. It is set to false by default. + * If true, then File will be opened and the resulting {@link java.io.Writer} wrapped + * around a {@link BufferedWriter}. BufferedIO will significantly increase performance on heavily + * loaded systems. + */ + public void setBufferedIO(boolean bufferedIO) { + this.bufferedIO = bufferedIO; + if (bufferedIO) { + immediateFlush = false; + } + } + + /** + * Set the size of the IO buffer. + */ + public void setBufferSize(int bufferSize) { + this.bufferSize = bufferSize; + } + + /** + *

+ * Sets and opens the file where the log output will go. The specified file must be + * writable. + *

+ * If there was already an opened file, then the previous file is closed first. + *

+   * Do not use this method directly. To configure a FileAppender or one of its subclasses, set
+   * its properties one by one and then call activateOptions.
+   * @param fileName The path to the log file.
+   * @param append If true will append to fileName. Otherwise will truncate fileName.
+   */
+  public synchronized void setFile(String fileName, boolean append, boolean bufferedIO,
+    int bufferSize) throws IOException {
+
+    // It does not make sense to have immediate flush and bufferedIO.
+    if (bufferedIO) {
+      setImmediateFlush(false);
+    }
+
+    reset();
+    FileOutputStream ostream = null;
+    try {
+      //
+      // attempt to create file
+      //
+      ostream = new FileOutputStream(fileName, append);
+    } catch (FileNotFoundException ex) {
+      //
+      // if parent directory does not exist then
+      // attempt to create it and try to create file
+      // see bug 9150
+      //
+      String parentName = new File(fileName).getParent();
+      if (parentName != null) {
+        File parentDir = new File(parentName);
+        if (!parentDir.exists() && parentDir.mkdirs()) {
+          ostream = new FileOutputStream(fileName, append);
+        } else {
+          throw ex;
+        }
+      } else {
+        throw ex;
+      }
+    }
+    Writer fw = createWriter(ostream);
+    if (bufferedIO) {
+      fw = new BufferedWriter(fw, bufferSize);
+    }
+    this.setQWForFiles(fw);
+    this.fileName = fileName;
+    this.fileAppend = append;
+    this.bufferedIO = bufferedIO;
+    this.bufferSize = bufferSize;
+    writeHeader();
+  }
+
+  /**
+   * Sets the quiet writer being used. This method is overriden by {@code RollingFileAppender}.
+   */
+  protected void setQWForFiles(Writer writer) {
+    this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler);
+  }
+
+  /**
+   * Close any previously opened file and call the parent's reset.
+   */
+  @Override
+  protected void reset() {
+    closeFile();
+    this.fileName = null;
+    super.reset();
+  }
+}
diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties
deleted file mode 100644
index c322699ced24..000000000000
--- a/hbase-logging/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hbase.root.logger=INFO,console
-hbase.log.dir=.
-hbase.log.file=hbase.log
-
-# Define the root logger to the system property "hbase.root.logger".
-log4j.rootLogger=${hbase.root.logger}
-
-# Logging Threshold
-log4j.threshold=ALL
-
-#
-# Daily Rolling File Appender
-#
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
-
-# Custom Logging levels
-
-#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
-
-log4j.logger.org.apache.hadoop=WARN
-log4j.logger.org.apache.zookeeper=ERROR
-log4j.logger.org.apache.hadoop.hbase=DEBUG
-
-#These settings are workarounds against spurious logs from the minicluster.
-#See HBASE-4709
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN
-log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN
-log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN
-# Enable this to get detailed connection error/retry logging.
-# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE
diff --git a/hbase-logging/src/test/resources/log4j2.properties b/hbase-logging/src/test/resources/log4j2.properties
new file mode 100644
index 000000000000..f63c8701e35f
--- /dev/null
+++ b/hbase-logging/src/test/resources/log4j2.properties
@@ -0,0 +1,68 @@
+#/**
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements. See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership. The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License. You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+
+status = debug
+dest = err
+name = PropertiesConfig
+
+appender.console.type = Console
+appender.console.target = SYSTEM_ERR
+appender.console.name = Console
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %C{2}(%L): %m%n
+
+rootLogger = INFO,Console
+
+logger.hadoop.name = org.apache.hadoop
+logger.hadoop.level = WARN
+
+logger.zookeeper.name = org.apache.zookeeper
+logger.zookeeper.level = ERROR
+
+logger.hbase.name = org.apache.hadoop.hbase
+logger.hbase.level = DEBUG
+
+# These settings are workarounds against spurious logs from the minicluster. See HBASE-4709
+logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig
+logger.MetricsConfig.level = WARN
+
+logger.MetricsSinkAdapter.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter
+logger.MetricsSinkAdapter.level = WARN
+
+logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl
+logger.MetricsSystemImpl.level = WARN
+
+logger.MBeans.name = org.apache.hadoop.metrics2.util.MBeans
+logger.MBeans.level = WARN
+
+logger.directory.name = org.apache.directory
+logger.directory.level = WARN
+logger.directory.additivity = false
+
+logger.netty.name = org.apache.hbase.thirdparty.io.netty.channel
+logger.netty.level = DEBUG
+
+# For testing where we want to capture the log message of these special loggers
+logger.FailedServers.name = org.apache.hadoop.hbase.ipc.FailedServers
+logger.FailedServers.level = DEBUG
+
+logger.RSRpcServices.name = org.apache.hadoop.hbase.regionserver.RSRpcServices
+logger.RSRpcServices.level = DEBUG
+
+logger.TestJul2Slf4j.name = org.apache.hadoop.hbase.logging.TestJul2Slf4j
+logger.TestJul2Slf4j.level = DEBUG
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 5ae021cd2f4a..b2183b36c52f 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -306,13 +306,23 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
+      test
+
+
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-1.2-api
       test
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 63c1760626f0..7614b8376d07 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -25,24 +25,16 @@
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.atomic.AtomicReference;
-
 import javax.crypto.spec.SecretKeySpec;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.zookeeper.ZooKeeper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
@@ -52,6 +44,7 @@
 import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
@@ -61,6 +54,10 @@
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.zookeeper.ZooKeeper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@@ -583,7 +580,7 @@ public void initTestTable() throws IOException {
   @Override
   protected int doWork() throws IOException {
     if (!isVerbose) {
-      LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN);
+      Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN");
     }
     if (numTables > 1) {
       return parallelLoadTables();
diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml
index 9bd2f13b0d73..2754f5fdf9d0 100644
--- a/hbase-metrics-api/pom.xml
+++ b/hbase-metrics-api/pom.xml
@@ -133,13 +133,18 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 280c0c80fd5d..8c04df45bcf6 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -141,13 +141,18 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index d9e784c8f960..36f6cdb90f79 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -130,13 +130,18 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index 946bffdc4464..973ee627b315 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -136,13 +136,18 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index c88cee8a040b..271d0398965b 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -372,13 +372,23 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
+      test
+
+
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-1.2-api
       test
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index d5a5a0587c64..a3da37907158 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -181,13 +181,23 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
+      test
+
+
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-1.2-api
       test
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 53e5c32a6dc4..6148f8448244 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -533,13 +533,23 @@
       test
-      org.slf4j
-      slf4j-reload4j
+      org.apache.logging.log4j
+      log4j-api
+      test
+
+
+      org.apache.logging.log4j
+      log4j-core
+      test
+
+
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       test
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-1.2-api
       test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 72e12043c0e2..20ed888e056e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -695,7 +695,6 @@ public MiniDFSCluster startMiniDFSClusterForTestWAL(int namenodePort) throws IOE
    * This is used before starting HDFS and map-reduce mini-clusters Run something like the below to
    * check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in
    * the conf.
-   *

    * Configuration conf = TEST_UTIL.getConfiguration();
    * for (Iterator<Map.Entry<String, String>> i = conf.iterator(); i.hasNext();) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
index 57cfbeca6e29..028b8fd8c30f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java
@@ -29,11 +29,10 @@
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately {
 
   private static AsyncConnection CONN;
 
+  private static String LOG_LEVEL;
+
   @BeforeClass
   public static void setUp() throws Exception {
     // disable the debug log to avoid flooding the output
-    LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO);
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
     UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024);
     UTIL.startMiniCluster(1);
     Table table = UTIL.createTable(TABLE_NAME, FAMILY);
@@ -79,6 +81,9 @@ public static void setUp() throws Exception {
 
   @AfterClass
   public static void tearDown() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     CONN.close();
     UTIL.shutdownMiniCluster();
   }
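Note on the pattern above: instead of mutating org.apache.log4j.Logger objects directly, the test now goes through the hbase-logging facade, remembering the effective level in setUp and restoring it in tearDown so the change cannot leak into other tests. A minimal sketch of the same idea outside JUnit, assuming only that Log4jUtils from hbase-logging is on the classpath (illustrative, not part of the patch):

    import org.apache.hadoop.hbase.logging.Log4jUtils;

    public class QuietLoggerExample {
      public static void main(String[] args) {
        String target = "org.apache.hadoop.hbase.client.AsyncRegionLocatorHelper";
        // remember whatever is currently configured for this logger
        String saved = Log4jUtils.getEffectiveLevel(target);
        Log4jUtils.setLogLevel(target, "INFO");
        try {
          // ... run the chatty code with the quieter level ...
        } finally {
          // put the original level back so later code sees the old configuration
          Log4jUtils.setLogLevel(target, saved);
        }
      }
    }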
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index d8baa8d1cb16..34707f0a0eee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
@@ -65,15 +66,18 @@ public class TestMultiRespectsLimits {
       CompatibilityFactory.getInstance(MetricsAssertHelper.class);
   private final static byte[] FAMILY = Bytes.toBytes("D");
   public static final int MAX_SIZE = 100;
+  private static String LOG_LEVEL;
 
   @Rule
   public TestName name = new TestName();
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    TEST_UTIL.getConfiguration().setLong(
-        HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
-        MAX_SIZE);
+    // disable the debug log to avoid flooding the output
+    LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName());
+    Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO");
+    TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY,
+      MAX_SIZE);
 
     // Only start on regionserver so that all regions are on the same server.
     TEST_UTIL.startMiniCluster(1);
@@ -81,6 +85,9 @@ public static void setUpBeforeClass() throws Exception {
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
+    if (LOG_LEVEL != null) {
+      Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL);
+    }
     TEST_UTIL.shutdownMiniCluster();
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 941d921481d3..a45804a45159 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.After;
@@ -82,10 +83,8 @@ public void setUp() throws IOException { // Setup server for both protocols
     this.conf = HBaseConfiguration.create();
     this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY,
         rpcServerImpl);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer")
-      .setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace")
-      .setLevel(org.apache.log4j.Level.TRACE);
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR");
+    Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE");
     // Create server side implementation
     // Get RPC server for server side implementation
     this.server = RpcServerFactory.createRpcServer(null, "testrpc",
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
index 2d66106a3d2b..122517574f7c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java
@@ -35,17 +35,19 @@
 public class TestRpcServerTraceLogging {
 
   @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule
-      .forClass(TestRpcServerTraceLogging.class);
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class);
 
-  static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class);
+  private static final org.apache.logging.log4j.core.Logger rpcServerLog =
+    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RpcServer.class);
 
   static final String TRACE_LOG_MSG =
-      "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }"
-          + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } "
-      + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } "
-      + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: "
-      + "true client_handles_heartbeats: true track_scan_metrics: false";
+    "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" +
+      " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " +
+      "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " +
+      "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " +
+      "true client_handles_heartbeats: true track_scan_metrics: false";
 
   static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length();
 
@@ -62,7 +64,7 @@ public static void setUp() {
   @Test
   public void testLoggingWithTraceOff() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -72,7 +74,7 @@ public void testLoggingWithTraceOff() {
   @Test
   public void testLoggingWithTraceOn() {
     conf.setInt("hbase.ipc.trace.log.max.length", 250);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length());
@@ -82,7 +84,7 @@ public void testLoggingWithTraceOn() {
   @Test
   public void testLoggingWithTraceOnLargeMax() {
     conf.setInt("hbase.ipc.trace.log.max.length", 2000);
-    rpcServerLog.setLevel(org.apache.log4j.Level.TRACE);
+    rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE);
     String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG);
 
     assertEquals(TRACE_LOG_LENGTH, truncatedString.length());
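The log4j 1.x Logger#setLevel call has no equivalent on the public log4j2 API Logger interface, which is why the test above casts to org.apache.logging.log4j.core.Logger. A small illustrative sketch of that cast (assumes log4j-core is the bound implementation; not part of the patch):

    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;

    public class TraceToggleExample {
      public static void main(String[] args) {
        // the cast only works when log4j-core provides the active LoggerContext
        org.apache.logging.log4j.core.Logger log =
          (org.apache.logging.log4j.core.Logger) LogManager.getLogger("org.example.Rpc");
        log.setLevel(Level.TRACE);
        log.trace("trace logging is now enabled for this logger");
      }
    }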
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
index e2dcac08122c..5e2679e41118 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java
@@ -20,14 +20,16 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.BlockingDeque;
+import java.util.concurrent.LinkedBlockingDeque;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -36,10 +38,6 @@
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -47,8 +45,9 @@
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
@@ -80,7 +79,7 @@ public class TestMultiLogThreshold {
   private HRegionServer rs;
   private RSRpcServices services;
 
-  private Appender appender;
+  private org.apache.logging.log4j.core.Appender appender;
 
   @Parameterized.Parameter
   public static boolean rejectLargeBatchOp;
@@ -90,6 +89,21 @@ public static List<Object[]> params() {
     return Arrays.asList(new Object[] { false }, new Object[] { true });
   }
 
+  private final class LevelAndMessage {
+    final org.apache.logging.log4j.Level level;
+
+    final String msg;
+
+    public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) {
+      this.level = level;
+      this.msg = msg;
+    }
+
+  }
+
+  // log4j2 will reuse the LogEvent so we need to copy the level and message out.
+  private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>();
+
   @Before
   public void setupTest() throws Exception {
     util = new HBaseTestingUtility();
@@ -100,13 +114,28 @@ public void setupTest() throws Exception {
     util.startMiniCluster();
     util.createTable(NAME, TEST_FAM);
     rs = util.getRSForFirstRegionInTable(NAME);
-    appender = mock(Appender.class);
-    LogManager.getLogger(RSRpcServices.class).addAppender(appender);
+    appender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(appender.getName()).thenReturn("mockAppender");
+    when(appender.isStarted()).thenReturn(true);
+    doAnswer(new Answer<Void>() {
+
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        org.apache.logging.log4j.core.LogEvent logEvent =
+          invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class);
+        logs.add(
+          new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage()));
+        return null;
+      }
+    }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class));
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).addAppender(appender);
   }
 
   @After
   public void tearDown() throws Exception {
-    LogManager.getLogger(RSRpcServices.class).removeAppender(appender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger(RSRpcServices.class)).removeAppender(appender);
     util.shutdownMiniCluster();
   }
 
@@ -149,17 +178,16 @@ private void sendMultiRequest(int rows, ActionType actionType)
   }
 
   private void assertLogBatchWarnings(boolean expected) {
-    ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
-    verify(appender, atLeastOnce()).doAppend(captor.capture());
+    assertFalse(logs.isEmpty());
     boolean actual = false;
-    for (LoggingEvent event : captor.getAllValues()) {
-      if (event.getLevel() == Level.WARN &&
-        event.getRenderedMessage().contains("Large batch operation detected")) {
+    for (LevelAndMessage event : logs) {
+      if (event.level == org.apache.logging.log4j.Level.WARN &&
+        event.msg.contains("Large batch operation detected")) {
         actual = true;
         break;
       }
     }
-    reset(appender);
+    logs.clear();
     assertEquals(expected, actual);
   }
 
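The comment in the hunk above is the key detail: log4j2 may recycle LogEvent instances, so the mocked Appender copies the level and formatted message out immediately rather than holding on to the event objects. A condensed sketch of that capture pattern (assumes mockito-core and log4j-core on the test classpath; illustrative only, not part of the patch):

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Appender;
    import org.apache.logging.log4j.core.LogEvent;
    import org.apache.logging.log4j.core.Logger;

    public class LogCaptureExample {
      public static void main(String[] args) {
        Queue<String> captured = new ConcurrentLinkedQueue<>();
        Appender appender = mock(Appender.class);
        when(appender.getName()).thenReturn("capture");
        when(appender.isStarted()).thenReturn(true);
        doAnswer(invocation -> {
          LogEvent event = invocation.getArgument(0, LogEvent.class);
          // copy the data out now; the LogEvent object may be reused for later events
          captured.add(event.getLevel() + " " + event.getMessage().getFormattedMessage());
          return null;
        }).when(appender).append(any(LogEvent.class));
        Logger logger = (Logger) LogManager.getLogger("org.example");
        logger.addAppender(appender);
        logger.error("boom");
        logger.removeAppender(appender);
        System.out.println(captured);
      }
    }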
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
index 4d9bb68ac0c0..3524a72a64cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java
@@ -42,11 +42,6 @@
 import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.Before;
@@ -56,6 +51,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
+
 @Category(LargeTests.class)
 public class TestRegionServerReportForDuty {
 
@@ -91,26 +88,15 @@ public void tearDown() throws Exception {
     testUtil.shutdownMiniDFSCluster();
   }
 
-  /**
-   * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer}
-   * except that this implementation has a default appender to the root logger.
-   * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced.
-   * TODO: This class can be removed after we upgrade Hadoop dependency.
-   */
-  static class LogCapturer {
+  private static class LogCapturer {
     private StringWriter sw = new StringWriter();
-    private WriterAppender appender;
-    private org.apache.log4j.Logger logger;
+    private org.apache.logging.log4j.core.appender.WriterAppender appender;
+    private org.apache.logging.log4j.core.Logger logger;
 
-    LogCapturer(org.apache.log4j.Logger logger) {
+    LogCapturer(org.apache.logging.log4j.core.Logger logger) {
       this.logger = logger;
-      Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout");
-      if (defaultAppender == null) {
-        defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console");
-      }
-      final Layout layout = (defaultAppender == null) ? new PatternLayout() :
-          defaultAppender.getLayout();
-      this.appender = new WriterAppender(layout, sw);
+      this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder()
+        .setName("test").setTarget(sw).build();
       this.logger.addAppender(this.appender);
     }
 
@@ -146,7 +132,9 @@ public void testReportForDutyBackoff() throws IOException, InterruptedException
     master = cluster.addMaster();
     master.start();
 
-    LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class));
+    LogCapturer capturer =
+      new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+        .getLogger(HRegionServer.class));
     // Set sleep interval relatively low so that exponential backoff is more demanding.
     int msginterval = 100;
     cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval);
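The rewritten LogCapturer builds a log4j2 WriterAppender over a StringWriter instead of the old log4j 1.x WriterAppender(layout, writer) constructor. A stripped-down sketch of the same capture idea (assumes log4j-core; the explicit start() call is added here because a freshly built appender is not started automatically; illustrative only):

    import java.io.StringWriter;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.core.Logger;
    import org.apache.logging.log4j.core.appender.WriterAppender;

    public class WriterCaptureExample {
      public static void main(String[] args) {
        StringWriter sw = new StringWriter();
        WriterAppender appender =
          WriterAppender.newBuilder().setName("capture").setTarget(sw).build();
        appender.start();
        Logger logger = (Logger) LogManager.getLogger(WriterCaptureExample.class);
        logger.addAppender(appender);
        logger.error("hello from the capture example");
        logger.removeAppender(appender);
        System.out.println(sw.toString().contains("hello from the capture example"));
      }
    }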
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
index 13c7a6bc1039..d5b7951285a5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java
@@ -26,24 +26,36 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.logging.Log4jUtils;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
+/**
+ * This is not a unit test. It is not run as part of the general unit test suite. It is for
+ * comparing compaction policies. You must run it explicitly;
+ * e.g. mvn test -Dtest=PerfTestCompactionPolicies
+ */
 @Category({RegionServerTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class PerfTestCompactionPolicies extends MockStoreFileGenerator {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(PerfTestCompactionPolicies.class);
+
   private final RatioBasedCompactionPolicy cp;
   private final StoreFileListGenerator generator;
   private final HStore store;
@@ -120,12 +132,9 @@ public PerfTestCompactionPolicies(
     this.ratio = inRatio;
 
     // Hide lots of logging so the system out is usable as a tab delimited file.
-    org.apache.log4j.Logger.getLogger(CompactionConfiguration.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-    org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class).
-        setLevel(org.apache.log4j.Level.ERROR);
-
-    org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR);
+    Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR");
+    Log4jUtils.setLogLevel(cpClass.getName(), "ERROR");
 
 
     Configuration configuration = HBaseConfiguration.create();
@@ -197,7 +206,8 @@ private HStore createMockStore() {
     HStore s = mock(HStore.class);
     when(s.getStoreFileTtl()).thenReturn(Long.MAX_VALUE);
     when(s.getBlockingFileCount()).thenReturn(7L);
+    when(s.getRegionInfo()).thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO);
     return s;
   }
 
-}
+}
\ No newline at end of file
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index 546643542aaa..9eb543858d1d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -27,15 +27,17 @@
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.ArgumentMatchers.isA;
 import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -50,9 +52,6 @@
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Appender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.spi.LoggingEvent;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -60,19 +59,14 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
 import org.mockito.ArgumentMatcher;
-import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
-@RunWith(MockitoJUnitRunner.class)
-@Category({LargeTests.class})
+@Category({ LargeTests.class })
 public class TestCanaryTool {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
-      HBaseClassTestRule.forClass(TestCanaryTool.class);
+    HBaseClassTestRule.forClass(TestCanaryTool.class);
 
   private HBaseTestingUtility testingUtility;
   private static final byte[] FAMILY = Bytes.toBytes("f");
@@ -81,22 +75,26 @@ public class TestCanaryTool {
   @Rule
   public TestName name = new TestName();
 
+  private org.apache.logging.log4j.core.Appender mockAppender;
+
   @Before
   public void setUp() throws Exception {
     testingUtility = new HBaseTestingUtility();
     testingUtility.startMiniCluster();
-    LogManager.getRootLogger().addAppender(mockAppender);
+    mockAppender = mock(org.apache.logging.log4j.core.Appender.class);
+    when(mockAppender.getName()).thenReturn("mockAppender");
+    when(mockAppender.isStarted()).thenReturn(true);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender);
   }
 
   @After
   public void tearDown() throws Exception {
     testingUtility.shutdownMiniCluster();
-    LogManager.getRootLogger().removeAppender(mockAppender);
+    ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
+      .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender);
   }
 
-  @Mock
-  Appender mockAppender;
-
   @Test
   public void testBasicZookeeperCanaryWorks() throws Exception {
     final String[] args = { "-t", "10000", "-zookeeper" };
@@ -105,7 +103,8 @@ public void testBasicZookeeperCanaryWorks() throws Exception {
 
   @Test
   public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception {
-    final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
+    final String[] args =
+      { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" };
     testZookeeperCanaryWithArgs(args);
   }
 
@@ -114,7 +113,7 @@ public void testBasicCanaryWorks() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -155,7 +154,7 @@ public void testCanaryRegionTaskReadAllCF() throws Exception {
       // the test table has two column family. If readAllCF set true,
       // we expect read count is double of region count
       int expectedReadCount =
-          readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
+        readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions();
       assertEquals("canary region success count should equal total expected read count",
         expectedReadCount, sink.getReadSuccessCount());
      Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap();
@@ -183,7 +182,7 @@ public void testCanaryRegionTaskResult() throws Exception {
     TableName tableName = TableName.valueOf("testCanaryRegionTaskResult");
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -212,7 +211,7 @@ public void testCanaryRegionTaskResult() throws Exception {
     assertFalse("verify region map has size > 0", regionMap.isEmpty());
 
     for (String regionName : regionMap.keySet()) {
-      for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) {
+      for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) {
         assertNotNull("verify getRegionNameAsString()", regionName);
         assertNotNull("verify getRegionInfo()", res.getRegionInfo());
         assertNotNull("verify getTableName()", res.getTableName());
@@ -235,24 +234,25 @@ public void testCanaryRegionTaskResult() throws Exception {
 
   // Ignore this test. It fails w/ the below on some mac os x.
   // [ERROR] Failures:
-  // [ERROR]   TestCanaryTool.testReadTableTimeouts:216
+  // [ERROR] TestCanaryTool.testReadTableTimeouts:216
   // Argument(s) are different! Wanted:
   // mockAppender.doAppend(
  // <custom argument matcher>
-  //      );
-  //      -> at org.apache.hadoop.hbase.tool.TestCanaryTool
-  //          .testReadTableTimeouts(TestCanaryTool.java:216)
-  //      Actual invocations have different arguments:
-  //      mockAppender.doAppend(
-  //          org.apache.log4j.spi.LoggingEvent@2055cfc1
-  //          );
-  //      )
-  //  )
+  // );
+  // -> at org.apache.hadoop.hbase.tool.TestCanaryTool
+  // .testReadTableTimeouts(TestCanaryTool.java:216)
+  // Actual invocations have different arguments:
+  // mockAppender.doAppend(
+  // org.apache.log4j.spi.LoggingEvent@2055cfc1
+  // );
+  // )
+  // )
   //
-  @org.junit.Ignore @Test
+  @org.junit.Ignore
+  @Test
   public void testReadTableTimeouts() throws Exception {
-    final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"),
-      TableName.valueOf(name.getMethodName() + "2")};
+    final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"),
+      TableName.valueOf(name.getMethodName() + "2") };
     // Create 2 test tables.
     for (int j = 0; j < 2; j++) {
       Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY });
@@ -269,8 +269,8 @@ public void testReadTableTimeouts() throws Exception {
     CanaryTool canary = new CanaryTool(executor, sink);
     String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," +
       tableNames[1].getNameAsString() + "=0";
-    String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
-      name.getMethodName() + "2"};
+    String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1",
+      name.getMethodName() + "2" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class));
     for (int i = 0; i < 2; i++) {
@@ -280,18 +280,21 @@ public void testReadTableTimeouts() throws Exception {
         sink.getReadLatencyMap().get(tableNames[i].getNameAsString()));
     }
     // One table's timeout is set for 0 ms and thus, should lead to an error.
-    verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("exceeded the configured read timeout.");
-      }
-    }));
-    verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Configured read timeout");
-      }
-    }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("exceeded the configured read timeout.");
+        }
+      }));
+    verify(mockAppender, times(2))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured read timeout");
+        }
+      }));
   }
 
   @Test
@@ -299,43 +302,47 @@ public void testWriteTableTimeout() throws Exception {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink());
     CanaryTool canary = new CanaryTool(executor, sink);
-    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)};
+    String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertNotEquals("verify non-null write latency", null, sink.getWriteLatency());
     assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency());
-    verify(mockAppender, times(1)).doAppend(argThat(
-        new ArgumentMatcher<LoggingEvent>() {
-          @Override
-          public boolean matches(LoggingEvent argument) {
-            return argument.getRenderedMessage().contains("Configured write timeout");
-          }
-        }));
+    verify(mockAppender, times(1))
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage().contains("Configured write timeout");
+        }
+      }));
   }
 
-  //no table created, so there should be no regions
+  // no table created, so there should be no regions
   @Test
   public void testRegionserverNoRegions() throws Exception {
     runRegionserverCanary();
-    verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender)
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
-  //by creating a table, there shouldn't be any region servers not serving any regions
+  // by creating a table, there shouldn't be any region servers not serving any regions
   @Test
   public void testRegionserverWithRegions() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     testingUtility.createTable(tableName, new byte[][] { FAMILY });
     runRegionserverCanary();
-    verify(mockAppender, never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() {
-      @Override
-      public boolean matches(LoggingEvent argument) {
-        return argument.getRenderedMessage().contains("Regionserver not serving any regions");
-      }
-    }));
+    verify(mockAppender, never())
+      .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() {
+        @Override
+        public boolean matches(org.apache.logging.log4j.core.LogEvent argument) {
+          return argument.getMessage().getFormattedMessage()
+            .contains("Regionserver not serving any regions");
+        }
+      }));
   }
 
   @Test
@@ -343,7 +350,7 @@ public void testRawScanConfig() throws Exception {
     final TableName tableName = TableName.valueOf(name.getMethodName());
     Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
-    for (int i=0; i<1000; i++) {
+    for (int i = 0; i < 1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);
       Put p = new Put(iBytes);
       p.addColumn(FAMILY, COLUMN, iBytes);
@@ -357,23 +364,20 @@ public void testRawScanConfig() throws Exception {
       new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration());
     conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true);
     assertEquals(0, ToolRunner.run(conf, canary, args));
-    verify(sink, atLeastOnce())
-        .publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
-        isA(ColumnFamilyDescriptor.class), anyLong());
+    verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class),
+      isA(ColumnFamilyDescriptor.class), anyLong());
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void runRegionserverCanary() throws Exception {
     ExecutorService executor = new ScheduledThreadPoolExecutor(1);
     CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink());
-    String[] args = { "-t", "10000", "-regionserver"};
+    String[] args = { "-t", "10000", "-regionserver" };
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
     assertEquals("verify no read error count", 0, canary.getReadFailures().size());
   }
 
   private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
-    Integer port =
-      Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null);
     String hostPort = testingUtility.getZkCluster().getAddress().toString();
     testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort + "/hbase");
     ExecutorService executor = new ScheduledThreadPoolExecutor(2);
@@ -381,8 +385,8 @@ private void testZookeeperCanaryWithArgs(String[] args) throws Exception {
     CanaryTool canary = new CanaryTool(executor, sink);
     assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args));
 
-    String baseZnode = testingUtility.getConfiguration()
-      .get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
+    String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT,
+      HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT);
     verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong());
   }
 }
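Since the mock appender is now attached to the "org.apache.hadoop.hbase" logger rather than the root logger, the assertions verify append() with log4j2 LogEvents instead of doAppend() with log4j 1.x LoggingEvents. The matcher shape repeated throughout the test can be reduced to a helper like the following (illustrative sketch assuming mockito-core and log4j-core; the helper class and method names are made up here):

    import static org.mockito.ArgumentMatchers.argThat;
    import static org.mockito.Mockito.atLeastOnce;
    import static org.mockito.Mockito.verify;

    import org.apache.logging.log4j.core.Appender;
    import org.apache.logging.log4j.core.LogEvent;

    public final class LogAssertions {
      private LogAssertions() {
      }

      // passes when the mocked appender received at least one event containing the text
      public static void assertLogged(Appender mockAppender, String text) {
        verify(mockAppender, atLeastOnce()).append(
          argThat((LogEvent event) -> event.getMessage().getFormattedMessage().contains(text)));
      }
    }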
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 071f899d5607..b07c66dfd046 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -46,12 +46,10 @@
     
       org.apache.hbase
       hbase-shaded-mapreduce
-      ${project.version}
     
     
       org.apache.hbase
       hbase-shaded-client-byo-hadoop
-      ${project.version}
     
     
     
@@ -60,8 +58,18 @@
       provided
     
     
-      ch.qos.reload4j
-      reload4j
+      org.apache.logging.log4j
+      log4j-api
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      provided
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
       provided
     
     
@@ -108,8 +116,7 @@
                   
                     
                     org.slf4j:*
-                    log4j:*
-                    ch.qos.reload4j:*
+                    org.apache.logging.log4j:*
                     commons-logging:*
                     
                     com.google.code.findbugs:*
diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
index b575b53e5778..5c699583faba 100644
--- a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
+++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-client-byo-hadoop
-    Apache HBase - Shaded - Client
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
-                
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-            
-        
-    
-    
-        
-            org.apache.hbase
-            hbase-client
-        
-    
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-client-byo-hadoop
+  Apache HBase - Shaded - Client
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
+        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+      
+    
+  
+  
+    
+      org.apache.hadoop
+      hadoop-auth
+      provided
+    
+  
 
-    
+  
       
-      
-        hadoop-2.0
-        
-          
+    
+      hadoop-2.0
+      
+        
               
-              !hadoop.profile
-          
-        
-        
-          
-            org.apache.hadoop
-            hadoop-auth
-            provided
-          
-          
-            org.apache.hadoop
-            hadoop-common
-            provided
-          
-          
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-          
-            org.codehaus.jackson
-            jackson-xc
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-        
-      
+              
+          !hadoop.profile
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
 
       
-      
-        hadoop-3.0
-        
-          
-            hadoop.profile
-            3.0
-          
-        
-        
-          
-            org.apache.hadoop
-            hadoop-auth
-            provided
-          
-          
-            org.apache.hadoop
-            hadoop-common
-            provided
-          
-          
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-          
-            org.codehaus.jackson
-            jackson-xc
-            1.9.13
-            provided
-            
-              
-                org.codehaus.jackson
-                jackson-mapper-asl
-              
-              
-                org.codehaus.jackson
-                jackson-core-asl
-              
-            
-          
-        
-      
-    
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml
index de9a070ded87..e6a3a2efc1d2 100644
--- a/hbase-shaded/hbase-shaded-client/pom.xml
+++ b/hbase-shaded/hbase-shaded-client/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-client
-    Apache HBase - Shaded - Client (with Hadoop bundled)
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-client
+  Apache HBase - Shaded - Client (with Hadoop bundled)
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-                
-                    
-                        aggregate-into-a-jar-with-relocated-third-parties
-                        
-                            
-                                
-                                    
-                                    javax.annotation:javax.annotation-api
-                                    javax.activation:javax.activation-api
-                                    jakarta.activation:jakarta.activation-api 
-                                    jakarta.ws.rs:jakarta.ws.rs-api
-                                    jakarta.annotation:jakarta.annotation-api
-                                    jakarta.validation:jakarta.validation-api
-                                    org.glassfish.hk2.external:jakarta.inject
-                                    
-                                    
-                                    org.apache.hbase:hbase-resource-bundle
-                                    org.slf4j:*
-                                    com.google.code.findbugs:*
-                                    com.github.stephenc.findbugs:*
-                                    com.github.spotbugs:*
-                                    org.apache.htrace:*
-                                    org.apache.yetus:*
-                                    log4j:*
-                                    ch.qos.reload4j:*
-                                    commons-logging:*
-                                    org.javassist:*
-                                    io.opentelemetry:*
-                                
-                            
-                        
-                    
-                
-            
-        
-    
-    
-        
-            org.apache.hbase
-            hbase-client
-        
-    
-
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+        
+          
+            aggregate-into-a-jar-with-relocated-third-parties
+            
+              
+                
+                  
+                  javax.annotation:javax.annotation-api
+                  javax.activation:javax.activation-api
+                  jakarta.activation:jakarta.activation-api 
+                  jakarta.ws.rs:jakarta.ws.rs-api
+                  jakarta.annotation:jakarta.annotation-api
+                  jakarta.validation:jakarta.validation-api
+                  org.glassfish.hk2.external:jakarta.inject
+                  
+                  
+                  org.apache.hbase:hbase-resource-bundle
+                  org.slf4j:*
+                  com.google.code.findbugs:*
+                  com.github.stephenc.findbugs:*
+                  com.github.spotbugs:*
+                  org.apache.htrace:*
+                  org.apache.yetus:*
+                  org.apache.logging.log4j:*
+                  commons-logging:*
+                  org.javassist:*
+                  io.opentelemetry:*
+                
+              
+            
+          
+        
+      
+    
+  
+  
+    
+      org.apache.hbase
+      hbase-client
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
index 10d943109afc..0a000a2f4d40 100644
--- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -1,6 +1,6 @@
 
+  xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
     
-    4.0.0
-    
-        hbase-shaded
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ..
-    
-    hbase-shaded-mapreduce
-    Apache HBase - Shaded - MapReduce
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ..
+  
+  hbase-shaded-mapreduce
+  Apache HBase - Shaded - MapReduce
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-jar-plugin
-                
-                    
-                        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-jar-plugin
+        
+          
+            
                             
-                            org/apache/hadoop/hbase/mapreduce/Driver
-                        
-                    
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-            
-        
-    
-    
+              org/apache/hadoop/hbase/mapreduce/Driver
+            
+          
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+      
+    
+  
+  
         
-        
-            org.apache.hbase
-            hbase-mapreduce
-            
-              
-              
-                javax.xml.bind
-                jaxb-api
-              
-              
-                javax.ws.rs
-                jsr311-api
-              
-              
-              
-                javax.ws.rs
-                javax.ws.rs-api
-              
-              
-                com.sun.jersey
-                jersey-server
-              
-              
-                com.sun.jersey
-                jersey-client
-              
-              
-                com.sun.jersey
-                jersey-core
-              
-              
-                com.sun.jersey
-                jersey-json
-              
-              
-                com.sun.jersey.contribs
-                jersey-guice
-              
-              
-              
-                javax.servlet
-                javax.servlet-api
-              
-              
-                org.eclipse.jetty
-                jetty-http
-              
-              
-                org.eclipse.jetty
-                jetty-security
-              
-              
-                org.eclipse.jetty
-                jetty-server
-              
-              
-                org.eclipse.jetty
-                jetty-servlet
-              
-              
-                org.eclipse.jetty
-                jetty-util
-              
-              
-                org.eclipse.jetty
-                jetty-util-ajax
-              
-              
-                org.glassfish
-                javax.el
-              
-              
-                org.eclipse.jetty
-                jetty-webapp
-              
-              
-                org.glassfish.jersey.core
-                jersey-server
-              
-              
-                org.glassfish.jersey.containers
-                jersey-container-servlet-core
-              
-              
-              
-                org.glassfish.web
-                javax.servlet.jsp
-              
-              
-                javax.servlet.jsp
-                javax.servlet.jsp-api
-              
-            
-        
-    
+    
+      org.apache.hadoop
+      hadoop-common
+      provided
+    
+  
 
-    
+  
         
         
-        
-          hadoop-2.0
-          
-            
+    
+      hadoop-2.0
+      
+        
                 
-                !hadoop.profile
-            
-          
-          
-            
-              org.apache.hadoop
-              hadoop-common
-              provided
-              
-                
-                  net.java.dev.jets3t
-                  jets3t
-                
-                
-                  javax.servlet.jsp
-                  jsp-api
-                
-                
-                  org.mortbay.jetty
-                  jetty
-                
-                
-                  com.sun.jersey
-                  jersey-server
-                
-                
-                  com.sun.jersey
-                  jersey-core
-                
-                
-                  com.sun.jersey
-                  jersey-json
-                
-                
-                  javax.servlet
-                  servlet-api
-                
-                
-                  tomcat
-                  jasper-compiler
-                
-                
-                  tomcat
-                  jasper-runtime
-                
-                
-                  com.google.code.findbugs
-                  jsr305
-                
-              
-            
-            
-              org.apache.hadoop
-              hadoop-hdfs
-              provided
-              
-                
-                  javax.servlet.jsp
-                  jsp-api
-                
-                
-                  javax.servlet
-                  servlet-api
-                
-                
-                  io.netty
-                  netty
-                
-                
-                  stax
-                  stax-api
-                
-                
-                  xerces
-                  xercesImpl
-                
-              
-              ${hadoop-two.version}
-            
-            
-              org.apache.hadoop
-              hadoop-mapreduce-client-core
-              provided
-              
-                
-                  com.google.guava
-                  guava
-                
-              
-            
-            
+                
+          !hadoop.profile
+        
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+          
+            
+              net.java.dev.jets3t
+              jets3t
+            
+            
+              javax.servlet.jsp
+              jsp-api
+            
+            
+              org.mortbay.jetty
+              jetty
+            
+            
+              com.sun.jersey
+              jersey-server
+            
+            
+              com.sun.jersey
+              jersey-core
+            
+            
+              com.sun.jersey
+              jersey-json
+            
+            
+              javax.servlet
+              servlet-api
+            
+            
+              tomcat
+              jasper-compiler
+            
+            
+              tomcat
+              jasper-runtime
+            
+            
+              com.google.code.findbugs
+              jsr305
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          provided
+          
+            
+              javax.servlet.jsp
+              jsp-api
+            
+            
+              javax.servlet
+              servlet-api
+            
+            
+              io.netty
+              netty
+            
+            
+              stax
+              stax-api
+            
+            
+              xerces
+              xercesImpl
+            
+          
+          ${hadoop-two.version}
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          provided
+          
+            
+              com.google.guava
+              guava
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
               org.codehaus.jackson
-              jackson-jaxrs
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
               org.codehaus.jackson
-              jackson-xc
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
-              org.apache.hadoop
-              hadoop-auth
-              provided
-            
-          
-        
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+      
+    
 
         
-        
-          hadoop-3.0
-          
-            
-              hadoop.profile
-              3.0
-            
-          
-          
-            ${hadoop-three.version}
-          
-          
-            
-              org.apache.hadoop
-              hadoop-common
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-hdfs
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-auth
-              provided
-            
-            
-              org.apache.hadoop
-              hadoop-mapreduce-client-core
-              provided
-              
-                
-                  com.google.guava
-                  guava
-                
-                
-                  javax.xml.bind
-                  jaxb-api
-                
-                
-                  javax.ws.rs
-                  jsr311-api
-                
-              
-            
-            
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
+        ${hadoop-three.version}
+      
+      
+        
+          org.apache.hadoop
+          hadoop-common
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-hdfs
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-auth
+          provided
+        
+        
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          provided
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-jaxrs
+          1.9.13
+          provided
+          
+            
               org.codehaus.jackson
-              jackson-jaxrs
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-            
+              jackson-mapper-asl
+            
+            
               org.codehaus.jackson
-              jackson-xc
-              1.9.13
-              provided
-              
-                
-                  org.codehaus.jackson
-                  jackson-mapper-asl
-                
-                
-                  org.codehaus.jackson
-                  jackson-core-asl
-                
-              
-            
-          
-        
-    
+              jackson-core-asl
+            
+          
+        
+        
+          org.codehaus.jackson
+          jackson-xc
+          1.9.13
+          provided
+          
+            
+              org.codehaus.jackson
+              jackson-mapper-asl
+            
+            
+              org.codehaus.jackson
+              jackson-core-asl
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
index 74d9be39804d..3d8aaa13e5ab 100644
--- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml
@@ -1,25 +1,25 @@
 
-    
+
   4.0.0
 
   
@@ -56,8 +56,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
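The hunk above shows the pattern this patch applies across the shaded modules: the shade plugin keeps org.slf4j:* and org.apache.logging.log4j:* out of the relocated jars, so a consumer of the shaded artifacts has to supply its own SLF4J binding, exactly as this tester module now does at test scope. A minimal sketch of the equivalent wiring in a downstream pom, assuming the coordinates visible in this patch (the version values below are illustrative placeholders, not taken from the patch):

    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-shaded-client</artifactId>
      <version>2.6.0-SNAPSHOT</version>
    </dependency>
    <!-- the logging backend is deliberately not shaded; bring your own binding -->
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <version>2.x</version><!-- placeholder version -->
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <version>2.x</version><!-- placeholder version -->
      <scope>runtime</scope>
    </dependency>

The log4j-1.2-api bridge added in the hunk above is only needed when code on the classpath still calls the old log4j 1.2 API.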
diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml
index 03d001741143..ebc5eff50e43 100644
--- a/hbase-shaded/hbase-shaded-testing-util/pom.xml
+++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml
@@ -1,183 +1,179 @@
 
-    
-    4.0.0
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  4.0.0
+  
+    hbase-shaded
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ..
+  
 
-    
-        hbase-shaded
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ..
-    
+  hbase-shaded-testing-util
+  Apache HBase - Shaded - Testing Util
 
-    hbase-shaded-testing-util
-    Apache HBase - Shaded - Testing Util
-
-    
+  
         
-        
-            org.apache.hadoop
-            hadoop-common
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-hdfs
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-mapreduce-client-app
-            test-jar
-            compile
-        
-        
-            org.apache.hadoop
-            hadoop-mapreduce-client-jobclient
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-common
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-server
-            test-jar
-            compile
-            
-              
-                 javax.xml.bind
-                 jaxb-api
-              
-            
-        
-        
-            org.apache.hbase
-            hbase-asyncfs
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop-compat
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-hadoop2-compat
-            test-jar
-            compile
-        
-        
-            org.codehaus.jackson
-            jackson-jaxrs
-            1.9.13
-            compile
-        
-        
-            org.apache.hbase
-            hbase-testing-util
-            ${project.version}
-            compile
-            
-              
-                javax.xml.bind
-                jaxb-api
-              
-            
-        
-    
+    
+      org.apache.hadoop
+      hadoop-common
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-hdfs
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-mapreduce-client-app
+      test-jar
+      compile
+    
+    
+      org.apache.hadoop
+      hadoop-mapreduce-client-jobclient
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-common
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      test-jar
+      compile
+      
+        
+          javax.xml.bind
+          jaxb-api
+        
+      
+    
+    
+      org.apache.hbase
+      hbase-asyncfs
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop2-compat
+      test-jar
+      compile
+    
+    
+      org.codehaus.jackson
+      jackson-jaxrs
+      1.9.13
+      compile
+    
+    
+      org.apache.hbase
+      hbase-testing-util
+      compile
+      
+        
+          javax.xml.bind
+          jaxb-api
+        
+      
+    
+  
 
-    
-        
-            
-                org.apache.maven.plugins
-                maven-site-plugin
-                
-                    true
-                
-            
-            
+  
+    
+      
+        org.apache.maven.plugins
+        maven-site-plugin
+        
+          true
+        
+      
+      
                 
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-                org.apache.maven.plugins
-                maven-shade-plugin
-                
-                    
-                        aggregate-into-a-jar-with-relocated-third-parties
-                        
-                            
-                                
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+        org.apache.maven.plugins
+        maven-shade-plugin
+        
+          
+            aggregate-into-a-jar-with-relocated-third-parties
+            
+              
+                
                                     
-                                    javax.annotation:javax.annotation-api
-                                    javax.activation:javax.activation-api
-                                    jakarta.ws.rs:jakarta.ws.rs-api
-                                    jakarta.annotation:jakarta.annotation-api
-                                    jakarta.validation:jakarta.validation-api
-                                    org.glassfish.hk2.external:jakarta.inject
+                  javax.annotation:javax.annotation-api
+                  javax.activation:javax.activation-api
+                  jakarta.ws.rs:jakarta.ws.rs-api
+                  jakarta.annotation:jakarta.annotation-api
+                  jakarta.validation:jakarta.validation-api
+                  org.glassfish.hk2.external:jakarta.inject
                                     
                                     
-                                    org.apache.hbase:hbase-resource-bundle
-                                    org.slf4j:*
-                                    com.google.code.findbugs:*
-                                    com.github.stephenc.findbugs:*
-                                    com.github.spotbugs:*
-                                    org.apache.htrace:*
-                                    org.apache.yetus:*
-                                    log4j:*
-                                    ch.qos.reload4j:*
-                                    commons-logging:*
-                                    org.javassist:*
-                                    io.opentelemetry:*
-                                
-                            
-                        
-                    
-                
-            
-        
-    
-
+                  org.apache.hbase:hbase-resource-bundle
+                  org.slf4j:*
+                  com.google.code.findbugs:*
+                  com.github.stephenc.findbugs:*
+                  com.github.spotbugs:*
+                  org.apache.htrace:*
+                  org.apache.yetus:*
+                  org.apache.logging.log4j:*
+                  commons-logging:*
+                  org.javassist:*
+                  io.opentelemetry:*
+                
+              
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
index 48c4a3d131b8..01a5fceb3e53 100644
--- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml
@@ -40,7 +40,6 @@
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-shaded-client</artifactId>
-      <version>${project.version}</version>
     </dependency>
     
     
@@ -49,8 +48,18 @@
       <scope>provided</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>provided</scope>
     </dependency>
     
diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml
index 1da6ad46158c..ca60766b900d 100644
--- a/hbase-shaded/pom.xml
+++ b/hbase-shaded/pom.xml
@@ -1,5 +1,7 @@
 
-
+
     
-    4.0.0
-    
-        hbase-build-configuration
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ../hbase-build-configuration
-    
-    hbase-shaded
-    Apache HBase - Shaded
-    Module of HBase with most deps shaded.
-    pom
-    
+  4.0.0
+  
+    hbase-build-configuration
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ../hbase-build-configuration
+  
+  hbase-shaded
+  Apache HBase - Shaded
+  Module of HBase with most deps shaded.
+  pom
+  
       
-      true
+    true
       
-      true
-      true
-      org.apache.hadoop.hbase.shaded
-    
-    
-        hbase-shaded-client-byo-hadoop
-        hbase-shaded-client
-        hbase-shaded-mapreduce
-        hbase-shaded-testing-util
-        hbase-shaded-testing-util-tester
-        hbase-shaded-check-invariants
-        hbase-shaded-with-hadoop-check-invariants
-    
-    
-      
-         org.apache.hbase
-         hbase-resource-bundle
-         true
-      
-      
-      
-        ch.qos.reload4j
-        reload4j
-        true
-      
-      
-        org.slf4j
-        slf4j-reload4j
-        true
-      
-    
-    
-        
-            
-                
-                maven-assembly-plugin
-                
-                    true
-                
-            
-            
-            
-              org.apache.maven.plugins
-              maven-remote-resources-plugin
-              
-                
-                  aggregate-licenses
-                  
-                    process
-                  
-                  
-                    
-                      ${build.year}
-                      ${license.debug.print.included}
-                      ${license.bundles.dependencies}
-                      ${license.bundles.jquery}
-                      ${license.bundles.logo}
-                      ${license.bundles.bootstrap}
-                    
-                    
-                      ${project.groupId}:hbase-resource-bundle:${project.version}
-                    
-                    
-                      ${project.groupId}:hbase-resource-bundle:${project.version}
-                    
-                    
-                      supplemental-models.xml
-                    
-                  
-                
-              
-            
-        
-        
-            
-                
-                    
-                    maven-assembly-plugin
-                    
-                        true
-                    
-                
-                
-                    org.apache.maven.plugins
-                    maven-shade-plugin
-                    3.2.4
-                    
-                        
-                            aggregate-into-a-jar-with-relocated-third-parties
-                            package
-                            
-                                shade
-                            
-                            
-                                false
-                                false
-                                true
-                                false
-                                
-                                    
-                                        
-                                        javax.annotation:javax.annotation-api
-                                        javax.activation:javax.activation-api
-                                        jakarta.activation:jakarta.activation-api 
-                                        jakarta.ws.rs:jakarta.ws.rs-api
-                                        jakarta.annotation:jakarta.annotation-api
-                                        jakarta.validation:jakarta.validation-api
-                                        org.glassfish.hk2.external:jakarta.inject
-                                        
-                                        org.apache.hadoop:*
-                                        
-                                        org.apache.hbase:hbase-resource-bundle
-                                        org.slf4j:*
-                                        com.google.code.findbugs:*
-                                        com.github.stephenc.findbugs:*
-                                        com.github.spotbugs:*
-                                        org.apache.htrace:*
-                                        org.apache.yetus:*
-                                        log4j:*
-                                        ch.qos.reload4j:*
-                                        commons-logging:*
-                                        org.javassist:*
-                                        io.opentelemetry:*
-                                    
-                                
-                                
-                                    
-                                    
-                                        com.cedarsoftware
-                                        ${shaded.prefix}.com.cedarsoftware
-                                    
-                                    
-                                        com.codahale
-                                        ${shaded.prefix}.com.codahale
-                                    
-                                    
-                                        com.ctc
-                                        ${shaded.prefix}.com.ctc
-                                    
-                                    
-                                        com.dropwizard
-                                        ${shaded.prefix}.com.dropwizard
-                                    
-                                    
-                                        com.fasterxml
-                                        ${shaded.prefix}.com.fasterxml
-                                    
-                                    
-                                        com.github.benmanes.caffeine
-                                        ${shaded.prefix}.com.github.benmanes.caffeine
-                                    
-                                    
-                                        com.google
-                                        ${shaded.prefix}.com.google
-                                    
-                                    
-                                        com.jamesmurty
-                                        ${shaded.prefix}.com.jamesmurty
-                                    
-                                    
-                                        com.jcraft
-                                        ${shaded.prefix}.com.jcraft
-                                    
-                                    
-                                        com.lmax
-                                        ${shaded.prefix}.com.lmax
-                                    
-                                    
-                                        com.microsoft
-                                        ${shaded.prefix}.com.microsoft
-                                    
-                                    
-                                        com.nimbusds
-                                        ${shaded.prefix}.com.nimbusds
-                                    
-                                    
-                                        com.squareup
-                                        ${shaded.prefix}.com.squareup
-                                    
-                                    
-                                        com.thoughtworks
-                                        ${shaded.prefix}.com.thoughtworks
-                                    
-                                    
-                                        com.zaxxer
-                                        ${shaded.prefix}.com.zaxxer
-                                    
+    true
+    true
+    org.apache.hadoop.hbase.shaded
+  
+  
+    hbase-shaded-client-byo-hadoop
+    hbase-shaded-client
+    hbase-shaded-mapreduce
+    hbase-shaded-testing-util
+    hbase-shaded-testing-util-tester
+    hbase-shaded-check-invariants
+    hbase-shaded-with-hadoop-check-invariants
+  
+  
+    
+      org.apache.hbase
+      hbase-resource-bundle
+      true
+    
+    
+    
+      org.apache.logging.log4j
+      log4j-api
+      true
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      true
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
+      true
+    
+  
+  
+    
+      
+        
+        maven-assembly-plugin
+        
+          true
+        
+      
+      
+      
+        org.apache.maven.plugins
+        maven-remote-resources-plugin
+        
+          
+            aggregate-licenses
+            
+              process
+            
+            
+              
+                ${build.year}
+                ${license.debug.print.included}
+                ${license.bundles.dependencies}
+                ${license.bundles.jquery}
+                ${license.bundles.logo}
+                ${license.bundles.bootstrap}
+              
+              
+                ${project.groupId}:hbase-resource-bundle:${project.version}
+              
+              
+                ${project.groupId}:hbase-resource-bundle:${project.version}
+              
+              
+                supplemental-models.xml
+              
+            
+          
+        
+      
+    
+    
+      
+        
+          
+          maven-assembly-plugin
+          
+            true
+          
+        
+        
+          org.apache.maven.plugins
+          maven-shade-plugin
+          3.2.4
+          
+            
+              aggregate-into-a-jar-with-relocated-third-parties
+              package
+              
+                shade
+              
+              
+                false
+                false
+                true
+                false
+                
+                  
+                    
+                    javax.annotation:javax.annotation-api
+                    javax.activation:javax.activation-api
+                    jakarta.activation:jakarta.activation-api 
+                    jakarta.ws.rs:jakarta.ws.rs-api
+                    jakarta.annotation:jakarta.annotation-api
+                    jakarta.validation:jakarta.validation-api
+                    org.glassfish.hk2.external:jakarta.inject
+                    
+                    org.apache.hadoop:*
+                    
+                    org.apache.hbase:hbase-resource-bundle
+                    org.slf4j:*
+                    com.google.code.findbugs:*
+                    com.github.stephenc.findbugs:*
+                    com.github.spotbugs:*
+                    org.apache.htrace:*
+                    org.apache.yetus:*
+                    org.apache.logging.log4j:*
+                    commons-logging:*
+                    org.javassist:*
+                    io.opentelemetry:*
+                  
+                
+                
+                  
+                  
+                    com.cedarsoftware
+                    ${shaded.prefix}.com.cedarsoftware
+                  
+                  
+                    com.codahale
+                    ${shaded.prefix}.com.codahale
+                  
+                  
+                    com.ctc
+                    ${shaded.prefix}.com.ctc
+                  
+                  
+                    com.dropwizard
+                    ${shaded.prefix}.com.dropwizard
+                  
+                  
+                    com.fasterxml
+                    ${shaded.prefix}.com.fasterxml
+                  
+                  
+                    com.github.benmanes.caffeine
+                    ${shaded.prefix}.com.github.benmanes.caffeine
+                  
+                  
+                    com.google
+                    ${shaded.prefix}.com.google
+                  
+                  
+                    com.jamesmurty
+                    ${shaded.prefix}.com.jamesmurty
+                  
+                  
+                    com.jcraft
+                    ${shaded.prefix}.com.jcraft
+                  
+                  
+                    com.lmax
+                    ${shaded.prefix}.com.lmax
+                  
+                  
+                    com.microsoft
+                    ${shaded.prefix}.com.microsoft
+                  
+                  
+                    com.nimbusds
+                    ${shaded.prefix}.com.nimbusds
+                  
+                  
+                    com.squareup
+                    ${shaded.prefix}.com.squareup
+                  
+                  
+                    com.thoughtworks
+                    ${shaded.prefix}.com.thoughtworks
+                  
+                  
+                    com.zaxxer
+                    ${shaded.prefix}.com.zaxxer
+                  
 
-                                    
-                                    
-                                        org.xbill
-                                        ${shaded.prefix}.org.xbill
-                                    
+                  
+                  
+                    org.xbill
+                    ${shaded.prefix}.org.xbill
+                  
 
-                                    
-                                    
-                                        org.jboss.netty
-                                        ${shaded.prefix}.org.jboss.netty
-                                    
-                                    
-                                        io.netty
-                                        ${shaded.prefix}.io.netty
-                                    
+                  
+                  
+                    org.jboss.netty
+                    ${shaded.prefix}.org.jboss.netty
+                  
+                  
+                    io.netty
+                    ${shaded.prefix}.io.netty
+                  
 
-                                    
-                                    
-                                        okio
-                                        ${shaded.prefix}.okio
-                                    
+                  
+                  
+                    okio
+                    ${shaded.prefix}.okio
+                  
 
-                                    
-                                    
-                                      org.checkerframework
-                                      ${shaded.prefix}.org.checkerframework
-                                    
-                                    
-                                      org.codehaus
-                                      ${shaded.prefix}.org.codehaus
-                                    
-                                    
-                                        org.eclipse
-                                        ${shaded.prefix}.org.eclipse
-                                    
-                                    
-                                        org.ehcache
-                                        ${shaded.prefix}.org.ehcache
-                                    
-                                    
-                                        org.jcodings
-                                        ${shaded.prefix}.org.jcodings
-                                    
-                                    
-                                        org.joni
-                                        ${shaded.prefix}.org.joni
-                                    
-                                    
-                                        org.mortbay
-                                        ${shaded.prefix}.org.mortbay
-                                    
-                                    
-                                        org.nustaq
-                                        ${shaded.prefix}.org.nustaq
-                                    
-                                    
-                                        org.terracotta
-                                        ${shaded.prefix}.org.terracotta
-                                    
-                                    
-                                        org.tukaani
-                                        ${shaded.prefix}.org.tukaani
-                                    
-                                    
-                                        org.xerial
-                                        ${shaded.prefix}.org.xerial
-                                    
-                                    
-                                        org.znerd
-                                        ${shaded.prefix}.org.znerd
-                                    
-                                    
-                                        org.aopalliance
-                                        ${shaded.prefix}.org.aopalliance
-                                    
-                                    
-                                        org.fusesource
-                                        ${shaded.prefix}.org.fusesource
-                                    
-                                    
-                                        org.iq80
-                                        ${shaded.prefix}.org.iq80
-                                    
-                                    
-                                        org.jamon
-                                        ${shaded.prefix}.org.jamon
-                                    
-                                    
-                                        org.jets3t
-                                        ${shaded.prefix}.org.jets3t
-                                    
-                                    
-                                    
-                                        contribs.mx
-                                        ${shaded.prefix}.contribs.mx
-                                    
-                                    
-                                        org.objectweb
-                                        ${shaded.prefix}.org.objectweb
-                                    
+                  
+                  
+                    org.checkerframework
+                    ${shaded.prefix}.org.checkerframework
+                  
+                  
+                    org.codehaus
+                    ${shaded.prefix}.org.codehaus
+                  
+                  
+                    org.eclipse
+                    ${shaded.prefix}.org.eclipse
+                  
+                  
+                    org.ehcache
+                    ${shaded.prefix}.org.ehcache
+                  
+                  
+                    org.jcodings
+                    ${shaded.prefix}.org.jcodings
+                  
+                  
+                    org.joni
+                    ${shaded.prefix}.org.joni
+                  
+                  
+                    org.mortbay
+                    ${shaded.prefix}.org.mortbay
+                  
+                  
+                    org.nustaq
+                    ${shaded.prefix}.org.nustaq
+                  
+                  
+                    org.terracotta
+                    ${shaded.prefix}.org.terracotta
+                  
+                  
+                    org.tukaani
+                    ${shaded.prefix}.org.tukaani
+                  
+                  
+                    org.xerial
+                    ${shaded.prefix}.org.xerial
+                  
+                  
+                    org.znerd
+                    ${shaded.prefix}.org.znerd
+                  
+                  
+                    org.aopalliance
+                    ${shaded.prefix}.org.aopalliance
+                  
+                  
+                    org.fusesource
+                    ${shaded.prefix}.org.fusesource
+                  
+                  
+                    org.iq80
+                    ${shaded.prefix}.org.iq80
+                  
+                  
+                    org.jamon
+                    ${shaded.prefix}.org.jamon
+                  
+                  
+                    org.jets3t
+                    ${shaded.prefix}.org.jets3t
+                  
+                  
+                  
+                    contribs.mx
+                    ${shaded.prefix}.contribs.mx
+                  
+                  
+                    org.objectweb
+                    ${shaded.prefix}.org.objectweb
+                  
 
 
-                                    
-                                    
-                                        org.apache.avro
-                                        ${shaded.prefix}.org.apache.avro
-                                    
-                                    
-                                        org.apache.curator
-                                        ${shaded.prefix}.org.apache.curator
-                                    
-                                    
-                                        org.apache.directory
-                                        ${shaded.prefix}.org.apache.directory
-                                    
-                                    
-                                        org.apache.http
-                                        ${shaded.prefix}.org.apache.http
-                                    
-                                    
-                                        org.apache.jasper
-                                        ${shaded.prefix}.org.apache.jasper
-                                    
-                                    
-                                        org.apache.jute
-                                        ${shaded.prefix}.org.apache.jute
-                                    
-                                    
-                                        org.apache.kerby
-                                        ${shaded.prefix}.org.apache.kerby
-                                    
-                                    
-                                        org.apache.taglibs
-                                        ${shaded.prefix}.org.apache.taglibs
-                                    
-                                    
-                                        org.apache.zookeeper
-                                        ${shaded.prefix}.org.apache.zookeeper
-                                    
+                  
+                  
+                    org.apache.avro
+                    ${shaded.prefix}.org.apache.avro
+                  
+                  
+                    org.apache.curator
+                    ${shaded.prefix}.org.apache.curator
+                  
+                  
+                    org.apache.directory
+                    ${shaded.prefix}.org.apache.directory
+                  
+                  
+                    org.apache.http
+                    ${shaded.prefix}.org.apache.http
+                  
+                  
+                    org.apache.jasper
+                    ${shaded.prefix}.org.apache.jasper
+                  
+                  
+                    org.apache.jute
+                    ${shaded.prefix}.org.apache.jute
+                  
+                  
+                    org.apache.kerby
+                    ${shaded.prefix}.org.apache.kerby
+                  
+                  
+                    org.apache.taglibs
+                    ${shaded.prefix}.org.apache.taglibs
+                  
+                  
+                    org.apache.zookeeper
+                    ${shaded.prefix}.org.apache.zookeeper
+                  
 
-                                    
-                                    
-                                        org.apache.commons.beanutils
-                                        ${shaded.prefix}.org.apache.commons.beanutils
-                                    
-                                    
-                                        org.apache.commons.cli
-                                        ${shaded.prefix}.org.apache.commons.cli
-                                    
-                                    
-                                        org.apache.commons.collections
-                                        ${shaded.prefix}.org.apache.commons.collections
-                                    
-                                    
-                                        org.apache.commons.configuration
-                                        ${shaded.prefix}.org.apache.commons.configuration
-                                    
-                                    
-                                        org.apache.commons.crypto
-                                        ${shaded.prefix}.org.apache.commons.crypto
-                                    
-                                    
-                                        org.apache.commons.csv
-                                        ${shaded.prefix}.org.apache.commons.csv
-                                    
-                                    
-                                        org.apache.commons.daemon
-                                        ${shaded.prefix}.org.apache.commons.daemon
-                                    
-                                    
-                                        org.apache.commons.io
-                                        ${shaded.prefix}.org.apache.commons.io
-                                    
-                                    
-                                        org.apache.commons.math
-                                        ${shaded.prefix}.org.apache.commons.math
-                                    
-                                    
-                                        org.apache.commons.math3
-                                        ${shaded.prefix}.org.apache.commons.math3
-                                    
-                                    
-                                        org.apache.commons.net
-                                        ${shaded.prefix}.org.apache.commons.net
-                                    
-                                    
-                                        org.apache.commons.lang
-                                        ${shaded.prefix}.org.apache.commons.lang
-                                    
-                                    
-                                        org.apache.commons.lang3
-                                        ${shaded.prefix}.org.apache.commons.lang3
-                                    
-                                    
-                                        org.apache.commons.el
-                                        ${shaded.prefix}.org.apache.commons.el
-                                    
-                                    
-                                        org.apache.commons.httpclient
-                                        ${shaded.prefix}.org.apache.commons.httpclient
-                                    
-                                    
-                                        org.apache.commons.compress
-                                        ${shaded.prefix}.org.apache.commons.compress
-                                    
-                                    
-                                        org.apache.commons.digester
-                                        ${shaded.prefix}.org.apache.commons.digester
-                                    
-                                    
-                                        org.apache.commons.codec
-                                        ${shaded.prefix}.org.apache.commons.codec
-                                    
-                                    
-                                        org.apache.commons.text
-                                        ${shaded.prefix}.org.apache.commons.text
-                                    
+                  
+                  
+                    org.apache.commons.beanutils
+                    ${shaded.prefix}.org.apache.commons.beanutils
+                  
+                  
+                    org.apache.commons.cli
+                    ${shaded.prefix}.org.apache.commons.cli
+                  
+                  
+                    org.apache.commons.collections
+                    ${shaded.prefix}.org.apache.commons.collections
+                  
+                  
+                    org.apache.commons.configuration
+                    ${shaded.prefix}.org.apache.commons.configuration
+                  
+                  
+                    org.apache.commons.crypto
+                    ${shaded.prefix}.org.apache.commons.crypto
+                  
+                  
+                    org.apache.commons.csv
+                    ${shaded.prefix}.org.apache.commons.csv
+                  
+                  
+                    org.apache.commons.daemon
+                    ${shaded.prefix}.org.apache.commons.daemon
+                  
+                  
+                    org.apache.commons.io
+                    ${shaded.prefix}.org.apache.commons.io
+                  
+                  
+                    org.apache.commons.math
+                    ${shaded.prefix}.org.apache.commons.math
+                  
+                  
+                    org.apache.commons.math3
+                    ${shaded.prefix}.org.apache.commons.math3
+                  
+                  
+                    org.apache.commons.net
+                    ${shaded.prefix}.org.apache.commons.net
+                  
+                  
+                    org.apache.commons.lang
+                    ${shaded.prefix}.org.apache.commons.lang
+                  
+                  
+                    org.apache.commons.lang3
+                    ${shaded.prefix}.org.apache.commons.lang3
+                  
+                  
+                    org.apache.commons.el
+                    ${shaded.prefix}.org.apache.commons.el
+                  
+                  
+                    org.apache.commons.httpclient
+                    ${shaded.prefix}.org.apache.commons.httpclient
+                  
+                  
+                    org.apache.commons.compress
+                    ${shaded.prefix}.org.apache.commons.compress
+                  
+                  
+                    org.apache.commons.digester
+                    ${shaded.prefix}.org.apache.commons.digester
+                  
+                  
+                    org.apache.commons.codec
+                    ${shaded.prefix}.org.apache.commons.codec
+                  
+                  
+                    org.apache.commons.text
+                    ${shaded.prefix}.org.apache.commons.text
+                  
 
-                                    
-                                    
-                                        net/
-                                        ${shaded.prefix}.net.
-                                    
-                                    
-                                        org.agrona
-                                        ${shaded.prefix}.org.agrona
-                                    
-                                
-                                
-                                  
-                                  
-                                    
-                                      LICENSE.txt
-                                      ASL2.0
+                  
+                  
+                    net/
+                    ${shaded.prefix}.net.
+                  
+                  
+                    org.agrona
+                    ${shaded.prefix}.org.agrona
+                  
+                
+                
+                  
+                  
+                    
+                      LICENSE.txt
+                      ASL2.0
                                       
-                                      overview.html
-                                    
-                                  
-                                  
-                                  
-                                    false
-                                    ${project.name}
-                                  
-                                  
-                                  
-                                
-                                
-                                    
-                                    
-                                        dnsjava:dnsjava
-                                        
-                                            dig*
-                                            jnamed*
-                                            lookup*
-                                            update*
-                                        
-                                    
-                                  
-                                    
-                                    org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
-                                    
-                                      META-INF/ECLIPSEF.SF
-                                      META-INF/ECLIPSEF.RSA
-                                    
-                                  
-                                  
-                                    
-                                    commons-beanutils:commons-beanutils-core
-                                    
-                                      org/apache/commons/collections/*.class
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.hadoop:hadoop-yarn-common
-                                    
-                                      webapps/*
-                                      webapps/**/*
-                                    
-                                  
-                                  
-                                    *:*
-                                    
-                                      
-                                      *.proto
-                                      **/*.proto
-                                      
-                                      LICENSE
-                                      NOTICE
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.commons:commons-math3
-                                    
-                                      assets/org/apache/commons/math3/**/*
-                                    
-                                  
-                                  
-                                    
-                                    org.apache.hadoop:*
-                                    
-                                      mapred-default.xml.orig
-                                    
-                                  
-                                  
-                                  
-                                    org.eclipse.jetty:*
-                                    
-                                      about.html
-                                      jetty-dir.css
-                                    
-                                  
-                                  
-                                    org.apache.kerby:*
-                                    
-                                      krb5-template.conf
-                                      krb5_udp-template.conf
-                                      ccache.txt
-                                      keytab.txt
-                                    
-                                  
-                                
-                            
-                        
-                    
-                
-            
-        
-    
+                      overview.html
+                    
+                  
+                  
+                  
+                    false
+                    ${project.name}
+                  
+                  
+                  
+                
+                
+                  
+                  
+                    dnsjava:dnsjava
+                    
+                      dig*
+                      jnamed*
+                      lookup*
+                      update*
+                    
+                  
+                  
+                    
+                    org.eclipse.jetty.orbit:javax.servlet.jsp.jstl
+                    
+                      META-INF/ECLIPSEF.SF
+                      META-INF/ECLIPSEF.RSA
+                    
+                  
+                  
+                    
+                    commons-beanutils:commons-beanutils-core
+                    
+                      org/apache/commons/collections/*.class
+                    
+                  
+                  
+                    
+                    org.apache.hadoop:hadoop-yarn-common
+                    
+                      webapps/*
+                      webapps/**/*
+                    
+                  
+                  
+                    *:*
+                    
+                      
+                      *.proto
+                      **/*.proto
+                      
+                      LICENSE
+                      NOTICE
+                    
+                  
+                  
+                    
+                    org.apache.commons:commons-math3
+                    
+                      assets/org/apache/commons/math3/**/*
+                    
+                  
+                  
+                    
+                    org.apache.hadoop:*
+                    
+                      mapred-default.xml.orig
+                    
+                  
+                  
+                  
+                    org.eclipse.jetty:*
+                    
+                      about.html
+                      jetty-dir.css
+                    
+                  
+                  
+                    org.apache.kerby:*
+                    
+                      krb5-template.conf
+                      krb5_udp-template.conf
+                      ccache.txt
+                      keytab.txt
+                    
+                  
+                
+              
+            
+          
+        
+      
+    
+  
 
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index cb63fe5c2cda..e3333a8c9edc 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -147,13 +147,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
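
The dependency swap above replaces the reload4j pair with four Log4j2 artifacts at test scope: log4j-api and log4j-core supply the backend, log4j-slf4j-impl binds SLF4J to it, and log4j-1.2-api keeps legacy log4j 1.x callers working. Module code itself still compiles only against the SLF4J facade, so the backend change needs no source edits. A minimal sketch (class name is illustrative, not part of the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ShellLoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(ShellLoggingSketch.class);

  public static void main(String[] args) {
    // At test runtime, log4j-slf4j-impl routes this call into the Log4j2 core
    // found on the classpath; no code change is needed for the migration.
    LOG.info("logging through the SLF4J facade");
  }
}
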
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index c82ccf799373..70f74463862d 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@
 
-
+
     
-    4.0.0
-    
-        hbase-build-configuration
-        org.apache.hbase
-        2.6.0-SNAPSHOT
-        ../hbase-build-configuration
-    
-    hbase-testing-util
-    Apache HBase - Testing Util
-    HBase Testing Utilities.
-    
+  4.0.0
+  
+    hbase-build-configuration
+    org.apache.hbase
+    2.6.0-SNAPSHOT
+    ../hbase-build-configuration
+  
+  hbase-testing-util
+  Apache HBase - Testing Util
+  HBase Testing Utilities.
+  
         
         
+    
+      org.apache.hbase
+      hbase-logging
+      test-jar
+      test
+    
+    
+      org.apache.hbase
+      hbase-common
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-common
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-annotations
+      test-jar
+      compile
+      
+        
+          jdk.tools
+          jdk.tools
+        
+      
+    
+    
+      org.apache.hbase
+      hbase-protocol
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-client
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-zookeeper
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-server
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-asyncfs
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      hbase-hadoop-compat
+      test-jar
+      compile
+    
+    
+      org.apache.hbase
+      ${compat.module}
+      jar
+      compile
+    
+    
+      org.apache.hbase
+      ${compat.module}
+      test-jar
+      compile
+    
+    
+      org.slf4j
+      jcl-over-slf4j
+      test
+    
+    
+      org.slf4j
+      jul-to-slf4j
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-api
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-core
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-slf4j-impl
+      test
+    
+    
+      org.apache.logging.log4j
+      log4j-1.2-api
+      test
+    
+  
+
+  
+        
+        
+
+        
+    
+      hadoop-2.0
+      
+        
+                    
+                    
+          !hadoop.profile
+        
+      
+      
         
-            org.apache.hbase
-            hbase-logging
-            test-jar
-            test
-        
-        
-            org.apache.hbase
-            hbase-common
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-common
-            test-jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-annotations
-            test-jar
-            compile
-            
-                
-                    jdk.tools
-                    jdk.tools
-                
-            
-        
-        
-            org.apache.hbase
-            hbase-protocol
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-client
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            jar
-            compile
-        
-        
-            org.apache.hbase
-            hbase-zookeeper
-            test-jar
-            compile
+          org.apache.hadoop
+          hadoop-common
+          compile
+          
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
         
         
-            org.apache.hbase
-            hbase-server
-            jar
-            compile
+          org.apache.hadoop
+          hadoop-auth
+          compile
         
         
-            org.apache.hbase
-            hbase-server
-            test-jar
-            compile
+          org.apache.hadoop
+          hadoop-client
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
         
         
-            org.apache.hbase
-            hbase-asyncfs
-            test-jar
-            compile
+          org.apache.hadoop
+          hadoop-mapreduce-client-core
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.xml.bind
+              jaxb-api
+            
+          
         
         
-            org.apache.hbase
-            hbase-hadoop-compat
-            jar
-            compile
+          org.apache.hadoop
+          hadoop-mapreduce-client-jobclient
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+          
         
         
-            org.apache.hbase
-            hbase-hadoop-compat
-            test-jar
-            compile
+          org.apache.hadoop
+          hadoop-hdfs
+          compile
         
         
-            org.apache.hbase
-            ${compat.module}
-            jar
-            compile
+          org.apache.hadoop
+          hadoop-hdfs
+          test-jar
+          compile
         
         
-            org.apache.hbase
-            ${compat.module}
-            test-jar
-            compile
+          org.apache.hadoop
+          hadoop-minicluster
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              org.apache.zookeeper
+              zookeeper
+            
+          
         
         
-            org.slf4j
-            jcl-over-slf4j
-            test
+          org.apache.hadoop
+          hadoop-minikdc
         
+      
+    
+        
+    
+      hadoop-3.0
+      
+        
+          hadoop.profile
+          3.0
+        
+      
+      
         
-            org.slf4j
-            jul-to-slf4j
-            test
+          org.apache.hadoop
+          hadoop-common
+          
+            
+              javax.xml.bind
+              jaxb-api
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
         
         
-            org.slf4j
-            slf4j-reload4j
-            test
+          org.apache.hadoop
+          hadoop-minicluster
+          compile
+          
+            
+              com.google.guava
+              guava
+            
+            
+              javax.ws.rs
+              jsr311-api
+            
+          
         
         
-            ch.qos.reload4j
-            reload4j
-            test
+          org.apache.hadoop
+          hadoop-minikdc
         
-    
-
-    
-        
-        
-
-        
-        
-            hadoop-2.0
-            
-                
-                    
-                    !hadoop.profile
-                
-            
-            
-                
-                    org.apache.hadoop
-                    hadoop-common
-                    compile
-                    
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-auth
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-client
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-mapreduce-client-core
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        javax.xml.bind
-                        jaxb-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-mapreduce-client-jobclient
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-hdfs
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-hdfs
-                    test-jar
-                    compile
-                
-                
-                    org.apache.hadoop
-                    hadoop-minicluster
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                        org.apache.zookeeper
-                        zookeeper
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minikdc
-                
-            
-        
-        
-        
-            hadoop-3.0
-            
-                
-                    hadoop.profile
-                    3.0
-                
-            
-            
-                
-                    org.apache.hadoop
-                    hadoop-common
-                    
-                      
-                         javax.xml.bind
-                         jaxb-api
-                      
-                      
-                       javax.ws.rs
-                       jsr311-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minicluster
-                    compile
-                    
-                      
-                        com.google.guava
-                        guava
-                      
-                      
-                       javax.ws.rs
-                       jsr311-api
-                      
-                    
-                
-                
-                    org.apache.hadoop
-                    hadoop-minikdc
-                
-            
-        
-    
+      
+    
+  
 
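
hbase-testing-util now declares the jcl-over-slf4j and jul-to-slf4j bridges at test scope alongside the Log4j2 artifacts, so commons-logging and java.util.logging output from Hadoop and ZooKeeper minicluster code is funneled into SLF4J and then into Log4j2. A sketch of how the JUL bridge is typically activated (the wiring shown here is illustrative; the patch itself only declares the dependencies):

import org.slf4j.bridge.SLF4JBridgeHandler;

public final class JulBridgeSetup {
  private JulBridgeSetup() {
  }

  public static void install() {
    // Drop the default java.util.logging console handlers, then register the
    // SLF4J handler so JUL records reach the Log4j2 backend on the classpath.
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();
  }
}
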
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 54c1f607182e..6a8b3df7cba1 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -255,13 +255,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 45029db88518..b2a5b1ca9bbf 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -174,13 +174,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/pom.xml b/pom.xml
index e247e5856a1a..b9dea2d37b8d 100755
--- a/pom.xml
+++ b/pom.xml
@@ -688,7 +688,7 @@
               
               
                 
-                  log4j.properties
+                  log4j2.xml
                 
               
             
@@ -1060,11 +1060,28 @@
               <rules>
                 <bannedDependencies>
                   <excludes>
-                    <exclude>log4j:**</exclude>
+                    <exclude>log4j:log4j</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow log4j dependencies as now we use log4j2
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
+            <id>banned-slf4j-log4j12</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
                     <exclude>org.slf4j:slf4j-log4j12</exclude>
                   </excludes>
                   <message>
-                    Use reload4j instead
+                    We do not allow slf4j-log4j12 dependency as now we use log4j-slf4j-impl
                   </message>
                 </bannedDependencies>
               </rules>
@@ -1119,16 +1136,18 @@
                   <reason>Use SLF4j for logging</reason>
                   <bannedImports>
                     <bannedImport>org.apache.commons.logging.**</bannedImport>
+                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
                 <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                   <includeTestCode>false</includeTestCode>
                   <commentLineBufferSize>512</commentLineBufferSize>
                   <reason>
-                    Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+                    Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details.
                   </reason>
                   <bannedImports>
-                    <bannedImport>org.apache.log4j.**</bannedImport>
+                    <bannedImport>org.apache.logging.log4j.**</bannedImport>
                   </bannedImports>
                 </restrictImports>
                 
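
The restrict-imports rules above ban org.apache.log4j.** and org.apache.logging.log4j.** outside hbase-logging: regular code logs through SLF4J, and log-level tweaks go through the Log4jUtils facade instead of the Log4j2 API. A sketch of the intended pattern; the setLogLevel(String, String) signature is an assumption here, so check Log4jUtils in hbase-logging for the exact API:

import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggingConventionSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingConventionSketch.class);

  void enableVerboseRpcLogging() {
    // Allowed: adjust levels through the hbase-logging facade (assumed
    // signature) rather than importing org.apache.logging.log4j.* directly.
    Log4jUtils.setLogLevel("org.apache.hadoop.ipc", "DEBUG");
    LOG.debug("verbose RPC logging enabled");
  }
}
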
@@ -1533,7 +1552,7 @@
     1.3
     1.0.1
     1.0.1
-    <reload4j.version>1.2.19</reload4j.version>
+    <log4j2.version>2.17.2</log4j2.version>
     2.28.2
     
     com.google.protobuf
@@ -1993,8 +2012,8 @@
       
       
       
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>${slf4j.version}</version>
       </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-reload4j</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>jcl-over-slf4j</artifactId>
@@ -2027,9 +2042,24 @@
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>ch.qos.reload4j</groupId>
-        <artifactId>reload4j</artifactId>
-        <version>${reload4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-api</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-core</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       
       
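
dependencyManagement now pins all four Log4j2 artifacts to ${log4j2.version}. The log4j-1.2-api entry is what keeps third-party code that still calls the old log4j 1.x API working: those calls resolve against the bridge and end up in Log4j2. A small sketch (illustrative only, not HBase code) of such a legacy caller:

import org.apache.log4j.Logger;

public class LegacyCallerSketch {
  // Legacy-style logging via the log4j 1.x API; with log4j-1.2-api on the
  // classpath these calls are forwarded to the Log4j2 backend.
  private static final Logger LOG = Logger.getLogger(LegacyCallerSketch.class);

  public static void main(String[] args) {
    LOG.info("routed through the log4j-1.2-api bridge");
  }
}
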
@@ -2037,8 +2067,6 @@
         avro
         ${avro.version}
       
-      
       
         com.github.ben-manes.caffeine
         caffeine
@@ -3388,6 +3416,46 @@
               </exclusion>
            </exclusions>
         </dependency>
+         <dependency>
+           <groupId>org.apache.hadoop</groupId>
+           <artifactId>hadoop-mapreduce-client-app</artifactId>
+           <version>${hadoop-three.version}</version>
+           <type>test-jar</type>
+           <exclusions>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-mapper-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-core-asl</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-jaxrs</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.codehaus.jackson</groupId>
+               <artifactId>jackson-xc</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.xml.bind</groupId>
+               <artifactId>jaxb-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>javax.ws.rs</groupId>
+               <artifactId>jsr311-api</artifactId>
+             </exclusion>
+             <exclusion>
+               <groupId>org.slf4j</groupId>
+              <artifactId>slf4j-log4j12</artifactId>
+             </exclusion>
+             <exclusion>
+                <groupId>log4j</groupId>
+                <artifactId>log4j</artifactId>
+              </exclusion>
+           </exclusions>
+         </dependency>
           <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
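
The new hadoop-mapreduce-client-app test-jar entry excludes slf4j-log4j12 and log4j:log4j so Hadoop cannot drag a second SLF4J binding or the original log4j 1.2 jar onto the test classpath; org.apache.log4j classes should come only from the log4j-1.2-api bridge. A diagnostic sketch (not part of the patch) for checking which jar actually provides that class at runtime:

public class Log4jProvenanceCheck {
  public static void main(String[] args) throws ClassNotFoundException {
    // Resolve the legacy Logger class and print the jar it was loaded from;
    // it should point at log4j-1.2-api, not a log4j-1.2.x artifact.
    Class<?> clazz = Class.forName("org.apache.log4j.Logger");
    java.security.CodeSource source = clazz.getProtectionDomain().getCodeSource();
    System.out.println(source == null ? "bootstrap/unknown" : source.getLocation());
  }
}
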
@@ -3414,10 +3482,6 @@
               <groupId>org.slf4j</groupId>
               <artifactId>slf4j-log4j12</artifactId>
             </exclusion>
-             <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
@@ -3447,10 +3511,6 @@
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
              </exclusion>
-              <exclusion>
-                <groupId>log4j</groupId>
-                <artifactId>log4j</artifactId>
-              </exclusion>
            </exclusions>
          </dependency>
          <dependency>
@@ -3863,10 +3923,6 @@
               <groupId>org.slf4j</groupId>
               <artifactId>slf4j-log4j12</artifactId>
             </exclusion>
-             <exclusion>
-               <groupId>log4j</groupId>
-               <artifactId>log4j</artifactId>
-