diff --git a/bin/hbase b/bin/hbase index 75aa81b7c3a9..c9083b5ea9db 100755 --- a/bin/hbase +++ b/bin/hbase @@ -305,10 +305,13 @@ else # make it easier to check for shaded/not later on. shaded_jar="" fi +# Here we add slf4j-api, commons-logging, jul-to-slf4j and jcl-over-slf4j to +# the classpath, as they are all logging bridges. Only log4j* jars are +# excluded, so nothing is actually logged yet; they are added back later when needed. for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ - [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ "${f}" != "htrace-core.jar$" ]] && \ + [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then CLASSPATH="${CLASSPATH}:${f}" fi done @@ -671,7 +674,7 @@ elif [ "$COMMAND" = "mapredcp" ] ; then for f in "${HBASE_HOME}"/lib/client-facing-thirdparty/*.jar; do if [[ ! "${f}" =~ ^.*/htrace-core-3.*\.jar$ ]] && \ [ "${f}" != "htrace-core.jar$" ] && \ - [[ ! "${f}" =~ ^.*/slf4j-log4j.*$ ]]; then + [[ ! "${f}" =~ ^.*/log4j.*$ ]]; then echo -n ":${f}" fi done @@ -720,8 +723,8 @@ elif [ "$COMMAND" = "hbtop" ] ; then done fi - if [ -f "${HBASE_HOME}/conf/log4j-hbtop.properties" ] ; then - HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j.configuration=file:${HBASE_HOME}/conf/log4j-hbtop.properties" + if [ -f "${HBASE_HOME}/conf/log4j2-hbtop.properties" ] ; then + HBASE_HBTOP_OPTS="${HBASE_HBTOP_OPTS} -Dlog4j2.configurationFile=file:${HBASE_HOME}/conf/log4j2-hbtop.properties" fi HBASE_OPTS="${HBASE_OPTS} ${HBASE_HBTOP_OPTS}" else @@ -796,10 +799,6 @@ HBASE_OPTS="$HBASE_OPTS -Dhbase.log.file=$HBASE_LOGFILE" HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME" HBASE_OPTS="$HBASE_OPTS -Dhbase.id.str=$HBASE_IDENT_STRING" HBASE_OPTS="$HBASE_OPTS -Dhbase.root.logger=${HBASE_ROOT_LOGGER:-INFO,console}" -if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then - HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH" - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$JAVA_LIBRARY_PATH" -fi # Enable security logging on the master and regionserver only if [ "$COMMAND" = "master" ] || [ "$COMMAND" = "regionserver" ]; then @@ -810,10 +809,9 @@ fi HEAP_SETTINGS="$JAVA_HEAP_MAX $JAVA_OFFHEAP_MAX" # by now if we're running a command it means we need logging -for f in ${HBASE_HOME}/lib/client-facing-thirdparty/slf4j-log4j*.jar; do +for f in ${HBASE_HOME}/lib/client-facing-thirdparty/log4j*.jar; do if [ -f "${f}" ]; then CLASSPATH="${CLASSPATH}:${f}" - break fi done diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh index 11c13eb52300..6fafab0ccec0 100755 --- a/bin/hbase-daemon.sh +++ b/bin/hbase-daemon.sh @@ -155,12 +155,20 @@ JAVA=$JAVA_HOME/bin/java export HBASE_LOG_PREFIX=hbase-$HBASE_IDENT_STRING-$command-$HOSTNAME export HBASE_LOGFILE=$HBASE_LOG_PREFIX.log -if [ -z "${HBASE_ROOT_LOGGER}" ]; then -export HBASE_ROOT_LOGGER=${HBASE_ROOT_LOGGER:-"INFO,RFA"} +if [ -z "${HBASE_ROOT_LOGGER_LEVEL}" ]; then +export HBASE_ROOT_LOGGER_LEVEL=${HBASE_ROOT_LOGGER_LEVEL:-"INFO"} fi -if [ -z "${HBASE_SECURITY_LOGGER}" ]; then -export HBASE_SECURITY_LOGGER=${HBASE_SECURITY_LOGGER:-"INFO,RFAS"} +if [ -z "${HBASE_ROOT_LOGGER_APPENDER}" ]; then +export HBASE_ROOT_LOGGER_APPENDER=${HBASE_ROOT_LOGGER_APPENDER:-"RFA"} +fi + +if [ -z "${HBASE_SECURITY_LOGGER_LEVEL}" ]; then +export HBASE_SECURITY_LOGGER_LEVEL=${HBASE_SECURITY_LOGGER_LEVEL:-"INFO"} +fi + +if [ -z "${HBASE_SECURITY_LOGGER_APPENDER}" ]; then +export HBASE_SECURITY_LOGGER_APPENDER=${HBASE_SECURITY_LOGGER_APPENDER:-"RFAS"} fi 
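The daemon script above now splits HBASE_ROOT_LOGGER and HBASE_SECURITY_LOGGER into separate *_LEVEL and *_APPENDER variables, while bin/hbase still passes a combined -Dhbase.root.logger value that the new log4j2.properties resolves through its ${sys:hbase.root.logger:-INFO,console} lookup. A minimal sketch of that hand-off, assuming a Log4j2 setup like the one in this patch (the class name and literal values below are illustrative, not part of the change):

    // Illustrative sketch, not part of the patch: how a -Dhbase.root.logger
    // value set by the scripts is consumed by Log4j2 through the
    // ${sys:hbase.root.logger:-INFO,console} lookup in conf/log4j2.properties.
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class RootLoggerHandoff {
      public static void main(String[] args) {
        // Must be set before Log4j2 initializes, i.e. before the first
        // LogManager call, just as the scripts pass -D flags at JVM launch.
        System.setProperty("hbase.root.logger", "INFO,console");
        System.setProperty("log4j2.configurationFile", "file:conf/log4j2.properties");
        Logger log = LogManager.getLogger(RootLoggerHandoff.class);
        log.info("routed to the appender selected via hbase.root.logger");
      }
    }

The same mechanism backs hbase.security.logger, hbase.log.dir and the other ${sys:...:-default} substitutions in the new properties files.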
HBASE_LOGOUT=${HBASE_LOGOUT:-"$HBASE_LOG_DIR/$HBASE_LOG_PREFIX.out"} diff --git a/bin/hbase.cmd b/bin/hbase.cmd index 3b569099090f..240b63c7ec71 100644 --- a/bin/hbase.cmd +++ b/bin/hbase.cmd @@ -332,6 +332,7 @@ set HBASE_OPTS=%HBASE_OPTS% -Djava.util.logging.config.class="org.apache.hadoop. if not defined HBASE_ROOT_LOGGER ( set HBASE_ROOT_LOGGER=INFO,console ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.root.logger="%HBASE_ROOT_LOGGER%" if defined JAVA_LIBRARY_PATH ( @@ -348,6 +349,7 @@ if not defined HBASE_SECURITY_LOGGER ( set HBASE_SECURITY_LOGGER=INFO,DRFAS ) ) + set HBASE_OPTS=%HBASE_OPTS% -Dhbase.security.logger="%HBASE_SECURITY_LOGGER%" set HEAP_SETTINGS=%JAVA_HEAP_MAX% %JAVA_OFFHEAP_MAX% diff --git a/conf/hbase-env.cmd b/conf/hbase-env.cmd index 4beebf646dee..84519d5606d2 100644 --- a/conf/hbase-env.cmd +++ b/conf/hbase-env.cmd @@ -32,7 +32,7 @@ @rem set HBASE_OFFHEAPSIZE=1000 @rem For example, to allocate 8G of offheap, to 8G: -@rem etHBASE_OFFHEAPSIZE=8G +@rem set HBASE_OFFHEAPSIZE=8G @rem Extra Java runtime options. @rem Below are what we set by default. May only work with SUN JVM. @@ -82,6 +82,9 @@ set HBASE_OPTS=%HBASE_OPTS% "-XX:+UseConcMarkSweepGC" "-Djava.net.preferIPv4Stac @rem Tell HBase whether it should manage it's own instance of ZooKeeper or not. @rem set HBASE_MANAGES_ZK=true +@rem Tell HBase the logger level and appenders +@rem set HBASE_ROOT_LOGGER=INFO,DRFA + @rem Uncomment to enable trace, you can change the options to use other exporters such as jaeger or @rem zipkin. See https://github.com/open-telemetry/opentelemetry-java-instrumentation on how to @rem configure exporters and other components through system properties. diff --git a/conf/hbase-env.sh b/conf/hbase-env.sh index ee71a0ab56dc..e049fd6d853d 100644 --- a/conf/hbase-env.sh +++ b/conf/hbase-env.sh @@ -126,11 +126,11 @@ # export HBASE_MANAGES_ZK=true # The default log rolling policy is RFA, where the log file is rolled as per the size defined for the -# RFA appender. Please refer to the log4j.properties file to see more details on this appender. +# RFA appender. Please refer to the log4j2.properties file to see more details on this appender. # In case one needs to do log rolling on a date change, one should set the environment property # HBASE_ROOT_LOGGER to "<DESIRED_LOG LEVEL>,DRFA". # For example: -# HBASE_ROOT_LOGGER=INFO,DRFA +# export HBASE_ROOT_LOGGER=INFO,DRFA # The reason for changing default to RFA is to avoid the boundary case of filling out disk space as # DRFA doesn't put any cap on the log size. Please refer to HBase-5655 for more context. diff --git a/conf/log4j-hbtop.properties b/conf/log4j-hbtop.properties deleted file mode 100644 index 4d68d79db70d..000000000000 --- a/conf/log4j-hbtop.properties +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=WARN,console -log4j.threshold=WARN - -# console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# ZooKeeper will still put stuff at WARN -log4j.logger.org.apache.zookeeper=ERROR diff --git a/conf/log4j.properties b/conf/log4j.properties deleted file mode 100644 index 2282fa5d4a35..000000000000 --- a/conf/log4j.properties +++ /dev/null @@ -1,139 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log -hbase.log.level=INFO - -# Define the root logger to the system property "hbase.root.logger". -log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %.1000m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false 
-#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %.1000m%n - -log4j.appender.asyncconsole=org.apache.hadoop.hbase.AsyncConsoleAppender -log4j.appender.asyncconsole.target=System.err - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=${hbase.log.level} -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.META=${hbase.log.level} -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=${hbase.log.level} -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=${hbase.log.level} -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN - -# Disable request log by default, you can enable this by changing the appender -log4j.category.http.requests=INFO,NullAppender -log4j.additivity.http.requests=false -# Replace the above with this configuration if you want an http access.log -#log4j.appender.accessRFA=org.apache.log4j.RollingFileAppender -#log4j.appender.accessRFA.File=/var/log/hbase/access.log -#log4j.appender.accessRFA.layout=org.apache.log4j.PatternLayout -#log4j.appender.accessRFA.layout.ConversionPattern=%m%n -#log4j.appender.accessRFA.MaxFileSize=200MB -#log4j.appender.accessRFA.MaxBackupIndex=10 -# route http.requests to the accessRFA appender -#log4j.logger.http.requests=INFO,accessRFA -# disable http.requests.* entries going up to the root logger -#log4j.additivity.http.requests=false diff --git a/conf/log4j2-hbtop.properties b/conf/log4j2-hbtop.properties new file mode 100644 index 000000000000..de2f97641da7 --- /dev/null +++ b/conf/log4j2-hbtop.properties @@ -0,0 +1,35 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. 
The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +status = warn +dest = err +name = PropertiesConfig + +# console +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %m%n + +rootLogger = WARN,console + +# ZooKeeper will still put stuff at WARN +logger.zookeeper.name = org.apache.zookeeper +logger.zookeeper.level = ERROR + diff --git a/conf/log4j2.properties b/conf/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/conf/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-client-project/pom.xml b/hbase-archetypes/hbase-client-project/pom.xml index e8eea1b8af36..40cfab27f123 100644 --- a/hbase-archetypes/hbase-client-project/pom.xml +++ b/hbase-archetypes/hbase-client-project/pom.xml @@ -64,13 +64,23 @@ <scope>runtime</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>runtime</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>runtime</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>runtime</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>runtime</scope> </dependency> <dependency> diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea6..000000000000 --- a/hbase-archetypes/hbase-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". -log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/hbase-archetypes/hbase-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-archetypes/hbase-shaded-client-project/pom.xml b/hbase-archetypes/hbase-shaded-client-project/pom.xml index ad163b422aba..73305ea12de7 100644 --- a/hbase-archetypes/hbase-shaded-client-project/pom.xml +++ b/hbase-archetypes/hbase-shaded-client-project/pom.xml @@ -70,13 +70,23 @@ <scope>runtime</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>runtime</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>runtime</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>runtime</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>runtime</scope> </dependency> <dependency> diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties deleted file mode 100644 index 0b01e57e6ea6..000000000000 --- a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j.properties +++ /dev/null @@ -1,121 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.security.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger". -log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Rolling File Appender properties -hbase.log.maxfilesize=256MB -hbase.log.maxbackupindex=20 - -# Rolling File Appender -log4j.appender.RFA=org.apache.log4j.RollingFileAppender -log4j.appender.RFA.File=${hbase.log.dir}/${hbase.log.file} - -log4j.appender.RFA.MaxFileSize=${hbase.log.maxfilesize} -log4j.appender.RFA.MaxBackupIndex=${hbase.log.maxbackupindex} - -log4j.appender.RFA.layout=org.apache.log4j.PatternLayout -log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# -# Security audit appender -# -hbase.security.log.file=SecurityAuth.audit -hbase.security.log.maxfilesize=256MB -hbase.security.log.maxbackupindex=20 -log4j.appender.RFAS=org.apache.log4j.RollingFileAppender -log4j.appender.RFAS.File=${hbase.log.dir}/${hbase.security.log.file} -log4j.appender.RFAS.MaxFileSize=${hbase.security.log.maxfilesize} -log4j.appender.RFAS.MaxBackupIndex=${hbase.security.log.maxbackupindex} -log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout -log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -log4j.category.SecurityLogger=${hbase.security.logger} -log4j.additivity.SecurityLogger=false -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE -#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.visibility.VisibilityController=TRACE - -# -# Null Appender -# -log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n - -# Custom Logging levels - -log4j.logger.org.apache.zookeeper=INFO -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG -log4j.logger.org.apache.hadoop.hbase=INFO -# Make these two classes INFO-level. Make them DEBUG to see more zk debug. -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil=INFO -log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKWatcher=INFO -#log4j.logger.org.apache.hadoop.dfs=DEBUG -# Set this class to log INFO only otherwise its OTT -# Enable this to get detailed connection error/retry logging. 
-# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=TRACE - - -# Uncomment this line to enable tracing on _every_ RPC call (this can be a lot of output) -#log4j.logger.org.apache.hadoop.ipc.HBaseServer.trace=DEBUG - -# Uncomment the below if you want to remove logging of client region caching' -# and scan of hbase:meta messages -# log4j.logger.org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation=INFO -# log4j.logger.org.apache.hadoop.hbase.client.MetaScanner=INFO - -# EventCounter -# Add "EventCounter" to rootlogger if you want to use this -# Uncomment the line below to add EventCounter information -# log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter - -# Prevent metrics subsystem start/stop messages (HBASE-17722) -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN diff --git a/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties new file mode 100644 index 000000000000..5ffcfda24176 --- /dev/null +++ b/hbase-archetypes/hbase-shaded-client-project/src/main/resources/log4j2.properties @@ -0,0 +1,137 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ + +status = warn +dest = err +name = PropertiesConfig + +# Console appender +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n + +# Daily Rolling File Appender +appender.DRFA.type = RollingFile +appender.DRFA.name = DRFA +appender.DRFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.DRFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%d{yyyy-MM-dd} +appender.DRFA.createOnDemand = true +appender.DRFA.layout.type = PatternLayout +appender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.DRFA.policies.type = Policies +appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy +appender.DRFA.policies.time.interval = 1 +appender.DRFA.policies.time.modulate = true +appender.DRFA.policies.size.type = SizeBasedTriggeringPolicy +appender.DRFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.DRFA.strategy.type = DefaultRolloverStrategy +appender.DRFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Rolling File Appender +appender.RFA.type = RollingFile +appender.RFA.name = RFA +appender.RFA.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log} +appender.RFA.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.log.file:-hbase.log}.%i +appender.RFA.createOnDemand = true +appender.RFA.layout.type = PatternLayout +appender.RFA.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFA.policies.type = Policies +appender.RFA.policies.size.type = SizeBasedTriggeringPolicy +appender.RFA.policies.size.size = ${sys:hbase.log.maxfilesize:-256MB} +appender.RFA.strategy.type = DefaultRolloverStrategy +appender.RFA.strategy.max = ${sys:hbase.log.maxbackupindex:-20} + +# Security Audit Appender +appender.RFAS.type = RollingFile +appender.RFAS.name = RFAS +appender.RFAS.fileName = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit} +appender.RFAS.filePattern = ${sys:hbase.log.dir:-.}/${sys:hbase.security.log.file:-SecurityAuth.audit}.%i +appender.RFAS.createOnDemand = true +appender.RFAS.layout.type = PatternLayout +appender.RFAS.layout.pattern = %d{ISO8601} %-5p [%t] %c{2}: %.1000m%n +appender.RFAS.policies.type = Policies +appender.RFAS.policies.size.type = SizeBasedTriggeringPolicy +appender.RFAS.policies.size.size = ${sys:hbase.security.log.maxfilesize:-256MB} +appender.RFAS.strategy.type = DefaultRolloverStrategy +appender.RFAS.strategy.max = ${sys:hbase.security.log.maxbackupindex:-20} + +# Http Access Log RFA, uncomment this if you want an http access.log +# appender.AccessRFA.type = RollingFile +# appender.AccessRFA.name = AccessRFA +# appender.AccessRFA.fileName = /var/log/hbase/access.log +# appender.AccessRFA.filePattern = /var/log/hbase/access.log.%i +# appender.AccessRFA.createOnDemand = true +# appender.AccessRFA.layout.type = PatternLayout +# appender.AccessRFA.layout.pattern = %m%n +# appender.AccessRFA.policies.type = Policies +# appender.AccessRFA.policies.size.type = SizeBasedTriggeringPolicy +# appender.AccessRFA.policies.size.size = 200MB +# appender.AccessRFA.strategy.type = DefaultRolloverStrategy +# appender.AccessRFA.strategy.max = 10 + +# Null Appender +appender.NullAppender.type = Null +appender.NullAppender.name = NullAppender + +rootLogger = ${sys:hbase.root.logger:-INFO,console} + +logger.SecurityLogger.name = SecurityLogger +logger.SecurityLogger = 
${sys:hbase.security.logger:-INFO,console} +logger.SecurityLogger.additivity = false + +# Custom Logging levels +# logger.zookeeper.name = org.apache.zookeeper +# logger.zookeeper.level = ERROR + +# logger.FSNamesystem.name = org.apache.hadoop.fs.FSNamesystem +# logger.FSNamesystem.level = DEBUG + +# logger.hbase.name = org.apache.hadoop.hbase +# logger.hbase.level = DEBUG + +# logger.META.name = org.apache.hadoop.hbase.META +# logger.META.level = DEBUG + +# Make these two classes below DEBUG to see more zk debug. +# logger.ZKUtil.name = org.apache.hadoop.hbase.zookeeper.ZKUtil +# logger.ZKUtil.level = DEBUG + +# logger.ZKWatcher.name = org.apache.hadoop.hbase.zookeeper.ZKWatcher +# logger.ZKWatcher.level = DEBUG + +# logger.dfs.name = org.apache.hadoop.dfs +# logger.dfs.level = DEBUG + +# Prevent metrics subsystem start/stop messages (HBASE-17722) +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapte.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapte.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +# Disable request log by default, you can enable this by changing the appender +logger.http.name = http.requests +logger.http.additivity = false +logger.http = INFO,NullAppender +# Replace the above with this configuration if you want an http access.log +# logger.http = INFO,AccessRFA diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index e6898ea63f46..eee87f7b6a55 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -352,12 +352,16 @@ <artifactId>jul-to-slf4j</artifactId> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> </dependency> <!-- Include OpenTelemetry agent --> <dependency> @@ -365,6 +369,10 @@ <artifactId>opentelemetry-javaagent</artifactId> <classifier>all</classifier> </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> + </dependency> </dependencies> <profiles> <profile> diff --git a/hbase-assembly/src/main/assembly/client.xml b/hbase-assembly/src/main/assembly/client.xml index 41aeea577eb1..9a7f55248610 100644 --- a/hbase-assembly/src/main/assembly/client.xml +++ b/hbase-assembly/src/main/assembly/client.xml @@ -62,12 +62,17 @@ <exclude>org.apache.htrace:htrace-core4</exclude> <exclude>org.apache.htrace:htrace-core</exclude> <exclude>org.apache.yetus:audience-annotations</exclude> - <exclude>org.slf4j:slf4j-api</exclude> - <exclude>org.slf4j:jcl-over-slf4j</exclude> - <exclude>org.slf4j:jul-to-slf4j</exclude> - <exclude>org.slf4j:slf4j-log4j12</exclude> - <exclude>org.slf4j:slf4j-reload4j</exclude> + <exclude>org.slf4j:*</exclude> + <exclude>org.apache.logging.log4j:*</exclude> <exclude>io.opentelemetry.javaagent:*</exclude>
</excludes> </dependencySet> </dependencySets> @@ -152,6 +157,7 @@ <include>org.apache.htrace:htrace-core4</include> <include>org.apache.htrace:htrace-core</include> <include>org.apache.yetus:audience-annotations</include> - <include>org.slf4j:slf4j-api</include> - <include>org.slf4j:jcl-over-slf4j</include> - <include>org.slf4j:jul-to-slf4j</include> + <include>org.slf4j:*</include> + <include>org.apache.logging.log4j:*</include> @@ -163,6 +169,10 @@ <outputDirectory>lib/trace</outputDirectory> <includes> <include>io.opentelemetry.javaagent:*</include> </includes> </dependencySet> </dependencySets> diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml index d24f58224277..d5e32ac9f6a4 100644 --- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml +++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml @@ -49,11 +49,9 @@ <include>org.apache.hbase:hbase-metrics</include> <include>org.apache.hbase:hbase-metrics-api</include> <include>org.apache.hbase:hbase-procedure</include> - <include>org.apache.hbase:hbase-protocol</include> <include>org.apache.hbase:hbase-protocol-shaded</include> <include>org.apache.hbase:hbase-replication</include> <include>org.apache.hbase:hbase-rest</include> - <include>org.apache.hbase:hbase-rsgroup</include> <include>org.apache.hbase:hbase-server</include> <include>org.apache.hbase:hbase-shell</include> <include>org.apache.hbase:hbase-testing-util</include> @@ -111,10 +109,15 @@ <exclude>org.apache.htrace:htrace-core4</exclude> <exclude>org.apache.htrace:htrace-core</exclude> <exclude>org.apache.yetus:audience-annotations</exclude> - <exclude>org.slf4j:slf4j-api</exclude> - <exclude>org.slf4j:slf4j-log4j12</exclude> - <exclude>org.slf4j:slf4j-reload4j</exclude> + <exclude>org.slf4j:*</exclude> + <exclude>org.apache.logging.log4j:*</exclude> <exclude>io.opentelemetry.javaagent:*</exclude> </excludes> </dependencySet> </dependencySets> @@ -211,11 +214,16 @@ <include>org.apache.htrace:htrace-core4</include> <include>org.apache.htrace:htrace-core</include> <include>org.apache.yetus:audience-annotations</include> - <include>org.slf4j:slf4j-api</include> - <include>org.slf4j:jcl-over-slf4j</include> - <include>org.slf4j:jul-to-slf4j</include> - <include>org.slf4j:slf4j-reload4j</include> + <include>org.slf4j:*</include> + <include>org.apache.logging.log4j:*</include> <include>io.opentelemetry:*</include> </includes> </dependencySet> <dependencySet>
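Across these poms and assembly descriptors, the slf4j-reload4j/reload4j pair is replaced by log4j-api, log4j-core, log4j-slf4j-impl and the log4j-1.2-api bridge, so code written against the Log4j 1.x API keeps working without shipping a 1.x implementation. A small sketch of what the bridge enables, assuming log4j-1.2-api and log4j-core on the classpath (the class name below is an assumption for the demo):

    // Illustrative sketch: with log4j-1.2-api on the classpath, as these poms
    // now arrange, legacy Log4j 1.x API calls are rerouted to Log4j2, so
    // neither reload4j nor log4j 1.x is needed at runtime.
    import org.apache.log4j.Logger; // supplied by the log4j-1.2-api bridge

    public class LegacyApiBridgeDemo {
      public static void main(String[] args) {
        Logger legacy = Logger.getLogger(LegacyApiBridgeDemo.class);
        // Despite the 1.x package name, this event is handled by log4j-core
        // and formatted by the appenders defined in log4j2.properties.
        legacy.info("legacy Log4j 1.x call handled by Log4j2");
      }
    }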
diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml index 2cb984012b63..26cab77a20fc 100644 --- a/hbase-asyncfs/pom.xml +++ b/hbase-asyncfs/pom.xml @@ -149,13 +149,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java index fc148e8de796..e1bc83ca684c 100644 --- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java +++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSTestBase.java @@ -96,11 +96,6 @@ protected static void startMiniDFSCluster(int servers) throws IOException { createDirsAndSetProperties(); Configuration conf = UTIL.getConfiguration(); - // Error level to skip some warnings specific to the minicluster. See HBASE-4709 - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.util.MBeans.class) - .setLevel(org.apache.log4j.Level.ERROR); - org.apache.log4j.Logger.getLogger(org.apache.hadoop.metrics2.impl.MetricsSystemImpl.class) - .setLevel(org.apache.log4j.Level.ERROR); CLUSTER = new MiniDFSCluster.Builder(conf).numDataNodes(servers).build(); CLUSTER.waitClusterUp(); diff --git a/hbase-balancer/pom.xml b/hbase-balancer/pom.xml new file mode 100644 index 000000000000..c321af556b16 --- /dev/null +++ b/hbase-balancer/pom.xml @@ -0,0 +1,168 @@ +<?xml version="1.0"?> +<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + <!-- + /** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + --> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-build-configuration</artifactId> + <groupId>org.apache.hbase</groupId> + <version>3.0.0-SNAPSHOT</version> + <relativePath>../hbase-build-configuration</relativePath> + </parent> + + <artifactId>hbase-balancer</artifactId> + <name>Apache HBase - Balancer</name> + <description>HBase Balancer Support</description> + <!--REMOVE--> + + <build> + <plugins> + <!-- Make a jar and put the sources in the jar --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-source-plugin</artifactId> + </plugin> + <plugin> + <!--Make it so assembly:single does nothing in here--> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <skipAssembly>true</skipAssembly> + </configuration> + </plugin> + <plugin> + <groupId>net.revelc.code</groupId> + <artifactId>warbucks-maven-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-checkstyle-plugin</artifactId> + <configuration> + <failOnViolation>true</failOnViolation> + </configuration> + </plugin> + </plugins> + </build> + + <dependencies> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-common</artifactId> + <type>test-jar</type> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-annotations</artifactId> + <type>test-jar</type> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-common</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-client</artifactId> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + <dependency> + <groupId>com.github.stephenc.findbugs</groupId> + <artifactId>findbugs-annotations</artifactId> + <scope>compile</scope> + <optional>true</optional> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>jcl-over-slf4j</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>jul-to-slf4j</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + + <profiles> + <!-- Profiles for building against different hadoop versions --> + <profile> + <id>hadoop-3.0</id> + <activation> + <property><name>!hadoop.profile</name></property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + </dependency> + </dependencies> + </profile> + <profile> + <id>eclipse-specific</id> + <activation> + <property> + <name>m2e.version</name> + </property> + </activation> + <build> + <pluginManagement> + <plugins> + <!--This plugin's configuration is used to store Eclipse m2e settings + only. 
It has no influence on the Maven build itself.--> + <plugin> + <groupId>org.eclipse.m2e</groupId> + <artifactId>lifecycle-mapping</artifactId> + <configuration> + <lifecycleMappingMetadata> + <pluginExecutions> + </pluginExecutions> + </lifecycleMappingMetadata> + </configuration> + </plugin> + </plugins> + </pluginManagement> + </build> + </profile> + </profiles> +</project> diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index f47349787144..20b2ef7ceb55 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -177,13 +177,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java index fa44022f8d09..dc94e91f4fde 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestFailedServersLog.java @@ -17,73 +17,82 @@ */ package org.apache.hadoop.hbase.ipc; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; -@RunWith(MockitoJUnitRunner.class) @Category({ ClientTests.class, SmallTests.class }) public class TestFailedServersLog { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFailedServersLog.class); + HBaseClassTestRule.forClass(TestFailedServersLog.class); static final int TEST_PORT = 9999; - private Address addr; - @Mock - private Appender mockAppender; + private Address addr; - @Captor - private ArgumentCaptor captorLoggingEvent; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setup() { - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender 
= mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).addAppender(mockAppender); + } @After public void teardown() { - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(FailedServers.class)).removeAppender(mockAppender); } @Test public void testAddToFailedServersLogging() { - Throwable nullException = new NullPointerException(); + AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>(); + AtomicReference<String> msg = new AtomicReference<String>(); + doAnswer(new Answer<Void>() { + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); + + Throwable nullException = new NullPointerException(); FailedServers fs = new FailedServers(new Configuration()); addr = Address.fromParts("localhost", TEST_PORT); fs.addToFailedServers(addr, nullException); - Mockito.verify(mockAppender).doAppend((LoggingEvent) captorLoggingEvent.capture()); - LoggingEvent loggingEvent = (LoggingEvent) captorLoggingEvent.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.DEBUG)); - assertEquals("Added failed server with address " + addr.toString() + " to list caused by " - + nullException.toString(), - loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.DEBUG, level.get()); + assertEquals("Added failed server with address " + addr.toString() + " to list caused by " + + nullException.toString(), msg.get()); } - } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 2252c215fa68..538a9b91c3c5 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -30,7 +30,6 @@ import java.io.IOException; import java.net.InetAddress; import java.util.Map; - import javax.security.auth.callback.Callback; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; @@ -39,7 +38,6 @@ import javax.security.sasl.RealmCallback; import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -55,16 +53,15 @@ import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.Assert; -import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.mockito.Mockito; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Strings; @@ -83,17 +80,12 @@ public class TestHBaseSaslRpcClient { static final String DEFAULT_USER_NAME = "principal"; static final String DEFAULT_USER_PASSWORD = "password"; - private static final Logger LOG = Logger.getLogger(TestHBaseSaslRpcClient.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHBaseSaslRpcClient.class); @Rule public ExpectedException exception = ExpectedException.none(); - @BeforeClass - public static void before() { - Logger.getRootLogger().setLevel(Level.DEBUG); - } - @Test public void testSaslClientUsesGivenRpcProtection() throws Exception { Token<? extends TokenIdentifier> token = createTokenMockWithCredentials(DEFAULT_USER_NAME, diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index f983c4c143a5..ce242286f4a9 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -235,13 +235,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java index cf654f583b89..f67ce616e2e2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestJul2Slf4j.java @@ -17,27 +17,26 @@ */ package org.apache.hadoop.hbase.logging; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.mockito.ArgumentCaptor; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; /** * This should be in the hbase-logging module but the {@link HBaseClassTestRule} is in hbase-common @@ -56,27 +55,42 @@ public class TestJul2Slf4j { private String loggerName = getClass().getName(); - private Appender mockAppender; + private org.apache.logging.log4j.core.Appender mockAppender; @Before public void setUp() { - mockAppender = mock(Appender.class); - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = 
mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).addAppender(mockAppender); } @After public void tearDown() { - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(loggerName)).removeAppender(mockAppender); } @Test public void test() throws IOException { + AtomicReference<org.apache.logging.log4j.Level> level = new AtomicReference<>(); + AtomicReference<String> msg = new AtomicReference<String>(); + doAnswer(new Answer<Void>() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + level.set(logEvent.getLevel()); + msg.set(logEvent.getMessage().getFormattedMessage()); + return null; + } + }).when(mockAppender).append(any(org.apache.logging.log4j.core.LogEvent.class)); java.util.logging.Logger logger = java.util.logging.Logger.getLogger(loggerName); logger.info(loggerName); - ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class); - verify(mockAppender, times(1)).doAppend(captor.capture()); - LoggingEvent loggingEvent = captor.getValue(); - assertThat(loggingEvent.getLevel(), is(Level.INFO)); - assertEquals(loggerName, loggingEvent.getRenderedMessage()); + verify(mockAppender, times(1)).append(any(org.apache.logging.log4j.core.LogEvent.class)); + assertEquals(org.apache.logging.log4j.Level.INFO, level.get()); + assertEquals(loggerName, msg.get()); } -} +} \ No newline at end of file diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java index 89931de7128f..806107b55c66 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/logging/TestLog4jUtils.java @@ -24,9 +24,6 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -44,23 +41,29 @@ public class TestLog4jUtils { @Test public void test() { - Logger zk = LogManager.getLogger("org.apache.zookeeper"); - Level zkLevel = zk.getEffectiveLevel(); - Logger hbaseZk = LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); - Level hbaseZkLevel = hbaseZk.getEffectiveLevel(); - Logger client = LogManager.getLogger("org.apache.hadoop.hbase.client"); - Level clientLevel = client.getEffectiveLevel(); + org.apache.logging.log4j.Logger zk = + org.apache.logging.log4j.LogManager.getLogger("org.apache.zookeeper"); + org.apache.logging.log4j.Level zkLevel = zk.getLevel(); + org.apache.logging.log4j.Logger hbaseZk = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.zookeeper"); + org.apache.logging.log4j.Level hbaseZkLevel = hbaseZk.getLevel(); + org.apache.logging.log4j.Logger client = + org.apache.logging.log4j.LogManager.getLogger("org.apache.hadoop.hbase.client"); + org.apache.logging.log4j.Level clientLevel = 
client.getLevel(); Log4jUtils.disableZkAndClientLoggers(); - assertEquals(Level.OFF, zk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(zk.getName())); - assertEquals(Level.OFF, hbaseZk.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(hbaseZk.getName())); - assertEquals(Level.OFF, client.getLevel()); - assertEquals(Level.OFF.toString(), Log4jUtils.getEffectiveLevel(client.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, zk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(zk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, hbaseZk.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(hbaseZk.getName())); + assertEquals(org.apache.logging.log4j.Level.OFF, client.getLevel()); + assertEquals(org.apache.logging.log4j.Level.OFF.toString(), + Log4jUtils.getEffectiveLevel(client.getName())); // restore the level - zk.setLevel(zkLevel); - hbaseZk.setLevel(hbaseZkLevel); - client.setLevel(clientLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(zk.getName(), zkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(hbaseZk.getName(), hbaseZkLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(client.getName(), clientLevel); } @Test diff --git a/hbase-compression/hbase-compression-aircompressor/pom.xml b/hbase-compression/hbase-compression-aircompressor/pom.xml index ccb4f272fc72..6fc5282674dc 100644 --- a/hbase-compression/hbase-compression-aircompressor/pom.xml +++ b/hbase-compression/hbase-compression-aircompressor/pom.xml @@ -135,13 +135,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-compression/hbase-compression-lz4/pom.xml b/hbase-compression/hbase-compression-lz4/pom.xml index 6489aee169c9..92b546a9af70 100644 --- a/hbase-compression/hbase-compression-lz4/pom.xml +++ b/hbase-compression/hbase-compression-lz4/pom.xml @@ -124,13 +124,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-compression/hbase-compression-snappy/pom.xml b/hbase-compression/hbase-compression-snappy/pom.xml index 6ccf48ac7704..1e66ccd41ae8 100644 --- a/hbase-compression/hbase-compression-snappy/pom.xml +++ b/hbase-compression/hbase-compression-snappy/pom.xml @@ -124,13 +124,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> 
- <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-compression/hbase-compression-xz/pom.xml b/hbase-compression/hbase-compression-xz/pom.xml index 425366f7d163..1884a5c74547 100644 --- a/hbase-compression/hbase-compression-xz/pom.xml +++ b/hbase-compression/hbase-compression-xz/pom.xml @@ -108,13 +108,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-compression/hbase-compression-zstd/pom.xml b/hbase-compression/hbase-compression-zstd/pom.xml index 8afe65677fde..4f13758e54ec 100644 --- a/hbase-compression/hbase-compression-zstd/pom.xml +++ b/hbase-compression/hbase-compression-zstd/pom.xml @@ -124,13 +124,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index 42ed82d29bb4..ccdb823f26a9 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -228,13 +228,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index f9600c8048ea..31c526dc9bbf 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -225,13 +225,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + 
<artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index ee74fd91d867..4bc74fe8836c 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -1,5 +1,7 @@ <?xml version="1.0"?> -<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> +<project xmlns="https://maven.apache.org/POM/4.0.0" + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> <!-- /** * Licensed to the Apache Software Foundation (ASF) under one @@ -19,23 +21,23 @@ * limitations under the License. */ --> - <modelVersion>4.0.0</modelVersion> - <parent> - <artifactId>hbase-build-configuration</artifactId> - <groupId>org.apache.hbase</groupId> - <version>2.6.0-SNAPSHOT</version> - <relativePath>../hbase-build-configuration</relativePath> - </parent> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-build-configuration</artifactId> + <groupId>org.apache.hbase</groupId> + <version>2.6.0-SNAPSHOT</version> + <relativePath>../hbase-build-configuration</relativePath> + </parent> - <artifactId>hbase-hadoop-compat</artifactId> - <name>Apache HBase - Hadoop Compatibility</name> - <description> + <artifactId>hbase-hadoop-compat</artifactId> + <name>Apache HBase - Hadoop Compatibility</name> + <description> Interfaces to be implemented in order to smooth over hadoop version differences - </description> + </description> - <build> - <plugins> + <build> + <plugins> <plugin> <!--Make it so assembly:single does nothing in here--> <artifactId>maven-assembly-plugin</artifactId> @@ -43,117 +45,126 @@ <skipAssembly>true</skipAssembly> </configuration> </plugin> - <!-- Make a jar and put the sources in the jar --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-source-plugin</artifactId> - </plugin> - <plugin> - <groupId>net.revelc.code</groupId> - <artifactId>warbucks-maven-plugin</artifactId> - </plugin> - </plugins> - </build> - - <dependencies> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-annotations</artifactId> - <type>test-jar</type> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-logging</artifactId> - <type>test-jar</type> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-common</artifactId> - <type>test-jar</type> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase.thirdparty</groupId> - <artifactId>hbase-shaded-miscellaneous</artifactId> - </dependency> - <!-- General dependencies --> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - 
<artifactId>hbase-metrics-api</artifactId> - </dependency> - <dependency> - <groupId>junit</groupId> - <artifactId>junit</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>jcl-over-slf4j</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>jul-to-slf4j</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> - <scope>test</scope> - </dependency> - <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> - <scope>test</scope> - </dependency> - </dependencies> + <!-- Make a jar and put the sources in the jar --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-source-plugin</artifactId> + </plugin> + <plugin> + <groupId>net.revelc.code</groupId> + <artifactId>warbucks-maven-plugin</artifactId> + </plugin> + </plugins> + </build> - <profiles> + <dependencies> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-annotations</artifactId> + <type>test-jar</type> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-logging</artifactId> + <type>test-jar</type> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-common</artifactId> + <type>test-jar</type> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase.thirdparty</groupId> + <artifactId>hbase-shaded-miscellaneous</artifactId> + </dependency> + <!-- General dependencies --> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-metrics-api</artifactId> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>jcl-over-slf4j</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>jul-to-slf4j</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> + <scope>test</scope> + </dependency> + </dependencies> + <profiles> <!-- Skip the tests in this module --> - <profile> - <id>skipHadoopCompatTests</id> - <activation> - <property> - <name>skipHadoopCompatTests</name> - </property> - </activation> - <properties> - <surefire.skipFirstPart>true</surefire.skipFirstPart> - <surefire.skipSecondPart>true</surefire.skipSecondPart> - </properties> - </profile> - <profile> - <id>eclipse-specific</id> - <activation> - <property> - <name>m2e.version</name> - </property> - </activation> - <build> - <pluginManagement> - <plugins> + <profile> + <id>skipHadoopCompatTests</id> + <activation> + <property> + <name>skipHadoopCompatTests</name> + </property> + </activation> + <properties> + <surefire.skipFirstPart>true</surefire.skipFirstPart> + 
<surefire.skipSecondPart>true</surefire.skipSecondPart> + </properties> + </profile> + <profile> + <id>eclipse-specific</id> + <activation> + <property> + <name>m2e.version</name> + </property> + </activation> + <build> + <pluginManagement> + <plugins> <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.--> - <plugin> - <groupId>org.eclipse.m2e</groupId> - <artifactId>lifecycle-mapping</artifactId> - <configuration> - <lifecycleMappingMetadata> - <pluginExecutions> - </pluginExecutions> - </lifecycleMappingMetadata> - </configuration> - </plugin> - </plugins> - </pluginManagement> - </build> - </profile> - </profiles> + <plugin> + <groupId>org.eclipse.m2e</groupId> + <artifactId>lifecycle-mapping</artifactId> + <configuration> + <lifecycleMappingMetadata> + <pluginExecutions> + </pluginExecutions> + </lifecycleMappingMetadata> + </configuration> + </plugin> + </plugins> + </pluginManagement> + </build> + </profile> + </profiles> </project> diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml index 54174481f0fe..3fc7503add20 100644 --- a/hbase-hadoop2-compat/pom.xml +++ b/hbase-hadoop2-compat/pom.xml @@ -180,13 +180,18 @@ limitations under the License. <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-hbtop/pom.xml b/hbase-hbtop/pom.xml index a1e62180c022..20a40faf8b33 100644 --- a/hbase-hbtop/pom.xml +++ b/hbase-hbtop/pom.xml @@ -92,13 +92,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-http/pom.xml b/hbase-http/pom.xml index bc91be32b6ae..6088f3804c69 100644 --- a/hbase-http/pom.xml +++ b/hbase-http/pom.xml @@ -246,13 +246,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java 
index 819581735a89..611316d9ec67 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java @@ -45,7 +45,6 @@ import org.apache.hadoop.util.ServletUtil; import org.apache.hadoop.util.Tool; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -307,8 +306,7 @@ private void process(String urlString) throws Exception { /** * A servlet implementation */ - @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) - @InterfaceStability.Unstable + @InterfaceAudience.Private public static class Servlet extends HttpServlet { private static final long serialVersionUID = 1L; diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java index b52129ccdbf3..7019b207ec61 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; + import java.io.File; import java.io.IOException; import java.net.BindException; @@ -53,9 +54,6 @@ import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -65,11 +63,11 @@ /** * Test LogLevel. */ -@Category({MiscTests.class, SmallTests.class}) +@Category({ MiscTests.class, SmallTests.class }) public class TestLogLevel { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLogLevel.class); + HBaseClassTestRule.forClass(TestLogLevel.class); private static String keystoresDir; private static String sslConfDir; @@ -79,9 +77,10 @@ public class TestLogLevel { private static final String logName = TestLogLevel.class.getName(); private static final String protectedPrefix = "protected"; private static final String protectedLogName = protectedPrefix + "." + logName; - private static final Logger log = LogManager.getLogger(logName); + private static final org.apache.logging.log4j.Logger log = + org.apache.logging.log4j.LogManager.getLogger(logName); private final static String PRINCIPAL = "loglevel.principal"; - private final static String KEYTAB = "loglevel.keytab"; + private final static String KEYTAB = "loglevel.keytab"; private static MiniKdc kdc; @@ -111,8 +110,7 @@ public static void setUp() throws Exception { } /** - * Sets up {@link MiniKdc} for testing security. - * Copied from HBaseTestingUtility#setupMiniKdc(). + * Sets up {@link MiniKdc} for testing security. Copied from HBaseTestingUtility#setupMiniKdc(). */ static private MiniKdc setupMiniKdc() throws Exception { Properties conf = MiniKdc.createConf(); @@ -130,7 +128,7 @@ static private MiniKdc setupMiniKdc() throws Exception { kdc = new MiniKdc(conf, dir); kdc.start(); } catch (BindException e) { - FileUtils.deleteDirectory(dir); // clean directory + FileUtils.deleteDirectory(dir); // clean directory numTries++; if (numTries == 3) { log.error("Failed setting up MiniKDC. 
Tried " + numTries + " times."); @@ -156,15 +154,15 @@ static private void setupSSL(File base) throws Exception { } /** - * Get the SSL configuration. - * This method is copied from KeyStoreTestUtil#getSslConfig() in Hadoop. + * Get the SSL configuration. This method is copied from KeyStoreTestUtil#getSslConfig() in + * Hadoop. * @return {@link Configuration} instance with ssl configs loaded. * @param conf to pull client/server SSL settings filename from */ - private static Configuration getSslConfig(Configuration conf){ + private static Configuration getSslConfig(Configuration conf) { Configuration sslConf = new Configuration(false); String sslServerConfFile = conf.get(SSLFactory.SSL_SERVER_CONF_KEY); - String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); + String sslClientConfFile = conf.get(SSLFactory.SSL_CLIENT_CONF_KEY); sslConf.addResource(sslServerConfFile); sslConf.addResource(sslClientConfFile); sslConf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile); @@ -189,36 +187,29 @@ public static void tearDown() { public void testCommandOptions() throws Exception { final String className = this.getClass().getName(); - assertFalse(validateCommand(new String[] {"-foo" })); + assertFalse(validateCommand(new String[] { "-foo" })); // fail due to insufficient number of arguments assertFalse(validateCommand(new String[] {})); - assertFalse(validateCommand(new String[] {"-getlevel" })); - assertFalse(validateCommand(new String[] {"-setlevel" })); - assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" })); + assertFalse(validateCommand(new String[] { "-getlevel" })); + assertFalse(validateCommand(new String[] { "-setlevel" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080" })); // valid command arguments - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); - assertTrue(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className })); - assertTrue(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); + assertTrue(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className })); + assertTrue(validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG" })); // fail due to the extra argument - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "blah" })); - assertFalse(validateCommand( - new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); - assertFalse(validateCommand( - new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080", - className })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "blah" })); + assertFalse( + validateCommand(new String[] { "-setlevel", "foo.bar:8080", className, "DEBUG", "blah" })); + assertFalse(validateCommand(new String[] { "-getlevel", "foo.bar:8080", className, "-setlevel", + "foo.bar:8080", className })); } /** * Check to see if a command can be accepted. - * * @param args a String array of arguments * @return true if the command can be accepted, false if not. 
*/ @@ -237,40 +228,32 @@ private boolean validateCommand(String[] args) { } /** - * Creates and starts a Jetty server binding at an ephemeral port to run - * LogLevel servlet. + * Creates and starts a Jetty server binding at an ephemeral port to run LogLevel servlet. * @param protocol "http" or "https" * @param isSpnego true if SPNEGO is enabled * @return a created HttpServer object * @throws Exception if unable to create or start a Jetty server */ - private HttpServer createServer(String protocol, boolean isSpnego) - throws Exception { - HttpServer.Builder builder = new HttpServer.Builder() - .setName("..") - .addEndpoint(new URI(protocol + "://localhost:0")) - .setFindPort(true) - .setConf(serverConf); + private HttpServer createServer(String protocol, boolean isSpnego) throws Exception { + HttpServer.Builder builder = new HttpServer.Builder().setName("..") + .addEndpoint(new URI(protocol + "://localhost:0")).setFindPort(true).setConf(serverConf); if (isSpnego) { // Set up server Kerberos credentials. // Since the server may fall back to simple authentication, // use ACL to make sure the connection is Kerberos/SPNEGO authenticated. - builder.setSecurityEnabled(true) - .setUsernameConfKey(PRINCIPAL) - .setKeytabConfKey(KEYTAB) - .setACL(new AccessControlList("client")); + builder.setSecurityEnabled(true).setUsernameConfKey(PRINCIPAL).setKeytabConfKey(KEYTAB) + .setACL(new AccessControlList("client")); } // if using HTTPS, configure keystore/truststore properties. if (protocol.equals(LogLevel.PROTOCOL_HTTPS)) { - builder = builder. - keyPassword(sslConf.get("ssl.server.keystore.keypassword")) - .keyStore(sslConf.get("ssl.server.keystore.location"), - sslConf.get("ssl.server.keystore.password"), - sslConf.get("ssl.server.keystore.type", "jks")) - .trustStore(sslConf.get("ssl.server.truststore.location"), - sslConf.get("ssl.server.truststore.password"), - sslConf.get("ssl.server.truststore.type", "jks")); + builder = builder.keyPassword(sslConf.get("ssl.server.keystore.keypassword")) + .keyStore(sslConf.get("ssl.server.keystore.location"), + sslConf.get("ssl.server.keystore.password"), + sslConf.get("ssl.server.keystore.type", "jks")) + .trustStore(sslConf.get("ssl.server.truststore.location"), + sslConf.get("ssl.server.truststore.password"), + sslConf.get("ssl.server.truststore.type", "jks")); } HttpServer server = builder.build(); @@ -279,38 +262,38 @@ private HttpServer createServer(String protocol, boolean isSpnego) } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, Level.DEBUG.toString()); + final boolean isSpnego) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + org.apache.logging.log4j.Level.DEBUG.toString()); } private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String newLevel) - throws Exception { - testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, logName, newLevel); + final boolean isSpnego, final String newLevel) throws Exception { + testDynamicLogLevel(bindProtocol, connectProtocol, isSpnego, + logName, + newLevel); } /** * Run both client and server using the given protocol. 
- * * @param bindProtocol specify either http or https for server * @param connectProtocol specify either http or https for client * @param isSpnego true if SPNEGO is enabled * @throws Exception if client can't accesss server. */ private void testDynamicLogLevel(final String bindProtocol, final String connectProtocol, - final boolean isSpnego, final String loggerName, final String newLevel) - throws Exception { + final boolean isSpnego, final String loggerName, final String newLevel) throws Exception { if (!LogLevel.isValidProtocol(bindProtocol)) { throw new Exception("Invalid server protocol " + bindProtocol); } if (!LogLevel.isValidProtocol(connectProtocol)) { throw new Exception("Invalid client protocol " + connectProtocol); } - Logger log = LogManager.getLogger(loggerName); - Level oldLevel = log.getLevel(); + org.apache.logging.log4j.Logger log = org.apache.logging.log4j.LogManager.getLogger(loggerName); + org.apache.logging.log4j.Level oldLevel = log.getLevel(); assertNotEquals("Get default Log Level which shouldn't be ERROR.", - Level.ERROR, oldLevel); + org.apache.logging.log4j.Level.ERROR, oldLevel); // configs needed for SPNEGO at server side if (isSpnego) { @@ -331,8 +314,8 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect String keytabFilePath = keyTabFile.getAbsolutePath(); - UserGroupInformation clientUGI = UserGroupInformation. - loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); + UserGroupInformation clientUGI = + UserGroupInformation.loginUserFromKeytabAndReturnUGI(clientPrincipal, keytabFilePath); try { clientUGI.doAs((PrivilegedExceptionAction<Void>) () -> { // client command line @@ -346,41 +329,36 @@ private void testDynamicLogLevel(final String bindProtocol, final String connect } // restore log level - GenericTestUtils.setLogLevel(log, oldLevel); + org.apache.logging.log4j.core.config.Configurator.setLevel(log.getName(), oldLevel); } /** - * Run LogLevel command line to start a client to get log level of this test - * class. - * + * Run LogLevel command line to start a client to get log level of this test class. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to connect */ private void getLevel(String protocol, String authority, String logName) throws Exception { - String[] getLevelArgs = {"-getlevel", authority, logName, "-protocol", protocol}; + String[] getLevelArgs = { "-getlevel", authority, logName, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? sslConf : clientConf); cli.run(getLevelArgs); } /** - * Run LogLevel command line to start a client to set log level of this test - * class to debug. - * + * Run LogLevel command line to start a client to set log level of this test class to debug. * @param protocol specify either http or https * @param authority daemon's web UI address * @throws Exception if unable to run or log level does not change as expected */ - private void setLevel(String protocol, String authority, String logName, String newLevel) - throws Exception { - String[] setLevelArgs = {"-setlevel", authority, logName, newLevel, "-protocol", protocol}; + private void setLevel(String protocol, String authority, String logName, String newLevel) throws Exception { + String[] setLevelArgs = { "-setlevel", authority, logName, newLevel, "-protocol", protocol }; CLI cli = new CLI(protocol.equalsIgnoreCase("https") ? 
sslConf : clientConf); cli.run(setLevelArgs); - Logger log = LogManager.getLogger(logName); + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getLogger(logName); assertEquals("new level not equal to expected: ", newLevel.toUpperCase(), - log.getEffectiveLevel().toString()); + logger.getLevel().toString()); } @Test @@ -397,7 +375,6 @@ public void testSettingProtectedLogLevel() throws Exception { /** * Test setting log level to "Info". - * * @throws Exception if client can't set log level to INFO. */ @Test @@ -407,7 +384,6 @@ public void testInfoLogLevel() throws Exception { /** * Test setting log level to "Error". - * * @throws Exception if client can't set log level to ERROR. */ @Test @@ -417,18 +393,15 @@ public void testErrorLogLevel() throws Exception { /** * Server runs HTTP, no SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttp() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - false); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, false); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -436,18 +409,15 @@ public void testLogLevelByHttp() throws Exception { /** * Server runs HTTP + SPNEGO. - * - * @throws Exception if http client can't access http server, - * or http client can access https server. + * @throws Exception if http client can't access http server, or http client can access https + * server. */ @Test public void testLogLevelByHttpWithSpnego() throws Exception { testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTP, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, - true); - fail("An HTTPS Client should not have succeeded in connecting to a " + - "HTTP server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTP, LogLevel.PROTOCOL_HTTPS, true); + fail("An HTTPS Client should not have succeeded in connecting to a " + "HTTP server"); } catch (SSLException e) { exceptionShouldContains("Unrecognized SSL message", e); } @@ -455,19 +425,15 @@ public void testLogLevelByHttpWithSpnego() throws Exception { /** * Server runs HTTPS, no SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. 
*/ @Test public void testLogLevelByHttps() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - false); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, false); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - false); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, false); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } @@ -475,32 +441,27 @@ public void testLogLevelByHttps() throws Exception { /** * Server runs HTTPS + SPNEGO. - * - * @throws Exception if https client can't access https server, - * or https client can access http server. + * @throws Exception if https client can't access https server, or https client can access http + * server. */ @Test public void testLogLevelByHttpsWithSpnego() throws Exception { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, - true); + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTPS, true); try { - testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, - true); - fail("An HTTP Client should not have succeeded in connecting to a " + - "HTTPS server"); - } catch (SocketException e) { + testDynamicLogLevel(LogLevel.PROTOCOL_HTTPS, LogLevel.PROTOCOL_HTTP, true); + fail("An HTTP Client should not have succeeded in connecting to a " + "HTTPS server"); + } catch (SocketException e) { exceptionShouldContains("Unexpected end of file from server", e); } } /** - * Assert that a throwable or one of its causes should contain the substr in its message. - * - * Ideally we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util - * method which asserts t.toString() contains the substr. As the original throwable may have been - * wrapped in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. - * After stop supporting Hadoop2, this method can be removed and assertion in tests can use - * t.getCause() directly, similar to HADOOP-15280. + * Assert that a throwable or one of its causes should contain the substr in its message. Ideally + * we should use {@link GenericTestUtils#assertExceptionContains(String, Throwable)} util method + * which asserts t.toString() contains the substr. As the original throwable may have been wrapped + * in Hadoop3 because of HADOOP-12897, it's required to check all the wrapped causes. After stop + * supporting Hadoop2, this method can be removed and assertion in tests can use t.getCause() + * directly, similar to HADOOP-15280. 
*/ private static void exceptionShouldContains(String substr, Throwable throwable) { Throwable t = throwable; @@ -512,6 +473,6 @@ private static void exceptionShouldContains(String substr, Throwable throwable) t = t.getCause(); } throw new AssertionError("Expected to find '" + substr + "' but got unexpected exception:" + - StringUtils.stringifyException(throwable), throwable); + StringUtils.stringifyException(throwable), throwable); } -} +} \ No newline at end of file diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml index eddc3a40be6c..d9b580c85511 100644 --- a/hbase-it/pom.xml +++ b/hbase-it/pom.xml @@ -268,13 +268,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-logging/pom.xml b/hbase-logging/pom.xml index 91703bff9a58..fc183ef2cf23 100644 --- a/hbase-logging/pom.xml +++ b/hbase-logging/pom.xml @@ -38,7 +38,7 @@ <testResource> <directory>src/test/resources</directory> <includes> - <include>log4j.properties</include> + <include>log4j2.xml</include> </includes> </testResource> </testResources> @@ -80,7 +80,7 @@ </dependency> <dependency> <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <artifactId>jcl-over-slf4j</artifactId> <scope>test</scope> </dependency> <dependency> @@ -89,9 +89,24 @@ <scope>provided</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> <scope>provided</scope> </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> + <scope>test</scope> + </dependency> </dependencies> </project> diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java deleted file mode 100644 index 939b453c8d4b..000000000000 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase; - -import org.apache.yetus.audience.InterfaceAudience; - -/** - * Logger class that buffers before trying to log to the specified console. - */ -@InterfaceAudience.Private -public class AsyncConsoleAppender extends org.apache.log4j.AsyncAppender { - private final org.apache.log4j.ConsoleAppender consoleAppender; - - public AsyncConsoleAppender() { - super(); - consoleAppender = new org.apache.log4j.ConsoleAppender( - new org.apache.log4j.PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n")); - this.addAppender(consoleAppender); - } - - public void setTarget(String value) { - consoleAppender.setTarget(value); - } - - @Override - public void activateOptions() { - consoleAppender.activateOptions(); - super.activateOptions(); - } - -} diff --git a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java index 28d29bf30131..b0711d7e8f1a 100644 --- a/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java +++ b/hbase-logging/src/main/java/org/apache/hadoop/hbase/logging/InternalLog4jUtils.java @@ -19,16 +19,15 @@ import java.io.File; import java.io.IOException; -import java.util.Enumeration; import java.util.HashSet; import java.util.Set; import org.apache.yetus.audience.InterfaceAudience; /** - * The actual class for operating on log4j. + * The actual class for operating on log4j2. * <p/> * This class will depend on log4j directly, so callers should not use this class directly to avoid - * introducing log4j dependencies to downstream users. Please call the methods in + * introducing log4j2 dependencies to downstream users. Please call the methods in * {@link Log4jUtils}, as they will call the methods here through reflection. 
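 * <p>
 * As a minimal sketch (the logger name and level are arbitrary examples), the log4j2
 * replacement for the old {@code org.apache.log4j.Logger.setLevel} call removed elsewhere in
 * this patch is:
 * <pre>
 * org.apache.logging.log4j.core.config.Configurator.setLevel("org.apache.hadoop.hbase",
 *   org.apache.logging.log4j.Level.DEBUG);
 * </pre>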
*/ @InterfaceAudience.Private @@ -38,32 +37,53 @@ private InternalLog4jUtils() { } static void setLogLevel(String loggerName, String levelName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - org.apache.log4j.Level level = org.apache.log4j.Level.toLevel(levelName.toUpperCase()); + org.apache.logging.log4j.Level level = + org.apache.logging.log4j.Level.toLevel(levelName.toUpperCase()); if (!level.toString().equalsIgnoreCase(levelName)) { throw new IllegalArgumentException("Unsupported log level " + levelName); } - logger.setLevel(level); + org.apache.logging.log4j.core.config.Configurator.setLevel(loggerName, level); } static String getEffectiveLevel(String loggerName) { - org.apache.log4j.Logger logger = org.apache.log4j.LogManager.getLogger(loggerName); - return logger.getEffectiveLevel().toString(); + org.apache.logging.log4j.Logger logger = + org.apache.logging.log4j.LogManager.getLogger(loggerName); + return logger.getLevel().name(); } static Set<File> getActiveLogFiles() throws IOException { Set<File> ret = new HashSet<>(); - org.apache.log4j.Appender a; - @SuppressWarnings("unchecked") - Enumeration<org.apache.log4j.Appender> e = - org.apache.log4j.Logger.getRootLogger().getAllAppenders(); - while (e.hasMoreElements()) { - a = e.nextElement(); - if (a instanceof org.apache.log4j.FileAppender) { - org.apache.log4j.FileAppender fa = (org.apache.log4j.FileAppender) a; - String filename = fa.getFile(); - ret.add(new File(filename)); - } + org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + if (!(logger instanceof org.apache.logging.log4j.core.Logger)) { + return ret; + } + org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger; + for (org.apache.logging.log4j.core.Appender appender : coreLogger.getAppenders().values()) { + if (appender instanceof org.apache.logging.log4j.core.appender.FileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.FileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.AbstractFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.AbstractFileAppender<?>) appender).getFileName(); + ret.add(new File(fileName)); + } else if (appender instanceof org.apache.logging.log4j.core.appender.RollingFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RollingFileAppender) appender).getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.RandomAccessFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.RandomAccessFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } else + if (appender instanceof org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) { + String fileName = + ((org.apache.logging.log4j.core.appender.MemoryMappedFileAppender) appender) + .getFileName(); + ret.add(new File(fileName)); + } } return ret; } diff --git a/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java new file mode 100644 index 000000000000..7b3876ce0833 --- /dev/null +++ b/hbase-logging/src/test/java/org/apache/log4j/FileAppender.java @@ -0,0 +1,288 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.log4j; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.io.Writer; + +/** + * Just a copy of the old log4j12 FileAppender. The ContainerLogAppender for YARN NodeManager needs + * this class, but the log4j-1.2-api bridge does not provide it, which causes the UTs in the + * hbase-mapreduce module to fail if we start a separate MR cluster. + */ +public class FileAppender extends WriterAppender { + + /** + * Controls file truncation. The default value for this variable is <code>true</code>, meaning + * that by default a <code>FileAppender</code> will append to an existing file and not truncate + * it. + * <p> + * This option is meaningful only if the FileAppender opens the file. + */ + protected boolean fileAppend = true; + + /** + * The name of the log file. + */ + protected String fileName = null; + + /** + * Do we do bufferedIO? + */ + protected boolean bufferedIO = false; + + /** + * Determines the size of the IO buffer. Default is 8K. + */ + protected int bufferSize = 8 * 1024; + + /** + * The default constructor does not do anything. + */ + public FileAppender() { + } + + /** + * Instantiate a <code>FileAppender</code> and open the file designated by <code>fileName</code>. + * The opened filename will become the output destination for this appender. + * <p> + * If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file + * designated by <code>fileName</code> will be truncated before being opened. + * <p> + * If the <code>bufferedIO</code> parameter is <code>true</code>, then buffered IO will be used to + * write to the output file. + */ + public FileAppender(Layout layout, String fileName, boolean append, boolean bufferedIO, + int bufferSize) throws IOException { + this.layout = layout; + this.setFile(fileName, append, bufferedIO, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by <code>fileName</code>. The opened + * filename will become the output destination for this appender. + * <p> + * If the <code>append</code> parameter is true, the file will be appended to. Otherwise, the file + * designated by <code>fileName</code> will be truncated before being opened. + */ + public FileAppender(Layout layout, String fileName, boolean append) throws IOException { + this.layout = layout; + this.setFile(fileName, append, false, bufferSize); + } + + /** + * Instantiate a FileAppender and open the file designated by <code>filename</code>. The opened + * filename will become the output destination for this appender. + * <p> + * The file will be appended to.
+ */ + public FileAppender(Layout layout, String fileName) throws IOException { + this(layout, fileName, true); + } + + /** + * The <b>File</b> property takes a string value which should be the name of the file to append + * to. + * <p> + * <font color="#DD0044"><b>Note that the special values "System.out" or "System.err" are no + * longer honored.</b></font> + * <p> + * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setFile(String file) { + // Trim spaces from both ends. The user probably does not want + // trailing spaces in file names. + String val = file.trim(); + fileName = val; + } + + /** + * Returns the value of the <b>Append</b> option. + */ + public boolean getAppend() { + return fileAppend; + } + + /** Returns the value of the <b>File</b> option. */ + public String getFile() { + return fileName; + } + + /** + * If the value of <b>File</b> is not <code>null</code>, then {@link #setFile} is called with the + * values of <b>File</b> and <b>Append</b> properties. + * @since 0.8.1 + */ + @Override + public void activateOptions() { + if (fileName != null) { + try { + setFile(fileName, fileAppend, bufferedIO, bufferSize); + } catch (java.io.IOException e) { + errorHandler.error("setFile(" + fileName + "," + fileAppend + ") call failed.", e, + org.apache.log4j.spi.ErrorCode.FILE_OPEN_FAILURE); + } + } + } + + /** + * Closes the previously opened file. + */ + protected void closeFile() { + if (this.qw != null) { + try { + this.qw.close(); + } catch (java.io.IOException e) { + if (e instanceof InterruptedIOException) { + Thread.currentThread().interrupt(); + } + // Exceptionally, it does not make sense to delegate to an + // ErrorHandler, since a closed appender is basically dead. + } + } + } + + /** + * Get the value of the <b>BufferedIO</b> option. + * <p> + * BufferedIO will significantly increase performance on heavily loaded systems. + */ + public boolean getBufferedIO() { + return this.bufferedIO; + } + + /** + * Get the size of the IO buffer. + */ + public int getBufferSize() { + return this.bufferSize; + } + + /** + * The <b>Append</b> option takes a boolean value. It is set to <code>true</code> by default. If + * true, then <code>File</code> will be opened in append mode by {@link #setFile setFile} (see + * above). Otherwise, {@link #setFile setFile} will open <code>File</code> in truncate mode. + * <p> + * Note: Actual opening of the file is made when {@link #activateOptions} is called, not when the + * options are set. + */ + public void setAppend(boolean flag) { + fileAppend = flag; + } + + /** + * The <b>BufferedIO</b> option takes a boolean value. It is set to <code>false</code> by default. + * If true, then <code>File</code> will be opened and the resulting {@link java.io.Writer} wrapped + * around a {@link BufferedWriter}. BufferedIO will significantly increase performance on heavily + * loaded systems. + */ + public void setBufferedIO(boolean bufferedIO) { + this.bufferedIO = bufferedIO; + if (bufferedIO) { + immediateFlush = false; + } + } + + /** + * Set the size of the IO buffer. + */ + public void setBufferSize(int bufferSize) { + this.bufferSize = bufferSize; + } + + /** + * <p> + * Sets and <i>opens</i> the file where the log output will go. The specified file must be + * writable. + * <p> + * If there was already an opened file, then the previous file is closed first. + * <p> + * <b>Do not use this method directly.
To configure a FileAppender or one of its subclasses, set + * its properties one by one and then call activateOptions.</b> + * @param fileName The path to the log file. + * @param append If true, will append to fileName; otherwise, will truncate fileName. + */ + public synchronized void setFile(String fileName, boolean append, boolean bufferedIO, + int bufferSize) throws IOException { + + // It does not make sense to have immediate flush and bufferedIO. + if (bufferedIO) { + setImmediateFlush(false); + } + + reset(); + FileOutputStream ostream = null; + try { + // + // attempt to create file + // + ostream = new FileOutputStream(fileName, append); + } catch (FileNotFoundException ex) { + // + // if parent directory does not exist then + // attempt to create it and try to create file + // see bug 9150 + // + String parentName = new File(fileName).getParent(); + if (parentName != null) { + File parentDir = new File(parentName); + if (!parentDir.exists() && parentDir.mkdirs()) { + ostream = new FileOutputStream(fileName, append); + } else { + throw ex; + } + } else { + throw ex; + } + } + Writer fw = createWriter(ostream); + if (bufferedIO) { + fw = new BufferedWriter(fw, bufferSize); + } + this.setQWForFiles(fw); + this.fileName = fileName; + this.fileAppend = append; + this.bufferedIO = bufferedIO; + this.bufferSize = bufferSize; + writeHeader(); + } + + /** + * Sets the quiet writer being used. This method is overridden by {@code RollingFileAppender}. + */ + protected void setQWForFiles(Writer writer) { + this.qw = new org.apache.log4j.helpers.QuietWriter(writer, errorHandler); + } + + /** + * Close any previously opened file and call the parent's <code>reset</code>. + */ + @Override + protected void reset() { + closeFile(); + this.fileName = null; + super.reset(); + } +} diff --git a/hbase-logging/src/test/resources/log4j.properties b/hbase-logging/src/test/resources/log4j.properties deleted file mode 100644 index c322699ced24..000000000000 --- a/hbase-logging/src/test/resources/log4j.properties +++ /dev/null @@ -1,68 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hbase.root.logger=INFO,console -hbase.log.dir=. -hbase.log.file=hbase.log - -# Define the root logger to the system property "hbase.root.logger".
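A minimal usage sketch of the copied FileAppender above may help orient readers. It is illustrative only: it assumes org.apache.log4j.PatternLayout and the WriterAppender plumbing supplied by the log4j-1.2-api bridge are on the test classpath, and it follows the contract spelled out in the class javadoc, namely that properties are set first and the file is opened only by activateOptions().

import org.apache.log4j.FileAppender;
import org.apache.log4j.PatternLayout;

public class FileAppenderSketch {
  public static void main(String[] args) throws Exception {
    FileAppender appender = new FileAppender();
    appender.setLayout(new PatternLayout("%d{ISO8601} %-5p [%t] %c{2}: %m%n"));
    appender.setFile("target/sketch.log"); // placeholder path
    appender.setAppend(true);              // append rather than truncate
    appender.setBufferedIO(false);         // buffered IO would disable immediate flush
    appender.activateOptions();            // only now is the file actually opened
    // ... attach to a logger or append events here ...
    appender.close();
  }
}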
-log4j.rootLogger=${hbase.root.logger} - -# Logging Threshold -log4j.threshold=ALL - -# -# Daily Rolling File Appender -# -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %C{2}(%L): %m%n - -# Custom Logging levels - -#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG - -log4j.logger.org.apache.hadoop=WARN -log4j.logger.org.apache.zookeeper=ERROR -log4j.logger.org.apache.hadoop.hbase=DEBUG - -#These settings are workarounds against spurious logs from the minicluster. -#See HBASE-4709 -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsConfig=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSinkAdapter=WARN -log4j.logger.org.apache.hadoop.metrics2.impl.MetricsSystemImpl=WARN -log4j.logger.org.apache.hadoop.metrics2.util.MBeans=WARN -# Enable this to get detailed connection error/retry logging. -# log4j.logger.org.apache.hadoop.hbase.client.ConnectionImplementation=TRACE diff --git a/hbase-logging/src/test/resources/log4j2.properties b/hbase-logging/src/test/resources/log4j2.properties new file mode 100644 index 000000000000..f63c8701e35f --- /dev/null +++ b/hbase-logging/src/test/resources/log4j2.properties @@ -0,0 +1,68 @@ +#/** +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# */ + +status = debug +dest = err +name = PropertiesConfig + +appender.console.type = Console +appender.console.target = SYSTEM_ERR +appender.console.name = Console +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{ISO8601} %-5p [%t] %C{2}(%L): %m%n + +rootLogger = INFO,Console + +logger.hadoop.name = org.apache.hadoop +logger.hadoop.level = WARN + +logger.zookeeper.name = org.apache.zookeeper +logger.zookeeper.level = ERROR + +logger.hbase.name = org.apache.hadoop.hbase +logger.hbase.level = DEBUG + +# These settings are workarounds against spurious logs from the minicluster. 
See HBASE-4709 +logger.MetricsConfig.name = org.apache.hadoop.metrics2.impl.MetricsConfig +logger.MetricsConfig.level = WARN + +logger.MetricsSinkAdapter.name = org.apache.hadoop.metrics2.impl.MetricsSinkAdapter +logger.MetricsSinkAdapter.level = WARN + +logger.MetricsSystemImpl.name = org.apache.hadoop.metrics2.impl.MetricsSystemImpl +logger.MetricsSystemImpl.level = WARN + +logger.MBeans.name = org.apache.hadoop.metrics2.util.MBeans +logger.MBeans.level = WARN + +logger.directory.name = org.apache.directory +logger.directory.level = WARN +logger.directory.additivity = false + +logger.netty.name = org.apache.hbase.thirdparty.io.netty.channel +logger.netty.level = DEBUG + +# For testing where we want to capture the log message of these special loggers +logger.FailedServers.name = org.apache.hadoop.hbase.ipc.FailedServers +logger.FailedServers.level = DEBUG + +logger.RSRpcServices.name = org.apache.hadoop.hbase.regionserver.RSRpcServices +logger.RSRpcServices.level = DEBUG + +logger.TestJul2Slf4j.name = org.apache.hadoop.hbase.logging.TestJul2Slf4j +logger.TestJul2Slf4j.level = DEBUG diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml index 5ae021cd2f4a..b2183b36c52f 100644 --- a/hbase-mapreduce/pom.xml +++ b/hbase-mapreduce/pom.xml @@ -306,13 +306,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index 63c1760626f0..7614b8376d07 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -25,24 +25,16 @@ import java.util.List; import java.util.Properties; import java.util.concurrent.atomic.AtomicReference; - import javax.crypto.spec.SecretKeySpec; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.zookeeper.ZooKeeper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Durability; @@ -52,6 +44,7 @@ import org.apache.hadoop.hbase.io.crypto.Encryption; import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.log.HBaseMarkers; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.security.EncryptionUtil; import org.apache.hadoop.hbase.security.HBaseKerberosUtils; @@ -61,6 +54,10 @@ import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.hbase.util.test.LoadTestDataGeneratorWithACL; import org.apache.hadoop.util.ToolRunner; +import org.apache.yetus.audience.InterfaceAudience; +import org.apache.zookeeper.ZooKeeper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.org.apache.commons.cli.AlreadySelectedException; import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; @@ -583,7 +580,7 @@ public void initTestTable() throws IOException { @Override protected int doWork() throws IOException { if (!isVerbose) { - LogManager.getLogger(ZooKeeper.class.getName()).setLevel(Level.WARN); + Log4jUtils.setLogLevel(ZooKeeper.class.getName(), "WARN"); } if (numTables > 1) { return parallelLoadTables(); diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml index 9bd2f13b0d73..2754f5fdf9d0 100644 --- a/hbase-metrics-api/pom.xml +++ b/hbase-metrics-api/pom.xml @@ -133,13 +133,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml index 280c0c80fd5d..8c04df45bcf6 100644 --- a/hbase-metrics/pom.xml +++ b/hbase-metrics/pom.xml @@ -141,13 +141,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml index d9e784c8f960..36f6cdb90f79 100644 --- a/hbase-procedure/pom.xml +++ b/hbase-procedure/pom.xml @@ -130,13 +130,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-replication/pom.xml 
b/hbase-replication/pom.xml index 946bffdc4464..973ee627b315 100644 --- a/hbase-replication/pom.xml +++ b/hbase-replication/pom.xml @@ -136,13 +136,18 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index c88cee8a040b..271d0398965b 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -372,13 +372,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index d5a5a0587c64..a3da37907158 100644 --- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -181,13 +181,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml index 53e5c32a6dc4..6148f8448244 100644 --- a/hbase-server/pom.xml +++ b/hbase-server/pom.xml @@ -533,13 +533,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>test</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> </dependencies> diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 72e12043c0e2..20ed888e056e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -695,7 +695,6 @@ public MiniDFSCluster startMiniDFSClusterForTestWAL(int namenodePort) throws IOE * This is used before starting HDFS and map-reduce mini-clusters Run something like the below to * check for the likes of '/tmp' references -- i.e. references outside of the test data dir -- in * the conf. - * * <pre> * Configuration conf = TEST_UTIL.getConfiguration(); * for (Iterator<Map.Entry<String, String>> i = conf.iterator(); i.hasNext();) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java index 57cfbeca6e29..028b8fd8c30f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatchRetryImmediately.java @@ -29,11 +29,10 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -61,10 +60,13 @@ public class TestAsyncTableBatchRetryImmediately { private static AsyncConnection CONN; + private static String LOG_LEVEL; + @BeforeClass public static void setUp() throws Exception { // disable the debug log to avoid flooding the output - LogManager.getLogger(AsyncRegionLocatorHelper.class).setLevel(Level.INFO); + LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName()); + Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO"); UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, 1024); UTIL.startMiniCluster(1); Table table = UTIL.createTable(TABLE_NAME, FAMILY); @@ -79,6 +81,9 @@ public static void setUp() throws Exception { @AfterClass public static void tearDown() throws Exception { + if (LOG_LEVEL != null) { + Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL); + } CONN.close(); UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java index d8baa8d1cb16..34707f0a0eee 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.ipc.RpcServerInterface; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.metrics.BaseSource; import org.apache.hadoop.hbase.regionserver.HRegionServer; import 
org.apache.hadoop.hbase.test.MetricsAssertHelper; @@ -65,15 +66,18 @@ public class TestMultiRespectsLimits { CompatibilityFactory.getInstance(MetricsAssertHelper.class); private final static byte[] FAMILY = Bytes.toBytes("D"); public static final int MAX_SIZE = 100; + private static String LOG_LEVEL; @Rule public TestName name = new TestName(); @BeforeClass public static void setUpBeforeClass() throws Exception { - TEST_UTIL.getConfiguration().setLong( - HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, - MAX_SIZE); + // disable the debug log to avoid flooding the output + LOG_LEVEL = Log4jUtils.getEffectiveLevel(AsyncRegionLocatorHelper.class.getName()); + Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), "INFO"); + TEST_UTIL.getConfiguration().setLong(HConstants.HBASE_SERVER_SCANNER_MAX_RESULT_SIZE_KEY, + MAX_SIZE); // Only start on regionserver so that all regions are on the same server. TEST_UTIL.startMiniCluster(1); @@ -81,6 +85,9 @@ public static void setUpBeforeClass() throws Exception { @AfterClass public static void tearDownAfterClass() throws Exception { + if (LOG_LEVEL != null) { + Log4jUtils.setLogLevel(AsyncRegionLocatorHelper.class.getName(), LOG_LEVEL); + } TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java index 941d921481d3..a45804a45159 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.testclassification.RPCTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.After; @@ -82,10 +83,8 @@ public void setUp() throws IOException { // Setup server for both protocols this.conf = HBaseConfiguration.create(); this.conf.set(RpcServerFactory.CUSTOM_RPC_SERVER_IMPL_CONF_KEY, rpcServerImpl); - org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer") - .setLevel(org.apache.log4j.Level.ERROR); - org.apache.log4j.Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace") - .setLevel(org.apache.log4j.Level.TRACE); + Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer", "ERROR"); + Log4jUtils.setLogLevel("org.apache.hadoop.ipc.HBaseServer.trace", "TRACE"); // Create server side implementation // Get RPC server for server side implementation this.server = RpcServerFactory.createRpcServer(null, "testrpc", diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java index 2d66106a3d2b..122517574f7c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerTraceLogging.java @@ -35,17 +35,19 @@ public class TestRpcServerTraceLogging { @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule - .forClass(TestRpcServerTraceLogging.class); + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestRpcServerTraceLogging.class); - static org.apache.log4j.Logger rpcServerLog = org.apache.log4j.Logger.getLogger(RpcServer.class); + 
private static final org.apache.logging.log4j.core.Logger rpcServerLog = + (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(RpcServer.class); static final String TRACE_LOG_MSG = - "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" - + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " - + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " - + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " - + "true client_handles_heartbeats: true track_scan_metrics: false"; + "This is dummy message for testing:: region { type: REGION_NAME value: \"hbase:meta,,1\" }" + + " scan { column { family: \"info\" } time_range { from: 0 to: 9223372036854775807 } " + + "max_versions: 1 cache_blocks: true max_result_size: 2097152 caching: 2147483647 } " + + "number_of_rows: 2147483647 close_scanner: false client_handles_partials: " + + "true client_handles_heartbeats: true track_scan_metrics: false"; static final int TRACE_LOG_LENGTH = TRACE_LOG_MSG.length(); @@ -62,7 +64,7 @@ public static void setUp() { @Test public void testLoggingWithTraceOff() { conf.setInt("hbase.ipc.trace.log.max.length", 250); - rpcServerLog.setLevel(org.apache.log4j.Level.DEBUG); + rpcServerLog.setLevel(org.apache.logging.log4j.Level.DEBUG); String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG); assertEquals(150 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length()); @@ -72,7 +74,7 @@ public void testLoggingWithTraceOff() { @Test public void testLoggingWithTraceOn() { conf.setInt("hbase.ipc.trace.log.max.length", 250); - rpcServerLog.setLevel(org.apache.log4j.Level.TRACE); + rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE); String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG); assertEquals(250 + RpcServer.KEY_WORD_TRUNCATED.length(), truncatedString.length()); @@ -82,7 +84,7 @@ public void testLoggingWithTraceOn() { @Test public void testLoggingWithTraceOnLargeMax() { conf.setInt("hbase.ipc.trace.log.max.length", 2000); - rpcServerLog.setLevel(org.apache.log4j.Level.TRACE); + rpcServerLog.setLevel(org.apache.logging.log4j.Level.TRACE); String truncatedString = mockRpcServer.truncateTraceLog(TRACE_LOG_MSG); assertEquals(TRACE_LOG_LENGTH, truncatedString.length()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java index e2dcac08122c..5e2679e41118 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java @@ -20,14 +20,16 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.concurrent.BlockingDeque; +import java.util.concurrent.LinkedBlockingDeque; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtility; @@ -36,10 +38,6 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.log4j.Appender; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -47,8 +45,9 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import org.mockito.ArgumentCaptor; import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; @@ -80,7 +79,7 @@ public class TestMultiLogThreshold { private HRegionServer rs; private RSRpcServices services; - private Appender appender; + private org.apache.logging.log4j.core.Appender appender; @Parameterized.Parameter public static boolean rejectLargeBatchOp; @@ -90,6 +89,21 @@ public static List<Object[]> params() { return Arrays.asList(new Object[] { false }, new Object[] { true }); } + private final class LevelAndMessage { + final org.apache.logging.log4j.Level level; + + final String msg; + + public LevelAndMessage(org.apache.logging.log4j.Level level, String msg) { + this.level = level; + this.msg = msg; + } + + } + + // log4j2 will reuse the LogEvent so we need to copy the level and message out. + private BlockingDeque<LevelAndMessage> logs = new LinkedBlockingDeque<>(); + @Before public void setupTest() throws Exception { util = new HBaseTestingUtility(); @@ -100,13 +114,28 @@ public void setupTest() throws Exception { util.startMiniCluster(); util.createTable(NAME, TEST_FAM); rs = util.getRSForFirstRegionInTable(NAME); - appender = mock(Appender.class); - LogManager.getLogger(RSRpcServices.class).addAppender(appender); + appender = mock(org.apache.logging.log4j.core.Appender.class); + when(appender.getName()).thenReturn("mockAppender"); + when(appender.isStarted()).thenReturn(true); + doAnswer(new Answer<Void>() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + org.apache.logging.log4j.core.LogEvent logEvent = + invocation.getArgument(0, org.apache.logging.log4j.core.LogEvent.class); + logs.add( + new LevelAndMessage(logEvent.getLevel(), logEvent.getMessage().getFormattedMessage())); + return null; + } + }).when(appender).append(any(org.apache.logging.log4j.core.LogEvent.class)); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(RSRpcServices.class)).addAppender(appender); } @After public void tearDown() throws Exception { - LogManager.getLogger(RSRpcServices.class).removeAppender(appender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(RSRpcServices.class)).removeAppender(appender); util.shutdownMiniCluster(); } @@ -149,17 +178,16 @@ private void sendMultiRequest(int rows, ActionType actionType) } private void assertLogBatchWarnings(boolean expected) { - ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class); - verify(appender, atLeastOnce()).doAppend(captor.capture()); + assertFalse(logs.isEmpty()); boolean actual = false; - for (LoggingEvent event : captor.getAllValues()) { - if (event.getLevel() == Level.WARN && - event.getRenderedMessage().contains("Large 
batch operation detected")) { + for (LevelAndMessage event : logs) { + if (event.level == org.apache.logging.log4j.Level.WARN && + event.msg.contains("Large batch operation detected")) { actual = true; break; } } - reset(appender); + logs.clear(); assertEquals(expected, actual); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java index 4d9bb68ac0c0..3524a72a64cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java @@ -42,11 +42,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.apache.hadoop.hbase.util.Threads; -import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.log4j.Appender; -import org.apache.log4j.Layout; -import org.apache.log4j.PatternLayout; -import org.apache.log4j.WriterAppender; import org.apache.zookeeper.KeeperException; import org.junit.After; import org.junit.Before; @@ -56,6 +51,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; + @Category(LargeTests.class) public class TestRegionServerReportForDuty { @@ -91,26 +88,15 @@ public void tearDown() throws Exception { testUtil.shutdownMiniDFSCluster(); } - /** - * LogCapturer is similar to {@link org.apache.hadoop.test.GenericTestUtils.LogCapturer} - * except that this implementation has a default appender to the root logger. - * Hadoop 2.8+ supports the default appender in the LogCapture it ships and this can be replaced. - * TODO: This class can be removed after we upgrade Hadoop dependency. - */ - static class LogCapturer { + private static class LogCapturer { private StringWriter sw = new StringWriter(); - private WriterAppender appender; - private org.apache.log4j.Logger logger; + private org.apache.logging.log4j.core.appender.WriterAppender appender; + private org.apache.logging.log4j.core.Logger logger; - LogCapturer(org.apache.log4j.Logger logger) { + LogCapturer(org.apache.logging.log4j.core.Logger logger) { this.logger = logger; - Appender defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("stdout"); - if (defaultAppender == null) { - defaultAppender = org.apache.log4j.Logger.getRootLogger().getAppender("console"); - } - final Layout layout = (defaultAppender == null) ? new PatternLayout() : - defaultAppender.getLayout(); - this.appender = new WriterAppender(layout, sw); + this.appender = org.apache.logging.log4j.core.appender.WriterAppender.newBuilder() + .setName("test").setTarget(sw).build(); this.logger.addAppender(this.appender); } @@ -146,7 +132,9 @@ public void testReportForDutyBackoff() throws IOException, InterruptedException master = cluster.addMaster(); master.start(); - LogCapturer capturer = new LogCapturer(org.apache.log4j.Logger.getLogger(HRegionServer.class)); + LogCapturer capturer = + new LogCapturer((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger(HRegionServer.class)); // Set sleep interval relatively low so that exponential backoff is more demanding. 
int msginterval = 100; cluster.getConfiguration().setInt("hbase.regionserver.msginterval", msginterval); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java index 13c7a6bc1039..d5b7951285a5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java @@ -26,24 +26,36 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; - import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.client.RegionInfoBuilder; +import org.apache.hadoop.hbase.logging.Log4jUtils; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.ReflectionUtils; +import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +/** + * This is not a unit test. It is not run as part of the general unit test suite. It is for + * comparing compaction policies. You must run it explicitly; + * e.g. mvn test -Dtest=PerfTestCompactionPolicies + */ @Category({RegionServerTests.class, MediumTests.class}) @RunWith(Parameterized.class) public class PerfTestCompactionPolicies extends MockStoreFileGenerator { + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(PerfTestCompactionPolicies.class); + private final RatioBasedCompactionPolicy cp; private final StoreFileListGenerator generator; private final HStore store; @@ -120,12 +132,9 @@ public PerfTestCompactionPolicies( this.ratio = inRatio; // Hide lots of logging so the system out is usable as a tab delimited file. - org.apache.log4j.Logger.getLogger(CompactionConfiguration.class). - setLevel(org.apache.log4j.Level.ERROR); - org.apache.log4j.Logger.getLogger(RatioBasedCompactionPolicy.class). 
- setLevel(org.apache.log4j.Level.ERROR); - - org.apache.log4j.Logger.getLogger(cpClass).setLevel(org.apache.log4j.Level.ERROR); + Log4jUtils.setLogLevel(CompactionConfiguration.class.getName(), "ERROR"); + Log4jUtils.setLogLevel(RatioBasedCompactionPolicy.class.getName(), "ERROR"); + Log4jUtils.setLogLevel(cpClass.getName(), "ERROR"); Configuration configuration = HBaseConfiguration.create(); @@ -197,7 +206,8 @@ private HStore createMockStore() { HStore s = mock(HStore.class); when(s.getStoreFileTtl()).thenReturn(Long.MAX_VALUE); when(s.getBlockingFileCount()).thenReturn(7L); + when(s.getRegionInfo()).thenReturn(RegionInfoBuilder.FIRST_META_REGIONINFO); return s; } -} +} \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java index 546643542aaa..9eb543858d1d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java @@ -27,15 +27,17 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isA; import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -50,9 +52,6 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.ToolRunner; -import org.apache.log4j.Appender; -import org.apache.log4j.LogManager; -import org.apache.log4j.spi.LoggingEvent; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; @@ -60,19 +59,14 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import org.junit.runner.RunWith; import org.mockito.ArgumentMatcher; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.apache.hbase.thirdparty.com.google.common.collect.Iterables; -@RunWith(MockitoJUnitRunner.class) -@Category({LargeTests.class}) +@Category({ LargeTests.class }) public class TestCanaryTool { @ClassRule public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCanaryTool.class); + HBaseClassTestRule.forClass(TestCanaryTool.class); private HBaseTestingUtility testingUtility; private static final byte[] FAMILY = Bytes.toBytes("f"); @@ -81,22 +75,26 @@ public class TestCanaryTool { @Rule public TestName name = new TestName(); + private org.apache.logging.log4j.core.Appender mockAppender; + @Before public void setUp() throws Exception { testingUtility = new HBaseTestingUtility(); testingUtility.startMiniCluster(); - LogManager.getRootLogger().addAppender(mockAppender); + mockAppender = mock(org.apache.logging.log4j.core.Appender.class); + when(mockAppender.getName()).thenReturn("mockAppender"); + when(mockAppender.isStarted()).thenReturn(true); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender); } 
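The setUp/tearDown pair above, like the similar code in TestMultiLogThreshold earlier in this patch, follows one pattern worth calling out: a log4j2 core Logger only drives appenders that report a name and a started state, so a Mockito mock needs both stubs before addAppender will deliver events to it. A condensed, standalone sketch of the pattern (not the test's exact code):

org.apache.logging.log4j.core.Appender mockAppender =
    org.mockito.Mockito.mock(org.apache.logging.log4j.core.Appender.class);
org.mockito.Mockito.when(mockAppender.getName()).thenReturn("mockAppender");
org.mockito.Mockito.when(mockAppender.isStarted()).thenReturn(true);
org.apache.logging.log4j.core.Logger logger =
    (org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager
        .getLogger("org.apache.hadoop.hbase");
logger.addAppender(mockAppender);
try {
  // exercise code that logs under org.apache.hadoop.hbase ...
  // assert on LogEvent.getMessage().getFormattedMessage(), copying values out
  // eagerly: log4j2 may reuse mutable LogEvent instances (see TestMultiLogThreshold).
} finally {
  logger.removeAppender(mockAppender); // detach so later tests are unaffected
}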
@After public void tearDown() throws Exception { testingUtility.shutdownMiniCluster(); - LogManager.getRootLogger().removeAppender(mockAppender); + ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager + .getLogger("org.apache.hadoop.hbase")).removeAppender(mockAppender); } - @Mock - Appender mockAppender; - @Test public void testBasicZookeeperCanaryWorks() throws Exception { final String[] args = { "-t", "10000", "-zookeeper" }; @@ -105,7 +103,8 @@ public void testBasicZookeeperCanaryWorks() throws Exception { @Test public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception { - final String[] args = { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" }; + final String[] args = + { "-t", "10000", "-zookeeper", "-treatFailureAsError", "-permittedZookeeperFailures", "1" }; testZookeeperCanaryWithArgs(args); } @@ -114,7 +113,7 @@ public void testBasicCanaryWorks() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows - for (int i=0; i<1000; i++) { + for (int i = 0; i < 1000; i++) { byte[] iBytes = Bytes.toBytes(i); Put p = new Put(iBytes); p.addColumn(FAMILY, COLUMN, iBytes); @@ -155,7 +154,7 @@ public void testCanaryRegionTaskReadAllCF() throws Exception { // the test table has two column family. If readAllCF set true, // we expect read count is double of region count int expectedReadCount = - readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions(); + readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions(); assertEquals("canary region success count should equal total expected read count", expectedReadCount, sink.getReadSuccessCount()); Map<String, List<CanaryTool.RegionTaskResult>> regionMap = sink.getRegionMap(); @@ -183,7 +182,7 @@ public void testCanaryRegionTaskResult() throws Exception { TableName tableName = TableName.valueOf("testCanaryRegionTaskResult"); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows - for (int i=0; i<1000; i++) { + for (int i = 0; i < 1000; i++) { byte[] iBytes = Bytes.toBytes(i); Put p = new Put(iBytes); p.addColumn(FAMILY, COLUMN, iBytes); @@ -212,7 +211,7 @@ public void testCanaryRegionTaskResult() throws Exception { assertFalse("verify region map has size > 0", regionMap.isEmpty()); for (String regionName : regionMap.keySet()) { - for (CanaryTool.RegionTaskResult res: regionMap.get(regionName)) { + for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) { assertNotNull("verify getRegionNameAsString()", regionName); assertNotNull("verify getRegionInfo()", res.getRegionInfo()); assertNotNull("verify getTableName()", res.getTableName()); @@ -235,24 +234,25 @@ public void testCanaryRegionTaskResult() throws Exception { // Ignore this test. It fails w/ the below on some mac os x. // [ERROR] Failures: - // [ERROR] TestCanaryTool.testReadTableTimeouts:216 + // [ERROR] TestCanaryTool.testReadTableTimeouts:216 // Argument(s) are different! 
Wanted: // mockAppender.doAppend( // <custom argument matcher> - // ); - // -> at org.apache.hadoop.hbase.tool.TestCanaryTool - // .testReadTableTimeouts(TestCanaryTool.java:216) - // Actual invocations have different arguments: - // mockAppender.doAppend( - // org.apache.log4j.spi.LoggingEvent@2055cfc1 - // ); - // ) - // ) + // ); + // -> at org.apache.hadoop.hbase.tool.TestCanaryTool + // .testReadTableTimeouts(TestCanaryTool.java:216) + // Actual invocations have different arguments: + // mockAppender.doAppend( + // org.apache.log4j.spi.LoggingEvent@2055cfc1 + // ); + // ) + // ) // - @org.junit.Ignore @Test + @org.junit.Ignore + @Test public void testReadTableTimeouts() throws Exception { - final TableName [] tableNames = new TableName[] {TableName.valueOf(name.getMethodName() + "1"), - TableName.valueOf(name.getMethodName() + "2")}; + final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"), + TableName.valueOf(name.getMethodName() + "2") }; // Create 2 test tables. for (int j = 0; j < 2; j++) { Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY }); @@ -269,8 +269,8 @@ public void testReadTableTimeouts() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," + tableNames[1].getNameAsString() + "=0"; - String[] args = {"-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1", - name.getMethodName() + "2"}; + String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1", + name.getMethodName() + "2" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class)); for (int i = 0; i < 2; i++) { @@ -280,18 +280,21 @@ public void testReadTableTimeouts() throws Exception { sink.getReadLatencyMap().get(tableNames[i].getNameAsString())); } // One table's timeout is set for 0 ms and thus, should lead to an error. 
- verify(mockAppender, times(1)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() { - @Override - public boolean matches(LoggingEvent argument) { - return argument.getRenderedMessage().contains("exceeded the configured read timeout."); - } - })); - verify(mockAppender, times(2)).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() { - @Override - public boolean matches(LoggingEvent argument) { - return argument.getRenderedMessage().contains("Configured read timeout"); - } - })); + verify(mockAppender, times(1)) + .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() { + @Override + public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { + return argument.getMessage().getFormattedMessage() + .contains("exceeded the configured read timeout."); + } + })); + verify(mockAppender, times(2)) + .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() { + @Override + public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { + return argument.getMessage().getFormattedMessage().contains("Configured read timeout"); + } + })); } @Test @@ -299,43 +302,47 @@ public void testWriteTableTimeout() throws Exception { ExecutorService executor = new ScheduledThreadPoolExecutor(1); CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink()); CanaryTool canary = new CanaryTool(executor, sink); - String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE)}; + String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); assertNotEquals("verify non-null write latency", null, sink.getWriteLatency()); assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency()); - verify(mockAppender, times(1)).doAppend(argThat( - new ArgumentMatcher<LoggingEvent>() { - @Override - public boolean matches(LoggingEvent argument) { - return argument.getRenderedMessage().contains("Configured write timeout"); - } - })); + verify(mockAppender, times(1)) + .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() { + @Override + public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { + return argument.getMessage().getFormattedMessage().contains("Configured write timeout"); + } + })); } - //no table created, so there should be no regions + // no table created, so there should be no regions @Test public void testRegionserverNoRegions() throws Exception { runRegionserverCanary(); - verify(mockAppender).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() { - @Override - public boolean matches(LoggingEvent argument) { - return argument.getRenderedMessage().contains("Regionserver not serving any regions"); - } - })); + verify(mockAppender) + .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() { + @Override + public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { + return argument.getMessage().getFormattedMessage() + .contains("Regionserver not serving any regions"); + } + })); } - //by creating a table, there shouldn't be any region servers not serving any regions + // by creating a table, there shouldn't be any region servers not serving any regions @Test public void testRegionserverWithRegions() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); testingUtility.createTable(tableName, new byte[][] { FAMILY }); runRegionserverCanary(); - verify(mockAppender, 
never()).doAppend(argThat(new ArgumentMatcher<LoggingEvent>() { - @Override - public boolean matches(LoggingEvent argument) { - return argument.getRenderedMessage().contains("Regionserver not serving any regions"); - } - })); + verify(mockAppender, never()) + .append(argThat(new ArgumentMatcher<org.apache.logging.log4j.core.LogEvent>() { + @Override + public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { + return argument.getMessage().getFormattedMessage() + .contains("Regionserver not serving any regions"); + } + })); } @Test @@ -343,7 +350,7 @@ public void testRawScanConfig() throws Exception { final TableName tableName = TableName.valueOf(name.getMethodName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows - for (int i=0; i<1000; i++) { + for (int i = 0; i < 1000; i++) { byte[] iBytes = Bytes.toBytes(i); Put p = new Put(iBytes); p.addColumn(FAMILY, COLUMN, iBytes); @@ -357,23 +364,20 @@ public void testRawScanConfig() throws Exception { new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration()); conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true); assertEquals(0, ToolRunner.run(conf, canary, args)); - verify(sink, atLeastOnce()) - .publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), - isA(ColumnFamilyDescriptor.class), anyLong()); + verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), + isA(ColumnFamilyDescriptor.class), anyLong()); assertEquals("verify no read error count", 0, canary.getReadFailures().size()); } private void runRegionserverCanary() throws Exception { ExecutorService executor = new ScheduledThreadPoolExecutor(1); CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink()); - String[] args = { "-t", "10000", "-regionserver"}; + String[] args = { "-t", "10000", "-regionserver" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); assertEquals("verify no read error count", 0, canary.getReadFailures().size()); } private void testZookeeperCanaryWithArgs(String[] args) throws Exception { - Integer port = - Iterables.getOnlyElement(testingUtility.getZkCluster().getClientPortList(), null); String hostPort = testingUtility.getZkCluster().getAddress().toString(); testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort + "/hbase"); ExecutorService executor = new ScheduledThreadPoolExecutor(2); @@ -381,8 +385,8 @@ private void testZookeeperCanaryWithArgs(String[] args) throws Exception { CanaryTool canary = new CanaryTool(executor, sink); assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - String baseZnode = testingUtility.getConfiguration() - .get(HConstants.ZOOKEEPER_ZNODE_PARENT, HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT); + String baseZnode = testingUtility.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT, + HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT); verify(sink, atLeastOnce()).publishReadTiming(eq(baseZnode), eq(hostPort), anyLong()); } } diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-check-invariants/pom.xml index 071f899d5607..b07c66dfd046 100644 --- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml +++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml @@ -46,12 +46,10 @@ <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-shaded-mapreduce</artifactId> - <version>${project.version}</version> </dependency> <dependency> 
<groupId>org.apache.hbase</groupId> <artifactId>hbase-shaded-client-byo-hadoop</artifactId> - <version>${project.version}</version> </dependency> <!-- parent pom defines these for children. :( :( :( --> <dependency> @@ -60,8 +58,18 @@ <scope>provided</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>provided</scope> </dependency> <!-- Test dependencies --> @@ -108,8 +116,7 @@ <excludes> <!-- We leave logging stuff alone --> <exclude>org.slf4j:*</exclude> - <exclude>log4j:*</exclude> - <exclude>ch.qos.reload4j:*</exclude> + <exclude>org.apache.logging.log4j:*</exclude> <exclude>commons-logging:*</exclude> <!-- annotations that never change --> <exclude>com.google.code.findbugs:*</exclude> diff --git a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml index b575b53e5778..5c699583faba 100644 --- a/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml +++ b/hbase-shaded/hbase-shaded-client-byo-hadoop/pom.xml @@ -1,6 +1,6 @@ <project xmlns="https://maven.apache.org/POM/4.0.0" - xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> <!-- /** * Licensed to the Apache Software Foundation (ASF) under one @@ -20,159 +20,161 @@ * limitations under the License. 
 */
 -->
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>hbase-shaded</artifactId>
-        <groupId>org.apache.hbase</groupId>
-        <version>2.6.0-SNAPSHOT</version>
-        <relativePath>..</relativePath>
-    </parent>
-    <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
-    <name>Apache HBase - Shaded - Client</name>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-            <plugin>
-                <!--Make it so assembly:single does nothing in here-->
-                <artifactId>maven-assembly-plugin</artifactId>
-                <configuration>
-                    <skipAssembly>true</skipAssembly>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-        </dependency>
-    </dependencies>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-shaded</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>2.6.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+  <artifactId>hbase-shaded-client-byo-hadoop</artifactId>
+  <name>Apache HBase - Shaded - Client</name>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+  </dependencies>
 
-    <profiles>
+  <profiles>
  <!-- These hadoop profiles should be derived from those in the hbase-client module.
      Essentially, you must list the same hadoop-* dependencies so provided dependencies
      will not be transitively included.
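      In practice a downstream consumer of this byo-hadoop artifact supplies Hadoop
      itself; a sketch of what such a consumer adds to its own pom (version purely
      illustrative):

        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-common</artifactId>
          <version>3.3.1</version>
        </dependency>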
--> - <profile> - <id>hadoop-2.0</id> - <activation> - <property> + <profile> + <id>hadoop-2.0</id> + <activation> + <property> <!--Below formatting for dev-support/generate-hadoopX-poms.sh--> - <!--h2--><name>!hadoop.profile</name> - </property> - </activation> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-jaxrs</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-xc</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - </dependencies> - </profile> + <!--h2--> + <name>!hadoop.profile</name> + </property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-jaxrs</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-xc</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + </dependencies> + </profile> <!-- profile for building against Hadoop 3.0.x. 
Activate using: mvn -Dhadoop.profile=3.0 --> - <profile> - <id>hadoop-3.0</id> - <activation> - <property> - <name>hadoop.profile</name> - <value>3.0</value> - </property> - </activation> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-jaxrs</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-xc</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - </dependencies> - </profile> - </profiles> + <profile> + <id>hadoop-3.0</id> + <activation> + <property> + <name>hadoop.profile</name> + <value>3.0</value> + </property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-jaxrs</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-xc</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + </dependencies> + </profile> + </profiles> </project> diff --git a/hbase-shaded/hbase-shaded-client/pom.xml b/hbase-shaded/hbase-shaded-client/pom.xml index de9a070ded87..e6a3a2efc1d2 100644 --- a/hbase-shaded/hbase-shaded-client/pom.xml +++ b/hbase-shaded/hbase-shaded-client/pom.xml @@ -1,6 +1,6 @@ <project xmlns="https://maven.apache.org/POM/4.0.0" - xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> <!-- /** * Licensed to the Apache Software Foundation (ASF) under one @@ -20,80 +20,78 @@ * limitations under the License. 
*/ --> - <modelVersion>4.0.0</modelVersion> - <parent> - <artifactId>hbase-shaded</artifactId> - <groupId>org.apache.hbase</groupId> - <version>2.6.0-SNAPSHOT</version> - <relativePath>..</relativePath> - </parent> - <artifactId>hbase-shaded-client</artifactId> - <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name> - <build> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-site-plugin</artifactId> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - <plugin> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-shaded</artifactId> + <groupId>org.apache.hbase</groupId> + <version>2.6.0-SNAPSHOT</version> + <relativePath>..</relativePath> + </parent> + <artifactId>hbase-shaded-client</artifactId> + <name>Apache HBase - Shaded - Client (with Hadoop bundled)</name> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <plugin> <!--Make it so assembly:single does nothing in here--> - <artifactId>maven-assembly-plugin</artifactId> - <configuration> - <skipAssembly>true</skipAssembly> - </configuration> - </plugin> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-shade-plugin</artifactId> - <executions> - <execution> - <id>aggregate-into-a-jar-with-relocated-third-parties</id> - <configuration> - <artifactSet> - <excludes> - <!-- exclude J2EE modules that come in for JDK11+ (since - hadoop-3.2.0) or modules that come in for JDK8+ but - need not be included --> - <exclude>javax.annotation:javax.annotation-api</exclude> - <exclude>javax.activation:javax.activation-api</exclude> - <exclude>jakarta.activation:jakarta.activation-api</exclude> <!-- Hadoop 3.3.1 --> - <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude> - <exclude>jakarta.annotation:jakarta.annotation-api</exclude> - <exclude>jakarta.validation:jakarta.validation-api</exclude> - <exclude>org.glassfish.hk2.external:jakarta.inject</exclude> - <!-- - Tell the shade plugin that in this case we want to include hadoop - by leaving out the exclude. 
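                      (Concretely: the parent pom's org.apache.hadoop:* exclude is the one
                      left out of this list, which is why this artifact bundles Hadoop
                      while hbase-shaded-client-byo-hadoop does not.)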
- --> - <!-- The rest of these should be kept in sync with the parent pom --> - <exclude>org.apache.hbase:hbase-resource-bundle</exclude> - <exclude>org.slf4j:*</exclude> - <exclude>com.google.code.findbugs:*</exclude> - <exclude>com.github.stephenc.findbugs:*</exclude> - <exclude>com.github.spotbugs:*</exclude> - <exclude>org.apache.htrace:*</exclude> - <exclude>org.apache.yetus:*</exclude> - <exclude>log4j:*</exclude> - <exclude>ch.qos.reload4j:*</exclude> - <exclude>commons-logging:*</exclude> - <exclude>org.javassist:*</exclude> - <exclude>io.opentelemetry:*</exclude> - </excludes> - </artifactSet> - </configuration> - </execution> - </executions> - </plugin> - </plugins> - </build> - <dependencies> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-client</artifactId> - </dependency> - </dependencies> - + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <skipAssembly>true</skipAssembly> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-shade-plugin</artifactId> + <executions> + <execution> + <id>aggregate-into-a-jar-with-relocated-third-parties</id> + <configuration> + <artifactSet> + <excludes> + <!-- exclude J2EE modules that come in for JDK11+ (since + hadoop-3.2.0) or modules that come in for JDK8+ but + need not be included --> + <exclude>javax.annotation:javax.annotation-api</exclude> + <exclude>javax.activation:javax.activation-api</exclude> + <exclude>jakarta.activation:jakarta.activation-api</exclude> <!-- Hadoop 3.3.1 --> + <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude> + <exclude>jakarta.annotation:jakarta.annotation-api</exclude> + <exclude>jakarta.validation:jakarta.validation-api</exclude> + <exclude>org.glassfish.hk2.external:jakarta.inject</exclude> + <!-- + Tell the shade plugin that in this case we want to include hadoop + by leaving out the exclude. + --> + <!-- The rest of these should be kept in sync with the parent pom --> + <exclude>org.apache.hbase:hbase-resource-bundle</exclude> + <exclude>org.slf4j:*</exclude> + <exclude>com.google.code.findbugs:*</exclude> + <exclude>com.github.stephenc.findbugs:*</exclude> + <exclude>com.github.spotbugs:*</exclude> + <exclude>org.apache.htrace:*</exclude> + <exclude>org.apache.yetus:*</exclude> + <exclude>org.apache.logging.log4j:*</exclude> + <exclude>commons-logging:*</exclude> + <exclude>org.javassist:*</exclude> + <exclude>io.opentelemetry:*</exclude> + </excludes> + </artifactSet> + </configuration> + </execution> + </executions> + </plugin> + </plugins> + </build> + <dependencies> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-client</artifactId> + </dependency> + </dependencies> </project> diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml b/hbase-shaded/hbase-shaded-mapreduce/pom.xml index 10d943109afc..0a000a2f4d40 100644 --- a/hbase-shaded/hbase-shaded-mapreduce/pom.xml +++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml @@ -1,6 +1,6 @@ <project xmlns="https://maven.apache.org/POM/4.0.0" - xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> <!-- /** * Licensed to the Apache Software Foundation (ASF) under one @@ -20,378 +20,288 @@ * limitations under the License. 
*/ --> - <modelVersion>4.0.0</modelVersion> - <parent> - <artifactId>hbase-shaded</artifactId> - <groupId>org.apache.hbase</groupId> - <version>2.6.0-SNAPSHOT</version> - <relativePath>..</relativePath> - </parent> - <artifactId>hbase-shaded-mapreduce</artifactId> - <name>Apache HBase - Shaded - MapReduce</name> - <build> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-site-plugin</artifactId> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - <plugin> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-shaded</artifactId> + <groupId>org.apache.hbase</groupId> + <version>2.6.0-SNAPSHOT</version> + <relativePath>..</relativePath> + </parent> + <artifactId>hbase-shaded-mapreduce</artifactId> + <name>Apache HBase - Shaded - MapReduce</name> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <plugin> <!--Make it so assembly:single does nothing in here--> - <artifactId>maven-assembly-plugin</artifactId> - <configuration> - <skipAssembly>true</skipAssembly> - </configuration> - </plugin> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-jar-plugin</artifactId> - <configuration> - <archive> - <manifest> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <skipAssembly>true</skipAssembly> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-jar-plugin</artifactId> + <configuration> + <archive> + <manifest> <!--Include the Driver class as the 'main'. Executing the jar will then show a list of the basic MR jobs. --> - <mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass> - </manifest> - </archive> - </configuration> - </plugin> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-shade-plugin</artifactId> - </plugin> - </plugins> - </build> - <dependencies> + <mainClass>org/apache/hadoop/hbase/mapreduce/Driver</mainClass> + </manifest> + </archive> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-shade-plugin</artifactId> + </plugin> + </plugins> + </build> + <dependencies> <!-- We want to ensure needed hadoop bits are at provided scope for our shaded artifact, so we list them below in hadoop specific profiles. 
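      Because Hadoop stays at provided scope, the runtime is expected to supply it,
      so the shaded MapReduce jar is normally launched through Hadoop itself; roughly
      (jar name and table purely illustrative):

        HADOOP_CLASSPATH="$(hbase mapredcp)" \
          hadoop jar hbase-shaded-mapreduce-2.6.0-SNAPSHOT.jar rowcounter mytable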
 -->
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-mapreduce</artifactId>
-            <exclusions>
-                <!-- Jaxb-api is a part of Java SE now -->
-                <exclusion>
-                    <groupId>javax.xml.bind</groupId>
-                    <artifactId>jaxb-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.ws.rs</groupId>
-                    <artifactId>jsr311-api</artifactId>
-                </exclusion>
-                <!-- Jersey not used by our MR support -->
-                <exclusion>
-                    <groupId>javax.ws.rs</groupId>
-                    <artifactId>javax.ws.rs-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jersey</groupId>
-                    <artifactId>jersey-server</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jersey</groupId>
-                    <artifactId>jersey-client</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jersey</groupId>
-                    <artifactId>jersey-core</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jersey</groupId>
-                    <artifactId>jersey-json</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jersey.contribs</groupId>
-                    <artifactId>jersey-guice</artifactId>
-                </exclusion>
-                <!-- Jetty not used by our MR support -->
-                <exclusion>
-                    <groupId>javax.servlet</groupId>
-                    <artifactId>javax.servlet-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-http</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-security</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-server</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-servlet</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-util</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-util-ajax</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.glassfish</groupId>
-                    <artifactId>javax.el</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.eclipse.jetty</groupId>
-                    <artifactId>jetty-webapp</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.glassfish.jersey.core</groupId>
-                    <artifactId>jersey-server</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.glassfish.jersey.containers</groupId>
-                    <artifactId>jersey-container-servlet-core</artifactId>
-                </exclusion>
-                <!-- We excluded the server-side generated classes for JSP, so exclude
-                    their runtime support libraries too
-                -->
-                <exclusion>
-                    <groupId>org.glassfish.web</groupId>
-                    <artifactId>javax.servlet.jsp</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.servlet.jsp</groupId>
-                    <artifactId>javax.servlet.jsp-api</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-mapreduce</artifactId>
+      <exclusions>
+        <!-- Jaxb-api is a part of Java SE now -->
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+        <!-- Jersey not used by our MR support -->
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>javax.ws.rs-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey.contribs</groupId>
+          <artifactId>jersey-guice</artifactId>
+        </exclusion>
+        <!-- Jetty not used by our MR support -->
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>javax.servlet-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-http</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-security</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-servlet</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util-ajax</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish</groupId>
+          <artifactId>javax.el</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-webapp</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish.jersey.core</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.glassfish.jersey.containers</groupId>
+          <artifactId>jersey-container-servlet-core</artifactId>
+        </exclusion>
+        <!-- We excluded the server-side generated classes for JSP, so exclude
+             their runtime support libraries too
+        -->
+        <exclusion>
+          <groupId>org.glassfish.web</groupId>
+          <artifactId>javax.servlet.jsp</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>javax.servlet.jsp-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+  </dependencies>
 
-    <profiles>
+  <profiles>
 <!-- These hadoop profiles should be derived from those in the hbase-mapreduce module.
      Essentially, you must list the same hadoop-* dependencies since provided dependencies
      are not transitively included.
 -->
 <!-- profile against Hadoop 2.x: This is the default.
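      It is selected through the negated property activation just below
      (<name>!hadoop.profile</name>): a build with no hadoop.profile set compiles against
      the Hadoop 2 dependencies, while -Dhadoop.profile=3.0 activates the hadoop-3.0
      profile further down instead. For example:

        mvn clean install                        (builds against Hadoop 2)
        mvn clean install -Dhadoop.profile=3.0   (builds against Hadoop 3)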
--> - <profile> - <id>hadoop-2.0</id> - <activation> - <property> + <profile> + <id>hadoop-2.0</id> + <activation> + <property> <!--Below formatting for dev-support/generate-hadoopX-poms.sh--> - <!--h2--><name>!hadoop.profile</name> - </property> - </activation> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>net.java.dev.jets3t</groupId> - <artifactId>jets3t</artifactId> - </exclusion> - <exclusion> - <groupId>javax.servlet.jsp</groupId> - <artifactId>jsp-api</artifactId> - </exclusion> - <exclusion> - <groupId>org.mortbay.jetty</groupId> - <artifactId>jetty</artifactId> - </exclusion> - <exclusion> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-server</artifactId> - </exclusion> - <exclusion> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-core</artifactId> - </exclusion> - <exclusion> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-json</artifactId> - </exclusion> - <exclusion> - <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> - </exclusion> - <exclusion> - <groupId>tomcat</groupId> - <artifactId>jasper-compiler</artifactId> - </exclusion> - <exclusion> - <groupId>tomcat</groupId> - <artifactId>jasper-runtime</artifactId> - </exclusion> - <exclusion> - <groupId>com.google.code.findbugs</groupId> - <artifactId>jsr305</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>javax.servlet.jsp</groupId> - <artifactId>jsp-api</artifactId> - </exclusion> - <exclusion> - <groupId>javax.servlet</groupId> - <artifactId>servlet-api</artifactId> - </exclusion> - <exclusion> - <groupId>io.netty</groupId> - <artifactId>netty</artifactId> - </exclusion> - <exclusion> - <groupId>stax</groupId> - <artifactId>stax-api</artifactId> - </exclusion> - <exclusion> - <groupId>xerces</groupId> - <artifactId>xercesImpl</artifactId> - </exclusion> - </exclusions> - <version>${hadoop-two.version}</version> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-mapreduce-client-core</artifactId> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> + <!--h2--> + <name>!hadoop.profile</name> + </property> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>net.java.dev.jets3t</groupId> + <artifactId>jets3t</artifactId> + </exclusion> + <exclusion> + <groupId>javax.servlet.jsp</groupId> + <artifactId>jsp-api</artifactId> + </exclusion> + <exclusion> + <groupId>org.mortbay.jetty</groupId> + <artifactId>jetty</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-server</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-core</artifactId> + </exclusion> + <exclusion> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-json</artifactId> + </exclusion> + <exclusion> + <groupId>javax.servlet</groupId> + <artifactId>servlet-api</artifactId> + </exclusion> + <exclusion> + <groupId>tomcat</groupId> + <artifactId>jasper-compiler</artifactId> + </exclusion> + <exclusion> + 
<groupId>tomcat</groupId> + <artifactId>jasper-runtime</artifactId> + </exclusion> + <exclusion> + <groupId>com.google.code.findbugs</groupId> + <artifactId>jsr305</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>javax.servlet.jsp</groupId> + <artifactId>jsp-api</artifactId> + </exclusion> + <exclusion> + <groupId>javax.servlet</groupId> + <artifactId>servlet-api</artifactId> + </exclusion> + <exclusion> + <groupId>io.netty</groupId> + <artifactId>netty</artifactId> + </exclusion> + <exclusion> + <groupId>stax</groupId> + <artifactId>stax-api</artifactId> + </exclusion> + <exclusion> + <groupId>xerces</groupId> + <artifactId>xercesImpl</artifactId> + </exclusion> + </exclusions> + <version>${hadoop-two.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-core</artifactId> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-jaxrs</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-jaxrs</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-xc</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-xc</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>provided</scope> - </dependency> - </dependencies> - </profile> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <scope>provided</scope> + </dependency> + </dependencies> + </profile> <!-- profile for building against Hadoop 3.0.x. 
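      (Unlike the client module's copy of this profile, the one below also switches
      hadoop.version to ${hadoop-three.version}.)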
Activate using: mvn -Dhadoop.profile=3.0 --> - <profile> - <id>hadoop-3.0</id> - <activation> - <property> - <name>hadoop.profile</name> - <value>3.0</value> - </property> - </activation> - <properties> - <hadoop.version>${hadoop-three.version}</hadoop.version> - </properties> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>provided</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-mapreduce-client-core</artifactId> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>com.google.guava</groupId> - <artifactId>guava</artifactId> - </exclusion> - <exclusion> - <groupId>javax.xml.bind</groupId> - <artifactId>jaxb-api</artifactId> - </exclusion> - <exclusion> - <groupId>javax.ws.rs</groupId> - <artifactId>jsr311-api</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> + <profile> + <id>hadoop-3.0</id> + <activation> + <property> + <name>hadoop.profile</name> + <value>3.0</value> + </property> + </activation> + <properties> + <hadoop.version>${hadoop-three.version}</hadoop.version> + </properties> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-core</artifactId> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + </exclusion> + <exclusion> + <groupId>javax.xml.bind</groupId> + <artifactId>jaxb-api</artifactId> + </exclusion> + <exclusion> + <groupId>javax.ws.rs</groupId> + <artifactId>jsr311-api</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-jaxrs</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-jaxrs</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-xc</artifactId> - <version>1.9.13</version> - <scope>provided</scope> - <exclusions> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-mapper-asl</artifactId> - </exclusion> - <exclusion> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-core-asl</artifactId> - </exclusion> - </exclusions> - </dependency> - </dependencies> - </profile> - </profiles> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + 
</exclusions> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-xc</artifactId> + <version>1.9.13</version> + <scope>provided</scope> + <exclusions> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </exclusion> + <exclusion> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </exclusion> + </exclusions> + </dependency> + </dependencies> + </profile> + </profiles> </project> diff --git a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml index 74d9be39804d..3d8aaa13e5ab 100644 --- a/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml +++ b/hbase-shaded/hbase-shaded-testing-util-tester/pom.xml @@ -1,25 +1,25 @@ <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <!-- - /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - --> +<!-- + /** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +--> <modelVersion>4.0.0</modelVersion> <parent> @@ -56,8 +56,23 @@ <scope>test</scope> </dependency> <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-1.2-api</artifactId> <scope>test</scope> </dependency> <dependency> diff --git a/hbase-shaded/hbase-shaded-testing-util/pom.xml b/hbase-shaded/hbase-shaded-testing-util/pom.xml index 03d001741143..ebc5eff50e43 100644 --- a/hbase-shaded/hbase-shaded-testing-util/pom.xml +++ b/hbase-shaded/hbase-shaded-testing-util/pom.xml @@ -1,183 +1,179 @@ <project xmlns="http://maven.apache.org/POM/4.0.0" - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> - <!-- - /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - --> - <modelVersion>4.0.0</modelVersion> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> +<!-- + /** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +--> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-shaded</artifactId> + <groupId>org.apache.hbase</groupId> + <version>2.6.0-SNAPSHOT</version> + <relativePath>..</relativePath> + </parent> - <parent> - <artifactId>hbase-shaded</artifactId> - <groupId>org.apache.hbase</groupId> - <version>2.6.0-SNAPSHOT</version> - <relativePath>..</relativePath> - </parent> + <artifactId>hbase-shaded-testing-util</artifactId> + <name>Apache HBase - Shaded - Testing Util</name> - <artifactId>hbase-shaded-testing-util</artifactId> - <name>Apache HBase - Shaded - Testing Util</name> - - <dependencies> + <dependencies> <!-- test-jar dependencies --> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-mapreduce-client-app</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-mapreduce-client-jobclient</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-common</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-server</artifactId> - <type>test-jar</type> - <scope>compile</scope> - <exclusions> - <exclusion> - <groupId>javax.xml.bind</groupId> - <artifactId>jaxb-api</artifactId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-asyncfs</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-zookeeper</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-hadoop-compat</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-hadoop2-compat</artifactId> - <type>test-jar</type> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.codehaus.jackson</groupId> - <artifactId>jackson-jaxrs</artifactId> - <version>1.9.13</version> - <scope>compile</scope> - </dependency> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-testing-util</artifactId> - <version>${project.version}</version> - <scope>compile</scope> - <exclusions> - <exclusion> - <groupId>javax.xml.bind</groupId> - <artifactId>jaxb-api</artifactId> - </exclusion> - </exclusions> - </dependency> - </dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-app</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-jobclient</artifactId> + 
<type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-common</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-server</artifactId> + <type>test-jar</type> + <scope>compile</scope> + <exclusions> + <exclusion> + <groupId>javax.xml.bind</groupId> + <artifactId>jaxb-api</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-asyncfs</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-zookeeper</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-hadoop-compat</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-hadoop2-compat</artifactId> + <type>test-jar</type> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-jaxrs</artifactId> + <version>1.9.13</version> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-testing-util</artifactId> + <scope>compile</scope> + <exclusions> + <exclusion> + <groupId>javax.xml.bind</groupId> + <artifactId>jaxb-api</artifactId> + </exclusion> + </exclusions> + </dependency> + </dependencies> - <build> - <plugins> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-site-plugin</artifactId> - <configuration> - <skip>true</skip> - </configuration> - </plugin> - <plugin> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-site-plugin</artifactId> + <configuration> + <skip>true</skip> + </configuration> + </plugin> + <plugin> <!--Make it so assembly:single does nothing in here--> - <artifactId>maven-assembly-plugin</artifactId> - <configuration> - <skipAssembly>true</skipAssembly> - </configuration> - </plugin> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-shade-plugin</artifactId> - <executions> - <execution> - <id>aggregate-into-a-jar-with-relocated-third-parties</id> - <configuration> - <artifactSet> - <excludes> + <artifactId>maven-assembly-plugin</artifactId> + <configuration> + <skipAssembly>true</skipAssembly> + </configuration> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-shade-plugin</artifactId> + <executions> + <execution> + <id>aggregate-into-a-jar-with-relocated-third-parties</id> + <configuration> + <artifactSet> + <excludes> <!-- exclude J2EE modules that come in for JDK11+ (since hadoop-3.2.0) or modules that come in for JDK8+ but need not be included --> - <exclude>javax.annotation:javax.annotation-api</exclude> - <exclude>javax.activation:javax.activation-api</exclude> - <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude> - <exclude>jakarta.annotation:jakarta.annotation-api</exclude> - <exclude>jakarta.validation:jakarta.validation-api</exclude> - <exclude>org.glassfish.hk2.external:jakarta.inject</exclude> + <exclude>javax.annotation:javax.annotation-api</exclude> + <exclude>javax.activation:javax.activation-api</exclude> + <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude> + 
<exclude>jakarta.annotation:jakarta.annotation-api</exclude> + <exclude>jakarta.validation:jakarta.validation-api</exclude> + <exclude>org.glassfish.hk2.external:jakarta.inject</exclude> <!-- Tell the shade plugin that in this case we want to include hadoop by leaving out the exclude. --> <!-- The rest of these should be kept in sync with the parent pom --> - <exclude>org.apache.hbase:hbase-resource-bundle</exclude> - <exclude>org.slf4j:*</exclude> - <exclude>com.google.code.findbugs:*</exclude> - <exclude>com.github.stephenc.findbugs:*</exclude> - <exclude>com.github.spotbugs:*</exclude> - <exclude>org.apache.htrace:*</exclude> - <exclude>org.apache.yetus:*</exclude> - <exclude>log4j:*</exclude> - <exclude>ch.qos.reload4j:*</exclude> - <exclude>commons-logging:*</exclude> - <exclude>org.javassist:*</exclude> - <exclude>io.opentelemetry:*</exclude> - </excludes> - </artifactSet> - </configuration> - </execution> - </executions> - </plugin> - </plugins> - </build> - + <exclude>org.apache.hbase:hbase-resource-bundle</exclude> + <exclude>org.slf4j:*</exclude> + <exclude>com.google.code.findbugs:*</exclude> + <exclude>com.github.stephenc.findbugs:*</exclude> + <exclude>com.github.spotbugs:*</exclude> + <exclude>org.apache.htrace:*</exclude> + <exclude>org.apache.yetus:*</exclude> + <exclude>org.apache.logging.log4j:*</exclude> + <exclude>commons-logging:*</exclude> + <exclude>org.javassist:*</exclude> + <exclude>io.opentelemetry:*</exclude> + </excludes> + </artifactSet> + </configuration> + </execution> + </executions> + </plugin> + </plugins> + </build> </project> diff --git a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml index 48c4a3d131b8..01a5fceb3e53 100644 --- a/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml +++ b/hbase-shaded/hbase-shaded-with-hadoop-check-invariants/pom.xml @@ -40,7 +40,6 @@ <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-shaded-client</artifactId> - <version>${project.version}</version> </dependency> <!-- parent pom defines these for children. :( :( :( --> <dependency> @@ -49,8 +48,18 @@ <scope>provided</scope> </dependency> <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-api</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-core</artifactId> + <scope>provided</scope> + </dependency> + <dependency> + <groupId>org.apache.logging.log4j</groupId> + <artifactId>log4j-slf4j-impl</artifactId> <scope>provided</scope> </dependency> <!-- Test dependencies --> diff --git a/hbase-shaded/pom.xml b/hbase-shaded/pom.xml index 1da6ad46158c..ca60766b900d 100644 --- a/hbase-shaded/pom.xml +++ b/hbase-shaded/pom.xml @@ -1,5 +1,7 @@ <?xml version="1.0"?> -<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> +<project xmlns="https://maven.apache.org/POM/4.0.0" + xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd"> <!-- /** * Licensed to the Apache Software Foundation (ASF) under one @@ -19,546 +21,553 @@ * limitations under the License. 
*/ --> - <modelVersion>4.0.0</modelVersion> - <parent> - <artifactId>hbase-build-configuration</artifactId> - <groupId>org.apache.hbase</groupId> - <version>2.6.0-SNAPSHOT</version> - <relativePath>../hbase-build-configuration</relativePath> - </parent> - <artifactId>hbase-shaded</artifactId> - <name>Apache HBase - Shaded</name> - <description>Module of HBase with most deps shaded.</description> - <packaging>pom</packaging> - <properties> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>hbase-build-configuration</artifactId> + <groupId>org.apache.hbase</groupId> + <version>2.6.0-SNAPSHOT</version> + <relativePath>../hbase-build-configuration</relativePath> + </parent> + <artifactId>hbase-shaded</artifactId> + <name>Apache HBase - Shaded</name> + <description>Module of HBase with most deps shaded.</description> + <packaging>pom</packaging> + <properties> <!-- Don't make a test-jar --> - <maven.test.skip>true</maven.test.skip> + <maven.test.skip>true</maven.test.skip> <!-- Don't make a source-jar --> - <source.skip>true</source.skip> - <license.bundles.dependencies>true</license.bundles.dependencies> - <shaded.prefix>org.apache.hadoop.hbase.shaded</shaded.prefix> - </properties> - <modules> - <module>hbase-shaded-client-byo-hadoop</module> - <module>hbase-shaded-client</module> - <module>hbase-shaded-mapreduce</module> - <module>hbase-shaded-testing-util</module> - <module>hbase-shaded-testing-util-tester</module> - <module>hbase-shaded-check-invariants</module> - <module>hbase-shaded-with-hadoop-check-invariants</module> - </modules> - <dependencies> - <dependency> - <groupId>org.apache.hbase</groupId> - <artifactId>hbase-resource-bundle</artifactId> - <optional>true</optional> - </dependency> - <!-- put the log implementations to optional --> - <dependency> - <groupId>ch.qos.reload4j</groupId> - <artifactId>reload4j</artifactId> - <optional>true</optional> - </dependency> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-reload4j</artifactId> - <optional>true</optional> - </dependency> - </dependencies> - <build> - <plugins> - <plugin> - <!--Make it so assembly:single does nothing in here--> - <artifactId>maven-assembly-plugin</artifactId> - <configuration> - <skipAssembly>true</skipAssembly> - </configuration> - </plugin> - <!-- licensing info from our dependencies --> - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-remote-resources-plugin</artifactId> - <executions> - <execution> - <id>aggregate-licenses</id> - <goals> - <goal>process</goal> - </goals> - <configuration> - <properties> - <copyright-end-year>${build.year}</copyright-end-year> - <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info> - <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies> - <bundled-jquery>${license.bundles.jquery}</bundled-jquery> - <bundled-logo>${license.bundles.logo}</bundled-logo> - <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap> - </properties> - <resourceBundles> - <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle> - </resourceBundles> - <supplementalModelArtifacts> - <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact> - </supplementalModelArtifacts> - <supplementalModels> - <supplementalModel>supplemental-models.xml</supplementalModel> - </supplementalModels> - </configuration> - </execution> - </executions> - </plugin> - </plugins> - <pluginManagement> - 
<plugins>
-        <plugin>
-          <!--Make it so assembly:single does nothing in here-->
-          <artifactId>maven-assembly-plugin</artifactId>
-          <configuration>
-            <skipAssembly>true</skipAssembly>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-shade-plugin</artifactId>
-          <version>3.2.4</version>
-          <executions>
-            <execution>
-              <id>aggregate-into-a-jar-with-relocated-third-parties</id>
-              <phase>package</phase>
-              <goals>
-                <goal>shade</goal>
-              </goals>
-              <configuration>
-                <createSourcesJar>false</createSourcesJar>
-                <shadedArtifactAttached>false</shadedArtifactAttached>
-                <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
-                <shadeTestJar>false</shadeTestJar>
-                <artifactSet>
-                  <excludes>
-                    <!-- exclude J2EE modules that come in for JDK11+ (since
-                         hadoop-3.2.0) or modules that come in for JDK8+ but
-                         need not be included -->
-                    <exclude>javax.annotation:javax.annotation-api</exclude>
-                    <exclude>javax.activation:javax.activation-api</exclude>
-                    <exclude>jakarta.activation:jakarta.activation-api</exclude> <!-- Hadoop 3.3.1 -->
-                    <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
-                    <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
-                    <exclude>jakarta.validation:jakarta.validation-api</exclude>
-                    <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
-                    <!-- default to excluding Hadoop, have module that want
-                         to include it redefine the exclude list -->
-                    <exclude>org.apache.hadoop:*</exclude>
-                    <!-- the rest of this needs to be kept in sync with any
-                         hadoop-including module -->
-                    <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
-                    <exclude>org.slf4j:*</exclude>
-                    <exclude>com.google.code.findbugs:*</exclude>
-                    <exclude>com.github.stephenc.findbugs:*</exclude>
-                    <exclude>com.github.spotbugs:*</exclude>
-                    <exclude>org.apache.htrace:*</exclude>
-                    <exclude>org.apache.yetus:*</exclude>
-                    <exclude>log4j:*</exclude>
-                    <exclude>ch.qos.reload4j:*</exclude>
-                    <exclude>commons-logging:*</exclude>
-                    <exclude>org.javassist:*</exclude>
-                    <exclude>io.opentelemetry:*</exclude>
-                  </excludes>
-                </artifactSet>
-                <relocations>
-                  <!-- top level com not including sun-->
-                  <relocation>
-                    <pattern>com.cedarsoftware</pattern>
-                    <shadedPattern>${shaded.prefix}.com.cedarsoftware</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.codahale</pattern>
-                    <shadedPattern>${shaded.prefix}.com.codahale</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.ctc</pattern>
-                    <shadedPattern>${shaded.prefix}.com.ctc</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.dropwizard</pattern>
-                    <shadedPattern>${shaded.prefix}.com.dropwizard</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.fasterxml</pattern>
-                    <shadedPattern>${shaded.prefix}.com.fasterxml</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.github.benmanes.caffeine</pattern>
-                    <shadedPattern>${shaded.prefix}.com.github.benmanes.caffeine</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.google</pattern>
-                    <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.jamesmurty</pattern>
-                    <shadedPattern>${shaded.prefix}.com.jamesmurty</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.jcraft</pattern>
-                    <shadedPattern>${shaded.prefix}.com.jcraft</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.lmax</pattern>
-                    <shadedPattern>${shaded.prefix}.com.lmax</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.microsoft</pattern>
-                    <shadedPattern>${shaded.prefix}.com.microsoft</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.nimbusds</pattern>
-                    <shadedPattern>${shaded.prefix}.com.nimbusds</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.squareup</pattern>
-                    <shadedPattern>${shaded.prefix}.com.squareup</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.thoughtworks</pattern>
-                    <shadedPattern>${shaded.prefix}.com.thoughtworks</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>com.zaxxer</pattern>
-                    <shadedPattern>${shaded.prefix}.com.zaxxer</shadedPattern>
-                  </relocation>
+    <source.skip>true</source.skip>
+    <license.bundles.dependencies>true</license.bundles.dependencies>
+    <shaded.prefix>org.apache.hadoop.hbase.shaded</shaded.prefix>
+  </properties>
+  <modules>
+    <module>hbase-shaded-client-byo-hadoop</module>
+    <module>hbase-shaded-client</module>
+    <module>hbase-shaded-mapreduce</module>
+    <module>hbase-shaded-testing-util</module>
+    <module>hbase-shaded-testing-util-tester</module>
+    <module>hbase-shaded-check-invariants</module>
+    <module>hbase-shaded-with-hadoop-check-invariants</module>
+  </modules>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-resource-bundle</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <!-- make the log implementations optional -->
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+      <!-- licensing info from our dependencies -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-remote-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>aggregate-licenses</id>
+            <goals>
+              <goal>process</goal>
+            </goals>
+            <configuration>
+              <properties>
+                <copyright-end-year>${build.year}</copyright-end-year>
+                <debug-print-included-work-info>${license.debug.print.included}</debug-print-included-work-info>
+                <bundled-dependencies>${license.bundles.dependencies}</bundled-dependencies>
+                <bundled-jquery>${license.bundles.jquery}</bundled-jquery>
+                <bundled-logo>${license.bundles.logo}</bundled-logo>
+                <bundled-bootstrap>${license.bundles.bootstrap}</bundled-bootstrap>
+              </properties>
+              <resourceBundles>
+                <resourceBundle>${project.groupId}:hbase-resource-bundle:${project.version}</resourceBundle>
+              </resourceBundles>
+              <supplementalModelArtifacts>
+                <supplementalModelArtifact>${project.groupId}:hbase-resource-bundle:${project.version}</supplementalModelArtifact>
+              </supplementalModelArtifacts>
+              <supplementalModels>
+                <supplementalModel>supplemental-models.xml</supplementalModel>
+              </supplementalModels>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <!--Make it so assembly:single does nothing in here-->
+          <artifactId>maven-assembly-plugin</artifactId>
+          <configuration>
+            <skipAssembly>true</skipAssembly>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>3.2.4</version>
+          <executions>
+            <execution>
+              <id>aggregate-into-a-jar-with-relocated-third-parties</id>
+              <phase>package</phase>
+              <goals>
+                <goal>shade</goal>
+              </goals>
+              <configuration>
+                <createSourcesJar>false</createSourcesJar>
+                <shadedArtifactAttached>false</shadedArtifactAttached>
+                <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+                <shadeTestJar>false</shadeTestJar>
+                <artifactSet>
+                  <excludes>
+                    <!-- exclude J2EE modules that come in for JDK11+ (since
+                         hadoop-3.2.0) or modules that come in for JDK8+ but
+                         need not be included -->
+                    <exclude>javax.annotation:javax.annotation-api</exclude>
+                    <exclude>javax.activation:javax.activation-api</exclude>
+                    <exclude>jakarta.activation:jakarta.activation-api</exclude> <!-- Hadoop 3.3.1 -->
+                    <exclude>jakarta.ws.rs:jakarta.ws.rs-api</exclude>
+                    <exclude>jakarta.annotation:jakarta.annotation-api</exclude>
+                    <exclude>jakarta.validation:jakarta.validation-api</exclude>
+                    <exclude>org.glassfish.hk2.external:jakarta.inject</exclude>
+                    <!-- default to excluding Hadoop, have modules that want
+                         to include it redefine the exclude list -->
+                    <exclude>org.apache.hadoop:*</exclude>
+                    <!-- the rest of this needs to be kept in sync with any
+                         hadoop-including module -->
+                    <exclude>org.apache.hbase:hbase-resource-bundle</exclude>
+                    <exclude>org.slf4j:*</exclude>
+                    <exclude>com.google.code.findbugs:*</exclude>
+                    <exclude>com.github.stephenc.findbugs:*</exclude>
+                    <exclude>com.github.spotbugs:*</exclude>
+                    <exclude>org.apache.htrace:*</exclude>
+                    <exclude>org.apache.yetus:*</exclude>
+                    <exclude>org.apache.logging.log4j:*</exclude>
+                    <exclude>commons-logging:*</exclude>
+                    <exclude>org.javassist:*</exclude>
+                    <exclude>io.opentelemetry:*</exclude>
+                  </excludes>
+                </artifactSet>
+                <relocations>
+                  <!-- top level com not including sun-->
+                  <relocation>
+                    <pattern>com.cedarsoftware</pattern>
+                    <shadedPattern>${shaded.prefix}.com.cedarsoftware</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.codahale</pattern>
+                    <shadedPattern>${shaded.prefix}.com.codahale</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.ctc</pattern>
+                    <shadedPattern>${shaded.prefix}.com.ctc</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.dropwizard</pattern>
+                    <shadedPattern>${shaded.prefix}.com.dropwizard</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.fasterxml</pattern>
+                    <shadedPattern>${shaded.prefix}.com.fasterxml</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.github.benmanes.caffeine</pattern>
+                    <shadedPattern>${shaded.prefix}.com.github.benmanes.caffeine</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.google</pattern>
+                    <shadedPattern>${shaded.prefix}.com.google</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.jamesmurty</pattern>
+                    <shadedPattern>${shaded.prefix}.com.jamesmurty</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.jcraft</pattern>
+                    <shadedPattern>${shaded.prefix}.com.jcraft</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.lmax</pattern>
+                    <shadedPattern>${shaded.prefix}.com.lmax</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.microsoft</pattern>
+                    <shadedPattern>${shaded.prefix}.com.microsoft</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.nimbusds</pattern>
+                    <shadedPattern>${shaded.prefix}.com.nimbusds</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.squareup</pattern>
+                    <shadedPattern>${shaded.prefix}.com.squareup</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.thoughtworks</pattern>
+                    <shadedPattern>${shaded.prefix}.com.thoughtworks</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>com.zaxxer</pattern>
+                    <shadedPattern>${shaded.prefix}.com.zaxxer</shadedPattern>
+                  </relocation>
-                  <!-- dnsjava -->
-                  <relocation>
-                    <pattern>org.xbill</pattern>
-                    <shadedPattern>${shaded.prefix}.org.xbill</shadedPattern>
-                  </relocation>
+                  <!-- dnsjava -->
+                  <relocation>
+                    <pattern>org.xbill</pattern>
+                    <shadedPattern>${shaded.prefix}.org.xbill</shadedPattern>
+                  </relocation>
-                  <!-- netty family -->
-                  <relocation>
-                    <pattern>org.jboss.netty</pattern>
-                    <shadedPattern>${shaded.prefix}.org.jboss.netty</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>io.netty</pattern>
-                    <shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
-                  </relocation>
+                  <!-- netty family -->
+                  <relocation>
+                    <pattern>org.jboss.netty</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jboss.netty</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>io.netty</pattern>
+                    <shadedPattern>${shaded.prefix}.io.netty</shadedPattern>
+                  </relocation>
-                  <!-- top level okio -->
-                  <relocation>
-                    <pattern>okio</pattern>
-                    <shadedPattern>${shaded.prefix}.okio</shadedPattern>
-                  </relocation>
+                  <!-- top level okio -->
+                  <relocation>
+                    <pattern>okio</pattern>
+                    <shadedPattern>${shaded.prefix}.okio</shadedPattern>
+                  </relocation>
-                  <!-- top level org -->
-                  <relocation>
-                    <pattern>org.checkerframework</pattern>
-                    <shadedPattern>${shaded.prefix}.org.checkerframework</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.codehaus</pattern>
-                    <shadedPattern>${shaded.prefix}.org.codehaus</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.eclipse</pattern>
-                    <shadedPattern>${shaded.prefix}.org.eclipse</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.ehcache</pattern>
-                    <shadedPattern>${shaded.prefix}.org.ehcache</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.jcodings</pattern>
-                    <shadedPattern>${shaded.prefix}.org.jcodings</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.joni</pattern>
-                    <shadedPattern>${shaded.prefix}.org.joni</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.mortbay</pattern>
-                    <shadedPattern>${shaded.prefix}.org.mortbay</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.nustaq</pattern>
-                    <shadedPattern>${shaded.prefix}.org.nustaq</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.terracotta</pattern>
-                    <shadedPattern>${shaded.prefix}.org.terracotta</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.tukaani</pattern>
-                    <shadedPattern>${shaded.prefix}.org.tukaani</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.xerial</pattern>
-                    <shadedPattern>${shaded.prefix}.org.xerial</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.znerd</pattern>
-                    <shadedPattern>${shaded.prefix}.org.znerd</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.aopalliance</pattern>
-                    <shadedPattern>${shaded.prefix}.org.aopalliance</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.fusesource</pattern>
-                    <shadedPattern>${shaded.prefix}.org.fusesource</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.iq80</pattern>
-                    <shadedPattern>${shaded.prefix}.org.iq80</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.jamon</pattern>
-                    <shadedPattern>${shaded.prefix}.org.jamon</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.jets3t</pattern>
-                    <shadedPattern>${shaded.prefix}.org.jets3t</shadedPattern>
-                  </relocation>
-                  <!-- poorly named add-on package from jets3t dependency. TODO can we just exclude these? -->
-                  <relocation>
-                    <pattern>contribs.mx</pattern>
-                    <shadedPattern>${shaded.prefix}.contribs.mx</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.objectweb</pattern>
-                    <shadedPattern>${shaded.prefix}.org.objectweb</shadedPattern>
-                  </relocation>
+                  <!-- top level org -->
+                  <relocation>
+                    <pattern>org.checkerframework</pattern>
+                    <shadedPattern>${shaded.prefix}.org.checkerframework</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.codehaus</pattern>
+                    <shadedPattern>${shaded.prefix}.org.codehaus</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.eclipse</pattern>
+                    <shadedPattern>${shaded.prefix}.org.eclipse</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.ehcache</pattern>
+                    <shadedPattern>${shaded.prefix}.org.ehcache</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jcodings</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jcodings</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.joni</pattern>
+                    <shadedPattern>${shaded.prefix}.org.joni</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.mortbay</pattern>
+                    <shadedPattern>${shaded.prefix}.org.mortbay</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.nustaq</pattern>
+                    <shadedPattern>${shaded.prefix}.org.nustaq</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.terracotta</pattern>
+                    <shadedPattern>${shaded.prefix}.org.terracotta</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.tukaani</pattern>
+                    <shadedPattern>${shaded.prefix}.org.tukaani</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.xerial</pattern>
+                    <shadedPattern>${shaded.prefix}.org.xerial</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.znerd</pattern>
+                    <shadedPattern>${shaded.prefix}.org.znerd</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.aopalliance</pattern>
+                    <shadedPattern>${shaded.prefix}.org.aopalliance</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.fusesource</pattern>
+                    <shadedPattern>${shaded.prefix}.org.fusesource</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.iq80</pattern>
+                    <shadedPattern>${shaded.prefix}.org.iq80</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jamon</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jamon</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.jets3t</pattern>
+                    <shadedPattern>${shaded.prefix}.org.jets3t</shadedPattern>
+                  </relocation>
+                  <!-- poorly named add-on package from jets3t dependency. TODO can we just exclude these?
+                  -->
+                  <relocation>
+                    <pattern>contribs.mx</pattern>
+                    <shadedPattern>${shaded.prefix}.contribs.mx</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.objectweb</pattern>
+                    <shadedPattern>${shaded.prefix}.org.objectweb</shadedPattern>
+                  </relocation>
-                  <!-- org.apache relocations not in org.apache.hadoop or org.apache.commons -->
-                  <relocation>
-                    <pattern>org.apache.avro</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.avro</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.curator</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.curator</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.directory</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.directory</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.http</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.http</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.jasper</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.jasper</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.jute</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.jute</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.kerby</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.kerby</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.taglibs</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.taglibs</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.zookeeper</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.zookeeper</shadedPattern>
-                  </relocation>
+                  <!-- org.apache relocations not in org.apache.hadoop or org.apache.commons -->
+                  <relocation>
+                    <pattern>org.apache.avro</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.avro</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.curator</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.curator</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.directory</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.directory</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.http</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.http</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.jasper</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.jasper</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.jute</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.jute</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.kerby</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.kerby</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.taglibs</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.taglibs</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.zookeeper</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.zookeeper</shadedPattern>
+                  </relocation>
-                  <!-- org.apache.commons not including logging -->
-                  <relocation>
-                    <pattern>org.apache.commons.beanutils</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.beanutils</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.cli</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.cli</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.collections</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.collections</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.configuration</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.configuration</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.crypto</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.crypto</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.csv</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.csv</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.daemon</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.daemon</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.io</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.io</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.math</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.math</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.math3</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.math3</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.net</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.net</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.lang</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.lang3</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang3</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.el</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.el</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.httpclient</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.httpclient</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.compress</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.compress</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.digester</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.digester</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.codec</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.codec</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.apache.commons.text</pattern>
-                    <shadedPattern>${shaded.prefix}.org.apache.commons.text</shadedPattern>
-                  </relocation>
+                  <!-- org.apache.commons not including logging -->
+                  <relocation>
+                    <pattern>org.apache.commons.beanutils</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.beanutils</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.cli</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.cli</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.collections</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.collections</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.configuration</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.configuration</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.crypto</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.crypto</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.csv</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.csv</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.daemon</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.daemon</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.io</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.io</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.math</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.math</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.math3</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.math3</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.net</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.net</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.lang</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.lang3</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.lang3</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.el</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.el</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.httpclient</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.httpclient</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.compress</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.compress</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.digester</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.digester</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.codec</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.codec</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.apache.commons.text</pattern>
+                    <shadedPattern>${shaded.prefix}.org.apache.commons.text</shadedPattern>
+                  </relocation>
-                  <!-- top level net-->
-                  <relocation>
-                    <pattern>net/</pattern>
-                    <shadedPattern>${shaded.prefix}.net.</shadedPattern>
-                  </relocation>
-                  <relocation>
-                    <pattern>org.agrona</pattern>
-                    <shadedPattern>${shaded.prefix}.org.agrona</shadedPattern>
-                  </relocation>
-                </relocations>
-                <transformers>
-                  <!-- Need to filter out some extraneous license files.
-                       Don't use the ApacheLicenseRT because it just removes all
-                       META-INF/LICENSE(.txt)? files, including ours. -->
-                  <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
-                    <resources>
-                      <resource>LICENSE.txt</resource>
-                      <resource>ASL2.0</resource>
+                  <!-- top level net-->
+                  <relocation>
+                    <pattern>net/</pattern>
+                    <shadedPattern>${shaded.prefix}.net.</shadedPattern>
+                  </relocation>
+                  <relocation>
+                    <pattern>org.agrona</pattern>
+                    <shadedPattern>${shaded.prefix}.org.agrona</shadedPattern>
+                  </relocation>
+                </relocations>
+                <transformers>
+                  <!-- Need to filter out some extraneous license files.
+                       Don't use the ApacheLicenseRT because it just removes all
+                       META-INF/LICENSE(.txt)? files, including ours.
+                  -->
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                    <resources>
+                      <resource>LICENSE.txt</resource>
+                      <resource>ASL2.0</resource>
                       <!-- also this unneeded doc -->
-                      <resource>overview.html</resource>
-                    </resources>
-                  </transformer>
-                  <!-- Where notices exist, just concat them -->
-                  <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
-                    <addHeader>false</addHeader>
-                    <projectName>${project.name}</projectName>
-                  </transformer>
-                  <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
-                  </transformer>
-                </transformers>
-                <filters>
-                  <!-- remove utility classes which are not required from dnsjava -->
-                  <filter>
-                    <artifact>dnsjava:dnsjava</artifact>
-                    <excludes>
-                      <exclude>dig*</exclude>
-                      <exclude>jnamed*</exclude>
-                      <exclude>lookup*</exclude>
-                      <exclude>update*</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <!-- this is a signed osgi bundle -->
-                    <artifact>org.eclipse.jetty.orbit:javax.servlet.jsp.jstl</artifact>
-                    <excludes>
-                      <exclude>META-INF/ECLIPSEF.SF</exclude>
-                      <exclude>META-INF/ECLIPSEF.RSA</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <!-- Duplication of classes that ship in commons-collections 2.x and 3.x
-                         If we stop bundling a relevant commons-collections artifact we'll
-                         need to revisit. See: https://s.apache.org/e09o
-                    -->
-                    <artifact>commons-beanutils:commons-beanutils-core</artifact>
-                    <excludes>
-                      <exclude>org/apache/commons/collections/*.class</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <!-- server side webapps that we don't need -->
-                    <artifact>org.apache.hadoop:hadoop-yarn-common</artifact>
-                    <excludes>
-                      <exclude>webapps/*</exclude>
-                      <exclude>webapps/**/*</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <artifact>*:*</artifact>
-                    <excludes>
-                      <!-- proto source files aren't needed -->
-                      <exclude>*.proto</exclude>
-                      <exclude>**/*.proto</exclude>
-                      <!-- We already concat NOTICE, safe to drop individual ones -->
-                      <exclude>LICENSE</exclude>
-                      <exclude>NOTICE</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <!-- skip french localization -->
-                    <artifact>org.apache.commons:commons-math3</artifact>
-                    <excludes>
-                      <exclude>assets/org/apache/commons/math3/**/*</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <!-- appears to be the result of a conflict in hadoop artifacts -->
-                    <artifact>org.apache.hadoop:*</artifact>
-                    <excludes>
-                      <exclude>mapred-default.xml.orig</exclude>
-                    </excludes>
-                  </filter>
-                  <!-- unnecessary files that mess up our invariants check -->
-                  <filter>
-                    <artifact>org.eclipse.jetty:*</artifact>
-                    <excludes>
-                      <exclude>about.html</exclude>
-                      <exclude>jetty-dir.css</exclude>
-                    </excludes>
-                  </filter>
-                  <filter>
-                    <artifact>org.apache.kerby:*</artifact>
-                    <excludes>
-                      <exclude>krb5-template.conf</exclude>
-                      <exclude>krb5_udp-template.conf</exclude>
-                      <exclude>ccache.txt</exclude>
-                      <exclude>keytab.txt</exclude>
-                    </excludes>
-                  </filter>
-                </filters>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-  </build>
+                      <resource>overview.html</resource>
+                    </resources>
+                  </transformer>
+                  <!-- Where notices exist, just concat them -->
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer">
+                    <addHeader>false</addHeader>
+                    <projectName>${project.name}</projectName>
+                  </transformer>
+                  <transformer
+                    implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
+                  </transformer>
+                </transformers>
+                <filters>
+                  <!-- remove utility classes which are not required from dnsjava -->
+                  <filter>
+                    <artifact>dnsjava:dnsjava</artifact>
+                    <excludes>
+                      <exclude>dig*</exclude>
+                      <exclude>jnamed*</exclude>
+                      <exclude>lookup*</exclude>
+                      <exclude>update*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- this is a signed osgi bundle -->
+                    <artifact>org.eclipse.jetty.orbit:javax.servlet.jsp.jstl</artifact>
+                    <excludes>
+                      <exclude>META-INF/ECLIPSEF.SF</exclude>
+                      <exclude>META-INF/ECLIPSEF.RSA</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- Duplication of classes that ship in commons-collections 2.x and 3.x
+                         If we stop bundling a relevant commons-collections artifact we'll
+                         need to revisit. See: https://s.apache.org/e09o
+                    -->
+                    <artifact>commons-beanutils:commons-beanutils-core</artifact>
+                    <excludes>
+                      <exclude>org/apache/commons/collections/*.class</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- server side webapps that we don't need -->
+                    <artifact>org.apache.hadoop:hadoop-yarn-common</artifact>
+                    <excludes>
+                      <exclude>webapps/*</exclude>
+                      <exclude>webapps/**/*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <artifact>*:*</artifact>
+                    <excludes>
+                      <!-- proto source files aren't needed -->
+                      <exclude>*.proto</exclude>
+                      <exclude>**/*.proto</exclude>
+                      <!-- We already concat NOTICE, safe to drop individual ones -->
+                      <exclude>LICENSE</exclude>
+                      <exclude>NOTICE</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- skip french localization -->
+                    <artifact>org.apache.commons:commons-math3</artifact>
+                    <excludes>
+                      <exclude>assets/org/apache/commons/math3/**/*</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <!-- appears to be the result of a conflict in hadoop artifacts -->
+                    <artifact>org.apache.hadoop:*</artifact>
+                    <excludes>
+                      <exclude>mapred-default.xml.orig</exclude>
+                    </excludes>
+                  </filter>
+                  <!-- unnecessary files that mess up our invariants check -->
+                  <filter>
+                    <artifact>org.eclipse.jetty:*</artifact>
+                    <excludes>
+                      <exclude>about.html</exclude>
+                      <exclude>jetty-dir.css</exclude>
+                    </excludes>
+                  </filter>
+                  <filter>
+                    <artifact>org.apache.kerby:*</artifact>
+                    <excludes>
+                      <exclude>krb5-template.conf</exclude>
+                      <exclude>krb5_udp-template.conf</exclude>
+                      <exclude>ccache.txt</exclude>
+                      <exclude>keytab.txt</exclude>
+                    </excludes>
+                  </filter>
+                </filters>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+  </build>
 </project>
diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml
index cb63fe5c2cda..e3333a8c9edc 100644
--- a/hbase-shell/pom.xml
+++ b/hbase-shell/pom.xml
@@ -147,13 +147,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml
index c82ccf799373..70f74463862d 100644
--- a/hbase-testing-util/pom.xml
+++ b/hbase-testing-util/pom.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0"?>
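Two patterns from the hunks above recur throughout the rest of this patch. The hbase-shaded parent now declares log4j-api, log4j-core and log4j-slf4j-impl as optional, and the shade configuration excludes org.slf4j:* and org.apache.logging.log4j:*, so the published shaded jars carry SLF4J calls but no logging backend; a downstream consumer has to supply one itself. A minimal sketch of such a consumer pom follows (the version numbers are illustrative, simply echoing the 2.6.0-SNAPSHOT and 2.17.2 values this patch pins; any log4j2 release compatible with the SLF4J 1.7 binding should work the same way):

    <dependencies>
      <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-shaded-client</artifactId>
        <version>2.6.0-SNAPSHOT</version>
      </dependency>
      <!-- the shaded client only pulls in the slf4j facade; add a backend at runtime -->
      <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-slf4j-impl</artifactId>
        <version>2.17.2</version>
        <scope>runtime</scope>
      </dependency>
      <dependency>
        <groupId>org.apache.logging.log4j</groupId>
        <artifactId>log4j-core</artifactId>
        <version>2.17.2</version>
        <scope>runtime</scope>
      </dependency>
    </dependencies>

The hbase-shell hunk above is the other template: in each leaf module, slf4j-reload4j and reload4j at test scope become the four log4j2 artifacts, namely log4j-api, log4j-core, log4j-slf4j-impl (the SLF4J binding) and log4j-1.2-api (the bridge for third-party code that still calls log4j 1.x APIs). The hbase-testing-util diff that follows, and the hbase-thrift and hbase-zookeeper diffs after it, apply exactly this swap.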
-<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="https://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
 <!--
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -19,288 +21,299 @@
  * limitations under the License.
  */
 -->
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <artifactId>hbase-build-configuration</artifactId>
-    <groupId>org.apache.hbase</groupId>
-    <version>2.6.0-SNAPSHOT</version>
-    <relativePath>../hbase-build-configuration</relativePath>
-  </parent>
-  <artifactId>hbase-testing-util</artifactId>
-  <name>Apache HBase - Testing Util</name>
-  <description>HBase Testing Utilities.</description>
-  <dependencies>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hbase-build-configuration</artifactId>
+    <groupId>org.apache.hbase</groupId>
+    <version>2.6.0-SNAPSHOT</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
+  <artifactId>hbase-testing-util</artifactId>
+  <name>Apache HBase - Testing Util</name>
+  <description>HBase Testing Utilities.</description>
+  <dependencies>
   <!-- Intra-project dependencies -->
   <!-- we do not want to introduce this to downstream users so still set the scope to test -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-logging</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>jdk.tools</groupId>
+          <artifactId>jdk.tools</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-asyncfs</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>${compat.module}</artifactId>
+      <type>test-jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jcl-over-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- Profiles for building against different hadoop versions -->
+    <!-- There are a lot of common dependencies used here, should investigate
+         if we can combine these profiles somehow -->
+
+    <!-- profile for building against Hadoop 2.x. This is the default -->
+    <profile>
+      <id>hadoop-2.0</id>
+      <activation>
+        <property>
+          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+          <!--h2-->
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
         <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-logging</artifactId>
-          <type>test-jar</type>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-common</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-annotations</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>jdk.tools</groupId>
-              <artifactId>jdk.tools</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-protocol</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-client</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-zookeeper</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-auth</artifactId>
+          <scope>compile</scope>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-server</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-asyncfs</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>hbase-hadoop-compat</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <scope>compile</scope>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>${compat.module}</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <type>test-jar</type>
+          <scope>compile</scope>
        </dependency>
        <dependency>
-          <groupId>org.apache.hbase</groupId>
-          <artifactId>${compat.module}</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.zookeeper</groupId>
+              <artifactId>zookeeper</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.slf4j</groupId>
-          <artifactId>jcl-over-slf4j</artifactId>
-          <scope>test</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minikdc</artifactId>
        </dependency>
+      </dependencies>
+    </profile>
+    <!--
+      profile for building against Hadoop 3.0.x. Activate using:
+      mvn -Dhadoop.profile=3.0
+    -->
+    <profile>
+      <id>hadoop-3.0</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>3.0</value>
+        </property>
+      </activation>
+      <dependencies>
        <dependency>
-          <groupId>org.slf4j</groupId>
-          <artifactId>jul-to-slf4j</artifactId>
-          <scope>test</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>javax.xml.bind</groupId>
+              <artifactId>jaxb-api</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.ws.rs</groupId>
+              <artifactId>jsr311-api</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>org.slf4j</groupId>
-          <artifactId>slf4j-reload4j</artifactId>
-          <scope>test</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <scope>compile</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>javax.ws.rs</groupId>
+              <artifactId>jsr311-api</artifactId>
+            </exclusion>
+          </exclusions>
        </dependency>
        <dependency>
-          <groupId>ch.qos.reload4j</groupId>
-          <artifactId>reload4j</artifactId>
-          <scope>test</scope>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minikdc</artifactId>
        </dependency>
-      </dependencies>
-
-  <profiles>
-    <!-- Profiles for building against different hadoop versions -->
-    <!-- There are a lot of common dependencies used here, should investigate
-         if we can combine these profiles somehow -->
-
-    <!-- profile for building against Hadoop 2.x. This is the default -->
-    <profile>
-      <id>hadoop-2.0</id>
-      <activation>
-        <property>
-          <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-          <!--h2--><name>!hadoop.profile</name>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>javax.xml.bind</groupId>
-              <artifactId>jaxb-api</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-auth</artifactId>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>javax.xml.bind</groupId>
-              <artifactId>jaxb-api</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>javax.xml.bind</groupId>
-              <artifactId>jaxb-api</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <type>test-jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>org.apache.zookeeper</groupId>
-              <artifactId>zookeeper</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minikdc</artifactId>
-        </dependency>
-      </dependencies>
-    </profile>
-    <!--
-      profile for building against Hadoop 3.0.x. Activate using:
-      mvn -Dhadoop.profile=3.0
-    -->
-    <profile>
-      <id>hadoop-3.0</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.0</value>
-        </property>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-common</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>javax.xml.bind</groupId>
-              <artifactId>jaxb-api</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>javax.ws.rs</groupId>
-              <artifactId>jsr311-api</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <scope>compile</scope>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-            <exclusion>
-              <groupId>javax.ws.rs</groupId>
-              <artifactId>jsr311-api</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minikdc</artifactId>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
+      </dependencies>
+    </profile>
+  </profiles>
 </project>
diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml
index 54c1f607182e..6a8b3df7cba1 100644
--- a/hbase-thrift/pom.xml
+++ b/hbase-thrift/pom.xml
@@ -255,13 +255,23 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 45029db88518..b2a5b1ca9bbf 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -174,13 +174,18 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>ch.qos.reload4j</groupId>
-      <artifactId>reload4j</artifactId>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/pom.xml b/pom.xml
index e247e5856a1a..b9dea2d37b8d 100755
--- a/pom.xml
+++ b/pom.xml
@@ -688,7 +688,7 @@
             </goals>
             <configuration>
              <excludes>
-                <exclude>log4j.properties</exclude>
+                <exclude>log4j2.xml</exclude>
              </excludes>
            </configuration>
          </execution>
@@ -1060,11 +1060,28 @@
              <rules>
                <bannedDependencies>
                  <excludes>
-                    <exclude>log4j:**</exclude>
+                    <exclude>log4j:log4j</exclude>
+                  </excludes>
+                  <message>
+                    We do not allow log4j dependencies, as we now use log4j2
+                  </message>
+                </bannedDependencies>
+              </rules>
+            </configuration>
+          </execution>
+          <execution>
+            <id>banned-slf4j-log4j12</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <bannedDependencies>
+                  <excludes>
                     <exclude>org.slf4j:slf4j-log4j12</exclude>
                   </excludes>
                   <message>
-                    Use reload4j instead
+                    We do not allow the slf4j-log4j12 dependency, as we now use log4j-slf4j-impl
                   </message>
                 </bannedDependencies>
               </rules>
@@ -1119,16 +1136,18 @@
                 <reason>Use SLF4j for logging</reason>
                 <bannedImports>
                   <bannedImport>org.apache.commons.logging.**</bannedImport>
+                  <bannedImport>org.apache.log4j.**</bannedImport>
+                  <bannedImport>org.apache.logging.log4j.**</bannedImport>
                 </bannedImports>
               </restrictImports>
               <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
                 <includeTestCode>false</includeTestCode>
                 <commentLineBufferSize>512</commentLineBufferSize>
                 <reason>
-                  Do not use log4j directly in code, see Log4jUtils in hbase-logging for more details.
+                  Do not use log4j2 directly in code, see Log4jUtils in hbase-logging for more details.
                 </reason>
                 <bannedImports>
-                  <bannedImport>org.apache.log4j.**</bannedImport>
+                  <bannedImport>org.apache.logging.log4j.**</bannedImport>
                 </bannedImports>
               </restrictImports>
               <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
@@ -1533,7 +1552,7 @@
     <hamcrest.version>1.3</hamcrest.version>
     <opentelemetry.version>1.0.1</opentelemetry.version>
     <opentelemetry-javaagent.version>1.0.1</opentelemetry-javaagent.version>
-    <reload4j.version>1.2.19</reload4j.version>
+    <log4j2.version>2.17.2</log4j2.version>
    <mockito-core.version>2.28.2</mockito-core.version>
    <!--Internally we use a different version of protobuf. See hbase-protocol-shaded-->
    <external.protobuf.groupid>com.google.protobuf</external.protobuf.groupid>
@@ -1993,8 +2012,8 @@
      </dependency>
      <!-- Logging dependencies. In general, we use slf4j as the log facade in HBase, so all sub
-        modules should depend on slf4j-api at compile scope, and then depend on slf4j-log4j12
-        and log4j at test scope(and in hbase-assembly when shipping the binary) to redirect the
+        modules should depend on slf4j-api at compile scope, and then depend on log4j-slf4j-impl
+        and log4j2 at test scope (and in hbase-assembly when shipping the binary) to redirect the
        log message to log4j. Do not introduce logging dependencies other than slf4j-api at compile
        scope as it will mess up the logging framework for downstream users.
        Here we also depend on jcl-over-slf4j and jul-to-slf4j, as some of the libraries we depend
@@ -2006,16 +2025,12 @@
        <artifactId>jettison</artifactId>
        <version>${jettison.version}</version>
      </dependency>
+      <!-- Logging -->
      <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
        <version>${slf4j.version}</version>
      </dependency>
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-reload4j</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
      <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>jcl-over-slf4j</artifactId>
@@ -2027,9 +2042,24 @@
        <version>${slf4j.version}</version>
      </dependency>
      <dependency>
-        <groupId>ch.qos.reload4j</groupId>
-        <artifactId>reload4j</artifactId>
-        <version>${reload4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-api</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-core</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
      </dependency>
      <!-- Avro dependencies we mostly get transitively, manual version coallescing -->
      <dependency>
@@ -2037,8 +2067,6 @@
        <artifactId>avro</artifactId>
        <version>${avro.version}</version>
      </dependency>
-      <!--This is not used by hbase directly. Used by thrift,
-        dropwizard and zk.-->
      <dependency>
        <groupId>com.github.ben-manes.caffeine</groupId>
        <artifactId>caffeine</artifactId>
@@ -3388,6 +3416,46 @@
          </exclusion>
        </exclusions>
      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-app</artifactId>
+        <version>${hadoop-three.version}</version>
+        <type>test-jar</type>
+        <exclusions>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-mapper-asl</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-core-asl</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-jaxrs</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-xc</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.xml.bind</groupId>
+            <artifactId>jaxb-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.ws.rs</groupId>
+            <artifactId>jsr311-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -3414,10 +3482,6 @@
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
        </exclusions>
      </dependency>
      <dependency>
@@ -3447,10 +3511,6 @@
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
        </exclusions>
      </dependency>
      <dependency>
@@ -3863,10 +3923,6 @@
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
-          <exclusion>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-          </exclusion>
        </exclusions>
      </dependency>
      <dependency>
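The enforcer work above splits the old single ban in two: log4j:log4j is now banned outright (old log4j 1.x callers are expected to go through the log4j-1.2-api bridge instead), and org.slf4j:slf4j-log4j12 is banned in favour of log4j-slf4j-impl. When a transitive dependency still drags a banned artifact in, as the Hadoop test jars above do, the fix is an exclusion on the offending dependency. A sketch of that pattern, using the hypothetical coordinates some.thirdparty:widget for the offender:

    <dependency>
      <groupId>some.thirdparty</groupId>
      <artifactId>widget</artifactId>
      <exclusions>
        <!-- banned by the build: only the log4j-1.2-api bridge may satisfy log4j 1.x callers -->
        <exclusion>
          <groupId>log4j</groupId>
          <artifactId>log4j</artifactId>
        </exclusion>
        <!-- banned by the build: log4j-slf4j-impl provides the slf4j binding instead -->
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

With the exclusions in place, the managed log4j2 dependencies declared in dependencyManagement above supply the logging stack at a single, consistent ${log4j2.version}.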