messages = logCaptureContext.getMessages();
assertThat(
messages,
- hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
+ hasItem(containsString("Returning empty iterator for fetch with invalid key range: from > to." +
" This may be due to serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
- " Note that the built-in numerical serdes do not follow this for negative numbers")
+ " Note that the built-in numerical serdes do not follow this for negative numbers"))
);
}
}
diff --git a/streams/src/test/java/org/apache/kafka/test/LogCaptureContext.java b/streams/src/test/java/org/apache/kafka/test/LogCaptureContext.java
new file mode 100644
index 0000000000000..05dd0d27065e1
--- /dev/null
+++ b/streams/src/test/java/org/apache/kafka/test/LogCaptureContext.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+import org.apache.logging.log4j.test.appender.ListAppender;
+
+/**
+ * This class provides an isolated logging context for logging tests. You can also set the logging
+ * level of the loggers for a given context differently.
+ *
+ *
+ * By default, the context uses the definition in src/test/resources/log4j2.properties:
+ *
+ * // Creates a logging context with default configurations
+ * try (final LogCaptureContext logCaptureContext = LogCaptureContext.create()) {
+ * ...
+ * }
+ *
+ *
+ * You can override the default logging levels by passing a map from the logger name to the desired level, like:
+ *
+ * // A logging context with default configuration, but 'foo.bar' logger's level is set to WARN.
+ * try (final LogCaptureContext logCaptureContext = LogCaptureContext.create(
+ * Collections.singletonMap("foo.bar", "WARN")
+ * )) {
+ * ...
+ * }
+ *
+ *
+ * Since the logging messages are appended asynchronously, you should wait until the appender process
+ * the given messages with {@link #setLatch(int)} and {@link #await(long, TimeUnit)} methods, like:
+ *
+ * try (final LogCaptureContext logCaptureContext = LogCaptureContext.create(...)) {
+ * // We expect there will be at least 5 logging messages.
+ * logCaptureContext.setLatch(5);
+ *
+ * // The routine to test ...
+ *
+ * // Wait for the appender to finish processing the logging messages, 10 seconds in maximum.
+ * logCaptureContext.await(10L, TimeUnit.SECONDS);
+ * assertThat(
+ * logCaptureContext.getMessages(),
+ * hasItem("the logging message is appended"));
+ * }
+ *
+ *
+ * Note: The tests may hang up if you set the messages count too high.
+ */
+public class LogCaptureContext implements AutoCloseable {
+ private final ListAppender listAppender;
+ private final Map<String, Level> prevLevelMap = new HashMap<>();
+
+ public static LogCaptureContext create() {
+ return create(new HashMap<>());
+ }
+
+ public static LogCaptureContext create(final Map<String, String> levelMap) {
+ return new LogCaptureContext(levelMap);
+ }
+
+ private LogCaptureContext(final Map<String, String> levelMap) {
+ final LoggerContext loggerContext = LoggerContext.getContext(false);
+ listAppender = ListAppender.createAppender("logger-context-" + TestUtils.randomString(8),
+ false, false, PatternLayout.newBuilder().withPattern("%p %m %throwable").build(), null);
+ listAppender.start();
+ loggerContext.getConfiguration().addAppender(listAppender);
+ loggerContext.getRootLogger().addAppender(listAppender);
+
+ for (final String loggerName : levelMap.keySet()) {
+ final Logger logger = loggerContext.getLogger(loggerName);
+
+ // Store the previous logger level
+ this.prevLevelMap.put(loggerName, logger.getLevel());
+
+ // Change the logger level
+ logger.setLevel(Level.getLevel(levelMap.get(loggerName)));
+ }
+ }
+
+ /**
+ * Set the expected number of events.
+ *
+ * @param size number of expected logging events
+ */
+ public void setLatch(final int size) {
+ this.listAppender.countDownLatch = new CountDownLatch(size);
+ }
+
+ /**
+ * Wait for the appender to finish processing the expected number of events.
+ *
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ */
+ public void await() throws InterruptedException {
+ await(10, TimeUnit.SECONDS);
+ }
+
+ /**
+ * Wait for the appender to finish processing the expected number of events.
+ *
+ * @throws InterruptedException if the current thread is interrupted while waiting
+ */
+ public void await(final long l, final TimeUnit timeUnit) throws InterruptedException {
+ this.listAppender.countDownLatch.await(l, timeUnit);
+ }
+
+ /**
+ * Returns the appended log messages.
+ *
+ * @return appended log messages
+ */
+ public List<String> getMessages() {
+ return listAppender.getMessages();
+ }
+
+ @Override
+ public void close() {
+ final LoggerContext loggerContext = LoggerContext.getContext(false);
+ loggerContext.getRootLogger().removeAppender(listAppender);
+ listAppender.stop();
+
+ for (final String loggerName : this.prevLevelMap.keySet()) {
+ final Logger logger = loggerContext.getLogger(loggerName);
+
+ // Restore previous logger level
+ logger.setLevel(this.prevLevelMap.get(loggerName));
+ }
+ }
+}
diff --git a/streams/src/test/resources/log4j.properties b/streams/src/test/resources/log4j.properties
deleted file mode 100644
index 050cd679f06e7..0000000000000
--- a/streams/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.kafka=ERROR
-log4j.logger.state.change.logger=ERROR
-log4j.logger.org.apache.kafka=ERROR
-log4j.logger.org.apache.zookeeper=ERROR
-
-# printing out the configs takes up a huge amount of the allotted characters,
-# and provides little value as we can always figure out the test configs without the logs
-log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR
-log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR
-
-# These are the only logs we will likely ever find anything useful in to debug Streams test failures
-log4j.logger.org.apache.kafka.clients=INFO
-log4j.logger.org.apache.kafka.streams=INFO
diff --git a/streams/src/test/resources/log4j2.properties b/streams/src/test/resources/log4j2.properties
new file mode 100644
index 0000000000000..8fd8a3e425035
--- /dev/null
+++ b/streams/src/test/resources/log4j2.properties
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name=TestConfig
+appenders=console
+
+appender.console.type=Console
+appender.console.name=STDOUT
+appender.console.layout.type=PatternLayout
+appender.console.layout.pattern=[%d] %p %m (%c:%L)%n
+
+rootLogger.level=INFO
+rootLogger.appenderRefs=stdout
+rootLogger.appenderRef.stdout.ref=STDOUT
+
+loggers=kafka,org.apache.zookeeper,org.apache.kafka.clients.producer.ProducerConfig,org.apache.kafka.clients.consumer.ConsumerConfig,org.apache.kafka.clients.admin.AdminClientConfig,org.apache.kafka.clients,org.apache.kafka.streams.StreamsConfig,org.apache.kafka.streams,org.apache.kafka,state.change.logger
+
+logger.kafka.name=kafka
+logger.kafka.level=ERROR
+
+logger.org.apache.zookeeper.name=org.apache.zookeeper
+logger.org.apache.zookeeper.level=ERROR
+
+# printing out the configs takes up a huge amount of the allotted characters,
+# and provides little value as we can always figure out the test configs without the logs
+
+logger.org.apache.kafka.clients.producer.ProducerConfig.name=org.apache.kafka.clients.producer.ProducerConfig
+logger.org.apache.kafka.clients.producer.ProducerConfig.level=ERROR
+
+logger.org.apache.kafka.clients.consumer.ConsumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig
+logger.org.apache.kafka.clients.consumer.ConsumerConfig.level=ERROR
+
+logger.org.apache.kafka.clients.admin.AdminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig
+logger.org.apache.kafka.clients.admin.AdminClientConfig.level=ERROR
+
+logger.org.apache.kafka.streams.StreamsConfig.name=org.apache.kafka.streams.StreamsConfig
+logger.org.apache.kafka.streams.StreamsConfig.level=WARN
+
+# These are the only logs we will likely ever find anything useful in to debug Streams test failures
+logger.org.apache.kafka.clients.name=org.apache.kafka.clients
+logger.org.apache.kafka.clients.level=INFO
+
+logger.org.apache.kafka.streams.name=org.apache.kafka.streams
+logger.org.apache.kafka.streams.level=INFO
+
+logger.org.apache.kafka.name=org.apache.kafka
+logger.org.apache.kafka.level=ERROR
+
+logger.state.change.logger.name=state.change.logger
+logger.state.change.logger.level=ERROR
+
diff --git a/streams/test-utils/src/test/resources/log4j.properties b/streams/test-utils/src/test/resources/log4j.properties
deleted file mode 100644
index be36f90299a77..0000000000000
--- a/streams/test-utils/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.org.apache.kafka=INFO
diff --git a/metadata/src/test/resources/log4j.properties b/streams/test-utils/src/test/resources/log4j2.properties
similarity index 57%
rename from metadata/src/test/resources/log4j.properties
rename to streams/test-utils/src/test/resources/log4j2.properties
index db3879386f10f..691a13e25819e 100644
--- a/metadata/src/test/resources/log4j.properties
+++ b/streams/test-utils/src/test/resources/log4j2.properties
@@ -12,11 +12,25 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=DEBUG, stdout
+name=TestConfig
+status = INFO
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+appenders = console
+
+appender.console.type=Console
+appender.console.name=STDOUT
+appender.console.layout.type=PatternLayout
+appender.console.layout.pattern=[%d] %p %m (%c:%L)%n
+
+rootLogger.level = info
+rootLogger.appenderRefs = stdout
+rootLogger.appenderRef.stdout.ref = STDOUT
+
+loggers=org.apache.kafka.raft,org.apache.kafka.snapshot
+
+logger.org.apache.kafka.raft.name=org.apache.kafka.raft
+logger.org.apache.kafka.raft.level=ERROR
+
+logger.org.apache.kafka.snapshot.name=org.apache.kafka.snapshot
+logger.org.apache.kafka.snapshot.level=ERROR
-log4j.logger.org.apache.kafka=DEBUG
-log4j.logger.org.apache.zookeeper=WARN
diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py
index 41c33ccb9e102..19415d92b3a8d 100644
--- a/tests/kafkatest/services/connect.py
+++ b/tests/kafkatest/services/connect.py
@@ -38,7 +38,7 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service):
LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log")
STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout")
STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr")
- LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties")
+ LOG4J2_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j2.properties")
PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid")
EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties")
CONNECT_REST_PORT = 8083
@@ -317,7 +317,7 @@ def node(self):
return self.nodes[0]
def start_cmd(self, node, connector_configs):
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
+ cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:%s\"; " % self.LOG4J2_CONFIG_FILE
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -341,7 +341,7 @@ def start_node(self, node):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(self.LOG4J2_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE))
remote_connector_configs = []
for idx, template in enumerate(self.connector_config_templates):
target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties")
@@ -377,7 +377,7 @@ def __init__(self, context, num_nodes, kafka, files, offsets_topic="connect-offs
# connector_configs argument is intentionally ignored in distributed service.
def start_cmd(self, node, connector_configs):
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
+ cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:%s\"; " % self.LOG4J2_CONFIG_FILE
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -398,7 +398,7 @@ def start_node(self, node):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(self.LOG4J2_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE))
if self.connector_config_templates:
raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API")
diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py
index 55b5b7b87141b..d3e69648c37b8 100644
--- a/tests/kafkatest/services/kafka/kafka.py
+++ b/tests/kafkatest/services/kafka/kafka.py
@@ -144,7 +144,7 @@ class for details.
"""
PERSISTENT_ROOT = "/mnt/kafka"
STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties")
+ LOG4J2_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j2.properties")
# Logs such as controller.log, server.log, etc all go here
OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs")
OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info")
@@ -744,7 +744,7 @@ def render_configs(self, configs):
def start_cmd(self, node):
cmd = "export JMX_PORT=%d; " % self.jmx_port
- cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG
+ cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:%s\"; " % self.LOG4J2_CONFIG
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["kafka_heap_dump_file"]["path"]
security_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -807,7 +807,7 @@ def start_node(self, node, timeout_sec=60):
self.logger.info("kafka.properties:")
self.logger.info(prop_file)
node.account.create_file(KafkaService.CONFIG_FILE, prop_file)
- node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))
+ node.account.create_file(self.LOG4J2_CONFIG, self.render('log4j2.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))
if self.quorum_info.using_kraft:
# format log directories if necessary
diff --git a/tests/kafkatest/services/kafka/templates/log4j.properties b/tests/kafkatest/services/kafka/templates/log4j.properties
deleted file mode 100644
index 5963c39c089df..0000000000000
--- a/tests/kafkatest/services/kafka/templates/log4j.properties
+++ /dev/null
@@ -1,136 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger={{ log_level|default("DEBUG") }}, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-# INFO level appenders
-log4j.appender.kafkaInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.kafkaInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.kafkaInfoAppender.File={{ log_dir }}/info/server.log
-log4j.appender.kafkaInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.kafkaInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.kafkaInfoAppender.Threshold=INFO
-
-log4j.appender.stateChangeInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.stateChangeInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.stateChangeInfoAppender.File={{ log_dir }}/info/state-change.log
-log4j.appender.stateChangeInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.stateChangeInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.stateChangeInfoAppender.Threshold=INFO
-
-log4j.appender.requestInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.requestInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.requestInfoAppender.File={{ log_dir }}/info/kafka-request.log
-log4j.appender.requestInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.requestInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.requestInfoAppender.Threshold=INFO
-
-log4j.appender.cleanerInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.cleanerInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.cleanerInfoAppender.File={{ log_dir }}/info/log-cleaner.log
-log4j.appender.cleanerInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.cleanerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.cleanerInfoAppender.Threshold=INFO
-
-log4j.appender.controllerInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.controllerInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.controllerInfoAppender.File={{ log_dir }}/info/controller.log
-log4j.appender.controllerInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.controllerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.controllerInfoAppender.Threshold=INFO
-
-log4j.appender.authorizerInfoAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.authorizerInfoAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.authorizerInfoAppender.File={{ log_dir }}/info/kafka-authorizer.log
-log4j.appender.authorizerInfoAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.authorizerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.authorizerInfoAppender.Threshold=INFO
-
-# DEBUG level appenders
-log4j.appender.kafkaDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.kafkaDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.kafkaDebugAppender.File={{ log_dir }}/debug/server.log
-log4j.appender.kafkaDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.kafkaDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.kafkaDebugAppender.Threshold=DEBUG
-
-log4j.appender.stateChangeDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.stateChangeDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.stateChangeDebugAppender.File={{ log_dir }}/debug/state-change.log
-log4j.appender.stateChangeDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.stateChangeDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.stateChangeDebugAppender.Threshold=DEBUG
-
-log4j.appender.requestDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.requestDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.requestDebugAppender.File={{ log_dir }}/debug/kafka-request.log
-log4j.appender.requestDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.requestDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.requestDebugAppender.Threshold=DEBUG
-
-log4j.appender.cleanerDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.cleanerDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.cleanerDebugAppender.File={{ log_dir }}/debug/log-cleaner.log
-log4j.appender.cleanerDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.cleanerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.cleanerDebugAppender.Threshold=DEBUG
-
-log4j.appender.controllerDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.controllerDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.controllerDebugAppender.File={{ log_dir }}/debug/controller.log
-log4j.appender.controllerDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.controllerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.controllerDebugAppender.Threshold=DEBUG
-
-log4j.appender.authorizerDebugAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.authorizerDebugAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.authorizerDebugAppender.File={{ log_dir }}/debug/kafka-authorizer.log
-log4j.appender.authorizerDebugAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.authorizerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-log4j.appender.authorizerDebugAppender.Threshold=DEBUG
-
-# Turn on all our debugging info
-log4j.logger.kafka.producer.async.DefaultEventHandler={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender
-log4j.logger.kafka.client.ClientUtils={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender
-log4j.logger.kafka.perf={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender
-log4j.logger.kafka.perf.ProducerPerformance$ProducerThread={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender
-log4j.logger.kafka={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender
-
-log4j.logger.kafka.network.RequestChannel$={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender
-log4j.additivity.kafka.network.RequestChannel$=false
-
-log4j.logger.kafka.network.Processor={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender
-log4j.logger.kafka.server.KafkaApis={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender
-log4j.additivity.kafka.server.KafkaApis=false
-log4j.logger.kafka.request.logger={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender
-log4j.additivity.kafka.request.logger=false
-
-log4j.logger.kafka.controller={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender
-log4j.additivity.kafka.controller=false
-
-log4j.logger.kafka.log.LogCleaner={{ log_level|default("DEBUG") }}, cleanerInfoAppender, cleanerDebugAppender
-log4j.additivity.kafka.log.LogCleaner=false
-
-log4j.logger.state.change.logger={{ log_level|default("DEBUG") }}, stateChangeInfoAppender, stateChangeDebugAppender
-log4j.additivity.state.change.logger=false
-
-#Change this to debug to get the actual audit log for authorizer.
-log4j.logger.kafka.authorizer.logger={{ log_level|default("DEBUG") }}, authorizerInfoAppender, authorizerDebugAppender
-log4j.additivity.kafka.authorizer.logger=false
-
diff --git a/tests/kafkatest/services/kafka/templates/log4j2.properties b/tests/kafkatest/services/kafka/templates/log4j2.properties
new file mode 100644
index 0000000000000..d182c8afa8dd0
--- /dev/null
+++ b/tests/kafkatest/services/kafka/templates/log4j2.properties
@@ -0,0 +1,297 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name=TestConfig
+
+appenders=stdout,kafkaInfoAppender,requestInfoAppender,controllerInfoAppender,cleanerInfoAppender,stateChangeInfoAppender,authorizerInfoAppender,kafkaDebugAppender,requestDebugAppender,controllerDebugAppender,cleanerDebugAppender,stateChangeDebugAppender,authorizerDebugAppender
+
+appender.stdout.type=Console
+appender.stdout.name=STDOUT
+appender.stdout.layout.type=PatternLayout
+appender.stdout.layout.pattern=[%d] %p %m (%c)%n
+
+# INFO level appenders
+appender.kafkaInfoAppender.type=RollingFile
+appender.kafkaInfoAppender.name=KAFKA_INFO_APPENDER
+appender.kafkaInfoAppender.fileName={{ log_dir }}/info/server.log
+appender.kafkaInfoAppender.filePattern={{ log_dir }}/info/server.log.%d{yyyy-MM-dd}.log.gz
+appender.kafkaInfoAppender.layout.type=PatternLayout
+appender.kafkaInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.kafkaInfoAppender.filter.threshold.type=ThresholdFilter
+appender.kafkaInfoAppender.filter.threshold.level=INFO
+appender.kafkaInfoAppender.policies.type=Policies
+appender.kafkaInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.kafkaInfoAppender.policies.time.interval=1
+appender.kafkaInfoAppender.policies.time.modulate=true
+appender.kafkaInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.kafkaInfoAppender.strategy.max=1
+
+appender.requestInfoAppender.type=RollingFile
+appender.requestInfoAppender.name=REQUEST_INFO_APPENDER
+appender.requestInfoAppender.fileName={{ log_dir }}/info/kafka-request.log
+appender.requestInfoAppender.filePattern={{ log_dir }}/info/kafka-request.log.%d{yyyy-MM-dd}.log.gz
+appender.requestInfoAppender.layout.type=PatternLayout
+appender.requestInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.requestInfoAppender.filter.threshold.type=ThresholdFilter
+appender.requestInfoAppender.filter.threshold.level=INFO
+appender.requestInfoAppender.policies.type=Policies
+appender.requestInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.requestInfoAppender.policies.time.interval=1
+appender.requestInfoAppender.policies.time.modulate=true
+appender.requestInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.requestInfoAppender.strategy.max=1
+
+appender.controllerInfoAppender.type=RollingFile
+appender.controllerInfoAppender.name=CONTROLLER_INFO_APPENDER
+appender.controllerInfoAppender.fileName={{ log_dir }}/info/controller.log
+appender.controllerInfoAppender.filePattern={{ log_dir }}/info/controller.log.%d{yyyy-MM-dd}.log.gz
+appender.controllerInfoAppender.layout.type=PatternLayout
+appender.controllerInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.controllerInfoAppender.filter.threshold.type=ThresholdFilter
+appender.controllerInfoAppender.filter.threshold.level=INFO
+appender.controllerInfoAppender.policies.type=Policies
+appender.controllerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.controllerInfoAppender.policies.time.interval=1
+appender.controllerInfoAppender.policies.time.modulate=true
+appender.controllerInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.controllerInfoAppender.strategy.max=1
+
+appender.cleanerInfoAppender.type=RollingFile
+appender.cleanerInfoAppender.name=CLEANER_INFO_APPENDER
+appender.cleanerInfoAppender.fileName={{ log_dir }}/info/log-cleaner.log
+appender.cleanerInfoAppender.filePattern={{ log_dir }}/info/log-cleaner.log.%d{yyyy-MM-dd}.log.gz
+appender.cleanerInfoAppender.layout.type=PatternLayout
+appender.cleanerInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.cleanerInfoAppender.filter.threshold.type=ThresholdFilter
+appender.cleanerInfoAppender.filter.threshold.level=INFO
+appender.cleanerInfoAppender.policies.type=Policies
+appender.cleanerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.cleanerInfoAppender.policies.time.interval=1
+appender.cleanerInfoAppender.policies.time.modulate=true
+appender.cleanerInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.cleanerInfoAppender.strategy.max=1
+
+appender.stateChangeInfoAppender.type=RollingFile
+appender.stateChangeInfoAppender.name=STATE_CHANGE_INFO_APPENDER
+appender.stateChangeInfoAppender.fileName={{ log_dir }}/info/state-change.log
+appender.stateChangeInfoAppender.filePattern={{ log_dir }}/info/state-change.log.%d{yyyy-MM-dd}.log.gz
+appender.stateChangeInfoAppender.layout.type=PatternLayout
+appender.stateChangeInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.stateChangeInfoAppender.filter.threshold.type=ThresholdFilter
+appender.stateChangeInfoAppender.filter.threshold.level=INFO
+appender.stateChangeInfoAppender.policies.type=Policies
+appender.stateChangeInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.stateChangeInfoAppender.policies.time.interval=1
+appender.stateChangeInfoAppender.policies.time.modulate=true
+appender.stateChangeInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.stateChangeInfoAppender.strategy.max=1
+
+appender.authorizerInfoAppender.type=RollingFile
+appender.authorizerInfoAppender.name=AUTHORIZER_INFO_APPENDER
+appender.authorizerInfoAppender.fileName={{ log_dir }}/info/kafka-authorizer.log
+appender.authorizerInfoAppender.filePattern={{ log_dir }}/info/kafka-authorizer.log.%d{yyyy-MM-dd}.log.gz
+appender.authorizerInfoAppender.layout.type=PatternLayout
+appender.authorizerInfoAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.authorizerInfoAppender.filter.threshold.type=ThresholdFilter
+appender.authorizerInfoAppender.filter.threshold.level=INFO
+appender.authorizerInfoAppender.policies.type=Policies
+appender.authorizerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.authorizerInfoAppender.policies.time.interval=1
+appender.authorizerInfoAppender.policies.time.modulate=true
+appender.authorizerInfoAppender.strategy.type=DefaultRolloverStrategy
+appender.authorizerInfoAppender.strategy.max=1
+
+# DEBUG level appenders
+appender.kafkaDebugAppender.type=RollingFile
+appender.kafkaDebugAppender.name=KAFKA_DEBUG_APPENDER
+appender.kafkaDebugAppender.fileName={{ log_dir }}/debug/server.log
+appender.kafkaDebugAppender.filePattern={{ log_dir }}/debug/server.log.%d{yyyy-MM-dd}.log.gz
+appender.kafkaDebugAppender.layout.type=PatternLayout
+appender.kafkaDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.kafkaDebugAppender.filter.threshold.type=ThresholdFilter
+appender.kafkaDebugAppender.filter.threshold.level=DEBUG
+appender.kafkaDebugAppender.policies.type=Policies
+appender.kafkaDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.kafkaDebugAppender.policies.time.interval=1
+appender.kafkaDebugAppender.policies.time.modulate=true
+appender.kafkaDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.kafkaDebugAppender.strategy.max=1
+
+appender.requestDebugAppender.type=RollingFile
+appender.requestDebugAppender.name=REQUEST_DEBUG_APPENDER
+appender.requestDebugAppender.fileName={{ log_dir }}/debug/kafka-request.log
+appender.requestDebugAppender.filePattern={{ log_dir }}/debug/kafka-request.log.%d{yyyy-MM-dd}.log.gz
+appender.requestDebugAppender.layout.type=PatternLayout
+appender.requestDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.requestDebugAppender.filter.threshold.type=ThresholdFilter
+appender.requestDebugAppender.filter.threshold.level=DEBUG
+appender.requestDebugAppender.policies.type=Policies
+appender.requestDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.requestDebugAppender.policies.time.interval=1
+appender.requestDebugAppender.policies.time.modulate=true
+appender.requestDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.requestDebugAppender.strategy.max=1
+
+appender.controllerDebugAppender.type=RollingFile
+appender.controllerDebugAppender.name=CONTROLLER_DEBUG_APPENDER
+appender.controllerDebugAppender.fileName={{ log_dir }}/debug/controller.log
+appender.controllerDebugAppender.filePattern={{ log_dir }}/debug/controller.log.%d{yyyy-MM-dd}.log.gz
+appender.controllerDebugAppender.layout.type=PatternLayout
+appender.controllerDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.controllerDebugAppender.filter.threshold.type=ThresholdFilter
+appender.controllerDebugAppender.filter.threshold.level=DEBUG
+appender.controllerDebugAppender.policies.type=Policies
+appender.controllerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.controllerDebugAppender.policies.time.interval=1
+appender.controllerDebugAppender.policies.time.modulate=true
+appender.controllerDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.controllerDebugAppender.strategy.max=1
+
+appender.cleanerDebugAppender.type=RollingFile
+appender.cleanerDebugAppender.name=CLEANER_DEBUG_APPENDER
+appender.cleanerDebugAppender.fileName={{ log_dir }}/debug/log-cleaner.log
+appender.cleanerDebugAppender.filePattern={{ log_dir }}/debug/log-cleaner.log.%d{yyyy-MM-dd}.log.gz
+appender.cleanerDebugAppender.layout.type=PatternLayout
+appender.cleanerDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.cleanerDebugAppender.filter.threshold.type=ThresholdFilter
+appender.cleanerDebugAppender.filter.threshold.level=DEBUG
+appender.cleanerDebugAppender.policies.type=Policies
+appender.cleanerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.cleanerDebugAppender.policies.time.interval=1
+appender.cleanerDebugAppender.policies.time.modulate=true
+appender.cleanerDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.cleanerDebugAppender.strategy.max=1
+
+appender.stateChangeDebugAppender.type=RollingFile
+appender.stateChangeDebugAppender.name=STATE_CHANGE_DEBUG_APPENDER
+appender.stateChangeDebugAppender.fileName={{ log_dir }}/debug/state-change.log
+appender.stateChangeDebugAppender.filePattern={{ log_dir }}/debug/state-change.log.%d{yyyy-MM-dd}.log.gz
+appender.stateChangeDebugAppender.layout.type=PatternLayout
+appender.stateChangeDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.stateChangeDebugAppender.filter.threshold.type=ThresholdFilter
+appender.stateChangeDebugAppender.filter.threshold.level=DEBUG
+appender.stateChangeDebugAppender.policies.type=Policies
+appender.stateChangeDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.stateChangeDebugAppender.policies.time.interval=1
+appender.stateChangeDebugAppender.policies.time.modulate=true
+appender.stateChangeDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.stateChangeDebugAppender.strategy.max=1
+
+appender.authorizerDebugAppender.type=RollingFile
+appender.authorizerDebugAppender.name=AUTHORIZER_DEBUG_APPENDER
+appender.authorizerDebugAppender.fileName={{ log_dir }}/debug/kafka-authorizer.log
+appender.authorizerDebugAppender.filePattern={{ log_dir }}/debug/kafka-authorizer.log.%d{yyyy-MM-dd}.log.gz
+appender.authorizerDebugAppender.layout.type=PatternLayout
+appender.authorizerDebugAppender.layout.pattern=[%d] %p %m (%c)%n
+appender.authorizerDebugAppender.filter.threshold.type=ThresholdFilter
+appender.authorizerDebugAppender.filter.threshold.level=DEBUG
+appender.authorizerDebugAppender.policies.type=Policies
+appender.authorizerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy
+appender.authorizerDebugAppender.policies.time.interval=1
+appender.authorizerDebugAppender.policies.time.modulate=true
+appender.authorizerDebugAppender.strategy.type=DefaultRolloverStrategy
+appender.authorizerDebugAppender.strategy.max=1
+
+rootLogger.level={{ log_level|default("DEBUG") }}
+rootLogger.appenderRefs=stdout
+rootLogger.appenderRef.stdout.ref=STDOUT
+
+# Turn on all our debugging info
+loggers=state.change.logger,kafka.server.KafkaApis,kafka.request.logger,kafka.producer.async.DefaultEventHandler,kafka.perf.ProducerPerformance$ProducerThread,kafka.perf,kafka.network.RequestChannel$,kafka.network.Processor,kafka.log.LogCleaner,kafka.controller,kafka.client.ClientUtils,kafka.authorizer.logger,kafka
+
+logger.state.change.logger.name=state.change.logger
+logger.state.change.logger.level={{ log_level|default("DEBUG") }}
+logger.state.change.logger.additivity=false
+logger.state.change.logger.appenderRefs=stateChangeInfoAppender,stateChangeDebugAppender
+logger.state.change.logger.appenderRef.stateChangeInfoAppender.ref=STATE_CHANGE_INFO_APPENDER
+logger.state.change.logger.appenderRef.stateChangeDebugAppender.ref=STATE_CHANGE_DEBUG_APPENDER
+
+logger.kafka.server.KafkaApis.name=kafka.server.KafkaApis
+logger.kafka.server.KafkaApis.level={{ log_level|default("DEBUG") }}
+logger.kafka.server.KafkaApis.additivity=false
+logger.kafka.server.KafkaApis.appenderRefs=requestInfoAppender,requestDebugAppender
+logger.kafka.server.KafkaApis.appenderRef.requestInfoAppender.ref=REQUEST_INFO_APPENDER
+logger.kafka.server.KafkaApis.appenderRef.requestDebugAppender.ref=REQUEST_DEBUG_APPENDER
+
+logger.kafka.request.logger.name=kafka.request.logger
+logger.kafka.request.logger.level={{ log_level|default("DEBUG") }}
+logger.kafka.request.logger.additivity=false
+logger.kafka.request.logger.appenderRefs=requestInfoAppender,requestDebugAppender
+logger.kafka.request.logger.appenderRef.requestInfoAppender.ref=REQUEST_INFO_APPENDER
+logger.kafka.request.logger.appenderRef.requestDebugAppender.ref=REQUEST_DEBUG_APPENDER
+
+logger.kafka.producer.async.DefaultEventHandler.name=kafka.producer.async.DefaultEventHandler
+logger.kafka.producer.async.DefaultEventHandler.level={{ log_level|default("DEBUG") }}
+logger.kafka.producer.async.DefaultEventHandler.appenderRefs=kafkaInfoAppender,kafkaDebugAppender
+logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaInfoAppender.ref=KAFKA_INFO_APPENDER
+logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaDebugAppender.ref=KAFKA_DEBUG_APPENDER
+
+logger.kafka.perf.ProducerPerformance$ProducerThread.name=kafka.perf.ProducerPerformance$ProducerThread
+logger.kafka.perf.ProducerPerformance$ProducerThread.level={{ log_level|default("DEBUG") }}
+logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRefs=kafkaInfoAppender,kafkaDebugAppender
+logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaInfoAppender.ref=KAFKA_INFO_APPENDER
+logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaDebugAppender.ref=KAFKA_DEBUG_APPENDER
+
+logger.kafka.perf.name=kafka.perf
+logger.kafka.perf.level={{ log_level|default("DEBUG") }}
+logger.kafka.perf.appenderRefs=kafkaInfoAppender,kafkaDebugAppender
+logger.kafka.perf.appenderRef.kafkaInfoAppender.ref=KAFKA_INFO_APPENDER
+logger.kafka.perf.appenderRef.kafkaDebugAppender.ref=KAFKA_DEBUG_APPENDER
+
+logger.kafka.network.RequestChannel$.name=kafka.network.RequestChannel$
+logger.kafka.network.RequestChannel$.level={{ log_level|default("DEBUG") }}
+logger.kafka.network.RequestChannel$.additivity=false
+logger.kafka.network.RequestChannel$.appenderRefs=requestInfoAppender,requestDebugAppender
+logger.kafka.network.RequestChannel$.appenderRef.requestInfoAppender.ref=REQUEST_INFO_APPENDER
+logger.kafka.network.RequestChannel$.appenderRef.requestDebugAppender.ref=REQUEST_DEBUG_APPENDER
+
+logger.kafka.network.Processor.name=kafka.network.Processor
+logger.kafka.network.Processor.level={{ log_level|default("DEBUG") }}
+logger.kafka.network.Processor.appenderRefs=requestInfoAppender,requestDebugAppender
+logger.kafka.network.Processor.appenderRef.requestInfoAppender.ref=REQUEST_INFO_APPENDER
+logger.kafka.network.Processor.appenderRef.requestDebugAppender.ref=REQUEST_DEBUG_APPENDER
+
+logger.kafka.log.LogCleaner.name=kafka.log.LogCleaner
+logger.kafka.log.LogCleaner.level={{ log_level|default("DEBUG") }}
+logger.kafka.log.LogCleaner.additivity=false
+logger.kafka.log.LogCleaner.appenderRefs=cleanerInfoAppender,cleanerDebugAppender
+logger.kafka.log.LogCleaner.appenderRef.cleanerInfoAppender.ref=CLEANER_INFO_APPENDER
+logger.kafka.log.LogCleaner.appenderRef.cleanerDebugAppender.ref=CLEANER_DEBUG_APPENDER
+
+logger.kafka.controller.name=kafka.controller
+logger.kafka.controller.level={{ log_level|default("DEBUG") }}
+logger.kafka.controller.additivity=false
+logger.kafka.controller.appenderRefs=controllerInfoAppender,controllerDebugAppender
+logger.kafka.controller.appenderRef.controllerInfoAppender.ref=CONTROLLER_INFO_APPENDER
+logger.kafka.controller.appenderRef.controllerDebugAppender.ref=CONTROLLER_DEBUG_APPENDER
+
+logger.kafka.client.ClientUtils.name=kafka.client.ClientUtils
+logger.kafka.client.ClientUtils.level={{ log_level|default("DEBUG") }}
+logger.kafka.client.ClientUtils.appenderRefs=kafkaInfoAppender,kafkaDebugAppender
+logger.kafka.client.ClientUtils.appenderRef.kafkaInfoAppender.ref=KAFKA_INFO_APPENDER
+logger.kafka.client.ClientUtils.appenderRef.kafkaDebugAppender.ref=KAFKA_DEBUG_APPENDER
+
+# Change this to debug to get the actual audit log for authorizer.
+logger.kafka.authorizer.logger.name=kafka.authorizer.logger
+logger.kafka.authorizer.logger.level={{ log_level|default("DEBUG") }}
+logger.kafka.authorizer.logger.additivity=false
+logger.kafka.authorizer.logger.appenderRefs=authorizerInfoAppender,authorizerDebugAppender
+logger.kafka.authorizer.logger.appenderRef.authorizerInfoAppender.ref=AUTHORIZER_INFO_APPENDER
+logger.kafka.authorizer.logger.appenderRef.authorizerDebugAppender.ref=AUTHORIZER_DEBUG_APPENDER
+
+logger.kafka.name=kafka
+logger.kafka.level={{ log_level|default("DEBUG") }}
+logger.kafka.appenderRefs=kafkaInfoAppender,kafkaDebugAppender
+logger.kafka.appenderRef.kafkaInfoAppender.ref=KAFKA_INFO_APPENDER
+logger.kafka.appenderRef.kafkaDebugAppender.ref=KAFKA_DEBUG_APPENDER
diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py
index 5dedc57916331..beb0c66adb450 100644
--- a/tests/kafkatest/services/streams.py
+++ b/tests/kafkatest/services/streams.py
@@ -306,7 +306,7 @@ def start_node(self, node):
node.account.mkdirs(self.PERSISTENT_ROOT)
prop_file = self.prop_file()
node.account.create_file(self.CONFIG_FILE, prop_file)
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j2.properties', log_file=self.LOG_FILE))
self.logger.info("Starting StreamsTest process on " + str(node.account))
with node.account.monitor_log(self.STDOUT_FILE) as monitor:
@@ -368,7 +368,7 @@ def start_cmd(self, node):
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\";" \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:%(log4j)s\";" \
" INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s" \
" %(kafka_run_class)s %(streams_class_name)s" \
" %(config_file)s %(user_test_args1)s" \
diff --git a/tests/kafkatest/services/templates/connect_log4j2.properties b/tests/kafkatest/services/templates/connect_log4j2.properties
new file mode 100644
index 0000000000000..4867aebabd483
--- /dev/null
+++ b/tests/kafkatest/services/templates/connect_log4j2.properties
@@ -0,0 +1,39 @@
+##
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name=TestConfig
+
+appenders=file
+
+appender.file.type=File
+appender.file.name=FILE
+appender.file.fileName={{ log_file }}
+appender.file.append=true
+appender.file.immediateFlush=true
+appender.file.layout.type=PatternLayout
+appender.file.layout.pattern=[%d] %p %m (%c)%n
+
+# Define the root logger with appender file
+rootLogger.level={{ log_level|default("INFO") }}
+rootLogger.appenderRefs=file
+rootLogger.appenderRef.file.ref=FILE
+
+loggers=org.apache.zookeeper,org.reflections
+
+logger.org.apache.zookeeper.name=org.apache.zookeeper
+logger.org.apache.zookeeper.level=ERROR
+
+logger.org.reflections.name=org.reflections
+logger.org.reflections.level=ERROR
diff --git a/tests/kafkatest/tests/streams/templates/log4j_template.properties b/tests/kafkatest/services/templates/tools_log4j2.properties
similarity index 64%
rename from tests/kafkatest/tests/streams/templates/log4j_template.properties
rename to tests/kafkatest/services/templates/tools_log4j2.properties
index 3f83b4220a1f5..36436e8b56438 100644
--- a/tests/kafkatest/tests/streams/templates/log4j_template.properties
+++ b/tests/kafkatest/services/templates/tools_log4j2.properties
@@ -1,3 +1,4 @@
+##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
@@ -12,20 +13,28 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+name=TestConfig
+
+appenders=file
+
+appender.file.type=File
+appender.file.name=FILE
+appender.file.fileName={{ log_file }}
+appender.file.append=true
+appender.file.immediateFlush=true
+appender.file.layout.type=PatternLayout
+appender.file.layout.pattern=[%d] %p %m (%c)%n
# Define the root logger with appender file
-log4j.rootLogger = {{ log_level|default("INFO") }}, FILE
+rootLogger.level={{ log_level|default("INFO") }}
+rootLogger.appenderRefs=file
+rootLogger.appenderRef.file.ref=FILE
{% if loggers is defined %}
+loggers={{ loggers|sort(reverse=true)|join(',') }}
+
{% for logger, log_level in loggers.items() %}
-log4j.logger.{{ logger }}={{ log_level }}
+logger.{{ logger }}.name={{ logger }}
+logger.{{ logger }}.level={{ log_level }}
{% endfor %}
{% endif %}
-
-log4j.appender.FILE=org.apache.log4j.FileAppender
-log4j.appender.FILE.File={{ log_file }}
-log4j.appender.FILE.ImmediateFlush=true
-# Set the append to true
-log4j.appender.FILE.Append=true
-log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
-log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n
diff --git a/tests/kafkatest/services/trogdor/templates/log4j.properties b/tests/kafkatest/services/trogdor/templates/log4j.properties
deleted file mode 100644
index 252668e3dabf8..0000000000000
--- a/tests/kafkatest/services/trogdor/templates/log4j.properties
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=DEBUG, mylogger
-log4j.logger.kafka=DEBUG
-log4j.logger.org.apache.kafka=DEBUG
-log4j.logger.org.eclipse=INFO
-log4j.appender.mylogger=org.apache.log4j.FileAppender
-log4j.appender.mylogger.File={{ log_path }}
-log4j.appender.mylogger.layout=org.apache.log4j.PatternLayout
-log4j.appender.mylogger.layout.ConversionPattern=[%d] %p %m (%c)%n
diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.properties b/tests/kafkatest/services/trogdor/templates/log4j2.properties
new file mode 100644
index 0000000000000..32079002b14ea
--- /dev/null
+++ b/tests/kafkatest/services/trogdor/templates/log4j2.properties
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name=TestConfig
+
+appenders=file
+
+appender.file.type=File
+appender.file.name=FILE
+appender.file.fileName={{ log_path }}
+appender.file.layout.type=PatternLayout
+appender.file.layout.pattern=[%d] %p %m (%c)%n
+
+# Define the root logger with appender file
+rootLogger.level=DEBUG
+rootLogger.appenderRefs=file
+rootLogger.appenderRef.file.ref=FILE
+
+loggers=kafka,org.apache.kafka,org.eclipse
+
+logger.kafka.name=kafka
+logger.kafka.level=DEBUG
+
+logger.org.apache.kafka.name=org.apache.kafka
+logger.org.apache.kafka.level=DEBUG
+
+logger.org.eclipse.name=org.eclipse
+logger.org.eclipse.level=INFO
diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py
index bd18bddb6268e..a42775bb75c2f 100644
--- a/tests/kafkatest/services/trogdor/trogdor.py
+++ b/tests/kafkatest/services/trogdor/trogdor.py
@@ -34,8 +34,8 @@ class TrogdorService(KafkaPathResolverMixin, Service):
AGENT_STDOUT_STDERR The path where we store the agents's stdout/stderr output.
COORDINATOR_LOG The path where we store the coordinator's log4j output.
AGENT_LOG The path where we store the agent's log4j output.
- AGENT_LOG4J_PROPERTIES The path to the agent log4j.properties file for log config.
- COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j.properties file for log config.
+ AGENT_LOG4J2_PROPERTIES The path to the agent log4j2.properties file for log config.
+ COORDINATOR_LOG4J2_PROPERTIES The path to the coordinator log4j2.properties file for log config.
CONFIG_PATH The path to the trogdor configuration file.
DEFAULT_AGENT_PORT The default port to use for trogdor_agent daemons.
DEFAULT_COORDINATOR_PORT The default port to use for trogdor_coordinator daemons.
@@ -48,8 +48,8 @@ class TrogdorService(KafkaPathResolverMixin, Service):
AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log")
COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log")
AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log")
- COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties")
- AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties")
+ COORDINATOR_LOG4J2_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j2.properties")
+ AGENT_LOG4J2_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j2.properties")
CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf")
DEFAULT_AGENT_PORT=8888
DEFAULT_COORDINATOR_PORT=8889
@@ -141,26 +141,26 @@ def start_node(self, node):
self._start_agent_node(node)
def _start_coordinator_node(self, node):
- node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
- self.render('log4j.properties',
+ node.account.create_file(TrogdorService.COORDINATOR_LOG4J2_PROPERTIES,
+ self.render('log4j2.properties',
log_path=TrogdorService.COORDINATOR_LOG))
self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR,
- TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
+ TrogdorService.COORDINATOR_LOG4J2_PROPERTIES,
TrogdorService.COORDINATOR_LOG, node)
self.logger.info("Started trogdor coordinator on %s." % node.name)
def _start_agent_node(self, node):
- node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES,
- self.render('log4j.properties',
+ node.account.create_file(TrogdorService.AGENT_LOG4J2_PROPERTIES,
+ self.render('log4j2.properties',
log_path=TrogdorService.AGENT_LOG))
self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR,
- TrogdorService.AGENT_LOG4J_PROPERTIES,
+ TrogdorService.AGENT_LOG4J2_PROPERTIES,
TrogdorService.AGENT_LOG, node)
self.logger.info("Started trogdor agent on %s." % node.name)
def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path,
- log4j_properties_path, log_path, node):
- cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configuration=file:%s'; " % log4j_properties_path
+ log4j2_properties_path, log_path, node):
+ cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configurationFile=file:%s'; " % log4j2_properties_path
cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \
(self.path.script("trogdor.sh", node),
daemon_name,
diff --git a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
index fe10a29369e91..836bba8ba8fa7 100644
--- a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
+++ b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
@@ -32,10 +32,10 @@ def __init__(self, test_context, kafka, mode, nodeId, processing_guarantee):
self.mode = mode
self.nodeId = nodeId
self.processing_guarantee = processing_guarantee
- self.log4j_template = 'log4j_template.properties'
+ self.log4j_template = 'log4j2_template.properties'
def start_cmd(self, node):
- return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s org.apache.kafka.streams.tests.RelationalSmokeTest " \
" %(mode)s %(kafka)s %(nodeId)s %(processing_guarantee)s %(state_dir)s" \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % {
@@ -54,7 +54,7 @@ def start_cmd(self, node):
def start_node(self, node):
node.account.mkdirs(self.PERSISTENT_ROOT)
node.account.create_file(self.LOG4J_CONFIG_FILE,
- self.render("log4j_template.properties", log_file=self.LOG_FILE))
+ self.render("log4j2_template.properties", log_file=self.LOG_FILE))
self.logger.info("Starting process on " + str(node.account))
node.account.ssh(self.start_cmd(node))
diff --git a/tests/kafkatest/tests/streams/templates/log4j2_template.properties b/tests/kafkatest/tests/streams/templates/log4j2_template.properties
new file mode 100644
index 0000000000000..6b30fdc84a469
--- /dev/null
+++ b/tests/kafkatest/tests/streams/templates/log4j2_template.properties
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+name=TestConfig
+
+appenders=file
+
+appender.file.type=File
+appender.file.name=FILE
+appender.file.fileName={{ log_file }}
+# Set the append to true
+appender.file.append=true
+appender.file.immediateFlush=true
+appender.file.layout.type=PatternLayout
+appender.file.layout.pattern=[%d] %p %m (%c)%n
+
+# Define the root logger with appender file
+rootLogger.level={{ log_level | default("INFO") }}
+rootLogger.appenderRefs=file
+rootLogger.appenderRef.file.ref=FILE
+
+{% if loggers is defined %}
+loggers={{ loggers | sort(reverse=true) | join(',') }}
+
+{% for logger, log_level in loggers.items() %}
+logger.{{ logger }}.name={{ logger }}
+logger.{{ logger }}.level={{ log_level }}
+{% endfor %}
+{% endif %}