KAFKA-16373: KIP-1028: Adding 3.7.0 docker official images static assets (apache#16267)

This PR adds the static Dockerfile and scripts for the Apache Kafka 3.7.0 release. As described in KIP-1028, it starts the release of the kafka:3.7.0 Docker Official Image, and it also lets us validate the process and address any changes requested by Docker Hub before the 3.8.0 release.

The static Dockerfile and scripts were generated via the GitHub Actions workflows and scripts added in apache#16027. Reports from building and testing the 3.7.0 Docker Official Image are included below.
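
As a local smoke test of these static assets (the image tag and commands below are illustrative, not part of this PR), the JVM-based image can be built from the new directory and started with its baked-in defaults:

# Build the 3.7.0 JVM image from the static assets added in this PR (tag is illustrative).
docker build -t kafka:3.7.0 docker/docker_official_images/3.7.0/jvm
# Start a throwaway single-node KRaft broker with the default configuration.
docker run -d --name kafka-smoke -p 9092:9092 kafka:3.7.0
# Inspect the logs and look for the KafkaRaftServer startup message.
docker logs kafka-smoke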

Reviewers: Manikumar Reddy <[email protected]>, Vedarth Sharma <[email protected]>
KrishVora01 authored and gongxuanzhang committed Jun 12, 2024
1 parent 0a1e886 commit 01d53f3
Showing 7 changed files with 422 additions and 0 deletions.
95 changes: 95 additions & 0 deletions docker/docker_official_images/3.7.0/jvm/Dockerfile
@@ -0,0 +1,95 @@
###############################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

FROM eclipse-temurin:21-jre-alpine AS build-jsa

USER root

# Get Kafka from https://downloads.apache.org/kafka, url passed as env var, for version 3.7.0
ENV kafka_url https://downloads.apache.org/kafka/3.7.0/kafka_2.13-3.7.0.tgz

COPY jsa_launch /etc/kafka/docker/jsa_launch

RUN set -eux ; \
    apk update ; \
    apk upgrade ; \
    apk add --no-cache wget gcompat gpg gpg-agent procps bash; \
    mkdir opt/kafka; \
    wget -nv -O kafka.tgz "$kafka_url"; \
    wget -nv -O kafka.tgz.asc "$kafka_url.asc"; \
    tar xfz kafka.tgz -C /opt/kafka --strip-components 1; \
    wget -nv -O KEYS https://downloads.apache.org/kafka/KEYS; \
    gpg --import KEYS; \
    gpg --batch --verify kafka.tgz.asc kafka.tgz

# Generate jsa files using dynamic CDS for kafka server start command and kafka storage format command
RUN /etc/kafka/docker/jsa_launch


FROM eclipse-temurin:21-jre-alpine

# exposed ports
EXPOSE 9092

USER root

# Get Kafka from https://downloads.apache.org/kafka, url passed as env var, for version 3.7.0
ENV kafka_url https://downloads.apache.org/kafka/3.7.0/kafka_2.13-3.7.0.tgz
ENV build_date 2024-06-10


LABEL org.label-schema.name="kafka" \
      org.label-schema.description="Apache Kafka" \
      org.label-schema.build-date="${build_date}" \
      org.label-schema.vcs-url="https://github.com/apache/kafka" \
      maintainer="Apache Kafka"

RUN set -eux ; \
    apk update ; \
    apk upgrade ; \
    apk add --no-cache wget gcompat gpg gpg-agent procps bash; \
    mkdir opt/kafka; \
    wget -nv -O kafka.tgz "$kafka_url"; \
    wget -nv -O kafka.tgz.asc "$kafka_url.asc"; \
    tar xfz kafka.tgz -C /opt/kafka --strip-components 1; \
    wget -nv -O KEYS https://downloads.apache.org/kafka/KEYS; \
    gpg --import KEYS; \
    gpg --batch --verify kafka.tgz.asc kafka.tgz; \
    mkdir -p /var/lib/kafka/data /etc/kafka/secrets; \
    mkdir -p /etc/kafka/docker /usr/logs /mnt/shared/config; \
    adduser -h /home/appuser -D --shell /bin/bash appuser; \
    chown appuser:appuser -R /usr/logs /opt/kafka /mnt/shared/config; \
    chown appuser:root -R /var/lib/kafka /etc/kafka/secrets /etc/kafka; \
    chmod -R ug+w /etc/kafka /var/lib/kafka /etc/kafka/secrets; \
    cp /opt/kafka/config/log4j.properties /etc/kafka/docker/log4j.properties; \
    cp /opt/kafka/config/tools-log4j.properties /etc/kafka/docker/tools-log4j.properties; \
    cp /opt/kafka/config/kraft/server.properties /etc/kafka/docker/server.properties; \
    rm kafka.tgz kafka.tgz.asc KEYS; \
    apk del wget gpg gpg-agent; \
    apk cache clean;

COPY --from=build-jsa kafka.jsa /opt/kafka/kafka.jsa
COPY --from=build-jsa storage.jsa /opt/kafka/storage.jsa
COPY --chown=appuser:appuser resources/common-scripts /etc/kafka/docker
COPY --chown=appuser:appuser launch /etc/kafka/docker/launch

USER appuser

VOLUME ["/etc/kafka/secrets", "/var/lib/kafka/data", "/mnt/shared/config"]

CMD ["/etc/kafka/docker/run"]
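
The two-stage build above uses JDK dynamic class-data sharing (CDS): a training stage runs Kafka once with -XX:ArchiveClassesAtExit to record the loaded classes, and the final image replays that archive via -XX:SharedArchiveFile to cut startup time. A minimal, generic illustration of the same mechanism (file and jar names are placeholders, not from this commit):

# Training run: on JVM exit, the classes loaded by the application are written to app.jsa.
java -XX:ArchiveClassesAtExit=app.jsa -jar app.jar
# Subsequent runs: map the archive so those classes are loaded from the shared archive.
java -XX:SharedArchiveFile=app.jsa -jar app.jar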
49 changes: 49 additions & 0 deletions docker/docker_official_images/3.7.0/jvm/jsa_launch
@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

KAFKA_CLUSTER_ID="$(opt/kafka/bin/kafka-storage.sh random-uuid)"
TOPIC="test-topic"

KAFKA_JVM_PERFORMANCE_OPTS="-XX:ArchiveClassesAtExit=storage.jsa" opt/kafka/bin/kafka-storage.sh format -t $KAFKA_CLUSTER_ID -c opt/kafka/config/kraft/server.properties

KAFKA_JVM_PERFORMANCE_OPTS="-XX:ArchiveClassesAtExit=kafka.jsa" opt/kafka/bin/kafka-server-start.sh opt/kafka/config/kraft/server.properties &

check_timeout() {
    if [ $TIMEOUT -eq 0 ]; then
        echo "Server startup timed out"
        exit 1
    fi
    echo "Check will timeout in $(( TIMEOUT-- )) seconds"
    sleep 1
}

opt/kafka/bin/kafka-topics.sh --create --topic $TOPIC --bootstrap-server localhost:9092
[ $? -eq 0 ] || exit 1

echo "test" | opt/kafka/bin/kafka-console-producer.sh --topic $TOPIC --bootstrap-server localhost:9092
[ $? -eq 0 ] || exit 1

opt/kafka/bin/kafka-console-consumer.sh --topic $TOPIC --from-beginning --bootstrap-server localhost:9092 --max-messages 1 --timeout-ms 20000
[ $? -eq 0 ] || exit 1

opt/kafka/bin/kafka-server-stop.sh

# Wait until jsa file is generated
TIMEOUT=20
until [ -f /kafka.jsa ]
do
    check_timeout
done
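
To double-check that the archives produced by this training script are actually picked up at runtime, class-loading logging can be enabled inside a container (this verification step is an assumption, not part of the commit):

# Classes served from a CDS archive are reported with "source: shared objects file".
KAFKA_JVM_PERFORMANCE_OPTS="-XX:SharedArchiveFile=/opt/kafka/kafka.jsa -Xlog:class+load=info" \
    /opt/kafka/bin/kafka-topics.sh --version 2>&1 | grep -m 3 "shared objects file"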
68 changes: 68 additions & 0 deletions docker/docker_official_images/3.7.0/jvm/launch
@@ -0,0 +1,68 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Override this section from the script to include the com.sun.management.jmxremote.rmi.port property.
if [ -z "${KAFKA_JMX_OPTS-}" ]; then
export KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true \
-Dcom.sun.management.jmxremote.authenticate=false \
-Dcom.sun.management.jmxremote.ssl=false "
fi

# The JMX client needs to be able to connect to java.rmi.server.hostname.
# The default for bridged n/w is the bridged IP so you will only be able to connect from another docker container.
# For host n/w, this is the IP that the hostname on the host resolves to.

# If you have more than one n/w configured, hostname -i gives you all the IPs,
# the default is to pick the first IP (or network).
export KAFKA_JMX_HOSTNAME=${KAFKA_JMX_HOSTNAME:-$(hostname -i | cut -d" " -f1)}

if [ "${KAFKA_JMX_PORT-}" ]; then
# This ensures that the "if" section for JMX_PORT in kafka launch script does not trigger.
export JMX_PORT=$KAFKA_JMX_PORT
export KAFKA_JMX_OPTS="${KAFKA_JMX_OPTS-} -Djava.rmi.server.hostname=$KAFKA_JMX_HOSTNAME \
-Dcom.sun.management.jmxremote.local.only=false \
-Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT \
-Dcom.sun.management.jmxremote.port=$JMX_PORT"
fi

# Make a temp env variable to store user provided performance opts
if [ -z "${KAFKA_JVM_PERFORMANCE_OPTS-}" ]; then
    export TEMP_KAFKA_JVM_PERFORMANCE_OPTS=""
else
    export TEMP_KAFKA_JVM_PERFORMANCE_OPTS="$KAFKA_JVM_PERFORMANCE_OPTS"
fi

# We will first use CDS for storage to format storage
export KAFKA_JVM_PERFORMANCE_OPTS="${KAFKA_JVM_PERFORMANCE_OPTS-} -XX:SharedArchiveFile=/opt/kafka/storage.jsa"

echo "===> Using provided cluster id $CLUSTER_ID ..."

# Invoke the docker wrapper to setup property files and format storage
result=$(/opt/kafka/bin/kafka-run-class.sh kafka.docker.KafkaDockerWrapper setup \
    --default-configs-dir /etc/kafka/docker \
    --mounted-configs-dir /mnt/shared/config \
    --final-configs-dir /opt/kafka/config 2>&1) || \
    echo $result | grep -i "already formatted" || \
    { echo $result && (exit 1) }

# Restore the user-provided performance opts, dropping the storage CDS option
export KAFKA_JVM_PERFORMANCE_OPTS="$TEMP_KAFKA_JVM_PERFORMANCE_OPTS"

# Now we will use CDS for kafka to start kafka server
export KAFKA_JVM_PERFORMANCE_OPTS="$KAFKA_JVM_PERFORMANCE_OPTS -XX:SharedArchiveFile=/opt/kafka/kafka.jsa"

# Start kafka broker
exec /opt/kafka/bin/kafka-server-start.sh /opt/kafka/config/server.properties
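
A usage sketch for the JMX handling above (hostname, port, and image tag are examples): publishing the JMX/RMI port and setting the advertised hostname lets an external client such as jconsole attach to the broker.

# Expose the broker and a fixed JMX/RMI port; KAFKA_JMX_HOSTNAME must be resolvable from the client.
docker run -d -p 9092:9092 -p 9999:9999 \
    -e KAFKA_JMX_PORT=9999 \
    -e KAFKA_JMX_HOSTNAME=broker.example.com \
    kafka:3.7.0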
@@ -0,0 +1,23 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -o nounset \
    -o errexit

# Trace may expose passwords/credentials by printing them to stdout, so turn on with care.
if [ "${TRACE:-}" == "true" ]; then
    set -o verbose \
        -o xtrace
fi
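
Assuming the container entrypoint scripts source this bash-config (an assumption made for illustration), tracing can be switched on per container when debugging startup, bearing in mind the credential-exposure caveat above:

# Enable verbose/xtrace output for the startup scripts of a single container (tag is illustrative).
docker run -e TRACE=true -p 9092:9092 kafka:3.7.0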
@@ -0,0 +1,121 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

ensure() {
    if [[ -z "${!1}" ]]; then
        echo "$1 environment variable not set"
        exit 1
    fi
}

path() {
    if [[ $2 == "writable" ]]; then
        if [[ ! -w "$1" ]]; then
            echo "$1 file not writable"
            exit 1
        fi
    elif [[ $2 == "existence" ]]; then
        if [[ ! -e "$1" ]]; then
            echo "$1 file does not exist"
            exit 1
        fi
    fi
}

# unset KAFKA_ADVERTISED_LISTENERS from ENV in KRaft mode when running as controller only
if [[ -n "${KAFKA_PROCESS_ROLES-}" ]]
then
echo "Running in KRaft mode..."
ensure CLUSTER_ID
if [[ $KAFKA_PROCESS_ROLES == "controller" ]]
then
if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]]
then
echo "KAFKA_ADVERTISED_LISTENERS is not supported on a KRaft controller."
exit 1
else
# Unset in case env variable is set with empty value
unset KAFKA_ADVERTISED_LISTENERS
fi
fi
fi

# By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing
# hosts with 0.0.0.0. This is a good default as it ensures that the broker
# process listens on all network interfaces.
if [[ -z "${KAFKA_LISTENERS-}" ]] && ( [[ -z "${KAFKA_PROCESS_ROLES-}" ]] || [[ $KAFKA_PROCESS_ROLES != "controller" ]] ) && [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]]
then
    export KAFKA_LISTENERS
    KAFKA_LISTENERS=$(echo "$KAFKA_ADVERTISED_LISTENERS" | sed -e 's|://[^:]*:|://0.0.0.0:|g')
fi

path /opt/kafka/config/ writable

# Set if ADVERTISED_LISTENERS has SSL:// or SASL_SSL:// endpoints.
if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]]
then
    echo "SSL is enabled."

    ensure KAFKA_SSL_KEYSTORE_FILENAME
    export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME"
    path "$KAFKA_SSL_KEYSTORE_LOCATION" existence

    ensure KAFKA_SSL_KEY_CREDENTIALS
    KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS"
    path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" existence
    export KAFKA_SSL_KEY_PASSWORD
    KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION")

    ensure KAFKA_SSL_KEYSTORE_CREDENTIALS
    KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS"
    path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" existence
    export KAFKA_SSL_KEYSTORE_PASSWORD
    KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION")

    if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] )
    then
        ensure KAFKA_SSL_TRUSTSTORE_FILENAME
        export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME"
        path "$KAFKA_SSL_TRUSTSTORE_LOCATION" existence

        ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS
        KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS"
        path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" existence
        export KAFKA_SSL_TRUSTSTORE_PASSWORD
        KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION")
    fi
fi

# Set if KAFKA_ADVERTISED_LISTENERS has SASL_PLAINTEXT:// or SASL_SSL:// endpoints.
if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]]
then
    echo "SASL is enabled."

    ensure KAFKA_OPTS

    if [[ ! $KAFKA_OPTS == *"java.security.auth.login.config"* ]]
    then
        echo "KAFKA_OPTS should contain 'java.security.auth.login.config' property."
    fi
fi

if [[ -n "${KAFKA_JMX_OPTS-}" ]]
then
if [[ ! $KAFKA_JMX_OPTS == *"com.sun.management.jmxremote.rmi.port"* ]]
then
echo "KAFKA_OPTS should contain 'com.sun.management.jmxremote.rmi.port' property. It is required for accessing the JMX metrics externally."
fi
fi
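
A small worked example of the listener derivation performed above (hostnames are illustrative): the sed substitution rewrites only the host part of each advertised endpoint, leaving the protocol and port untouched.

# PLAINTEXT://broker1.example.com:9092,SSL://broker1.example.com:9093
# becomes PLAINTEXT://0.0.0.0:9092,SSL://0.0.0.0:9093
echo "PLAINTEXT://broker1.example.com:9092,SSL://broker1.example.com:9093" | sed -e 's|://[^:]*:|://0.0.0.0:|g'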
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

declare -A env_defaults
env_defaults=(
    # Replace CLUSTER_ID with a unique base64 UUID using "bin/kafka-storage.sh random-uuid"
    ["CLUSTER_ID"]="5L6g3nShT-eMCtK--X86sw"
)

for key in "${!env_defaults[@]}"; do
    if [[ -z "${!key:-}" ]]; then
        echo "${key} not set. Setting it to default value: \"${env_defaults[$key]}\""
        export "$key"="${env_defaults[$key]}"
    fi
done
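
The baked-in CLUSTER_ID above is only a fallback; as the comment notes, real deployments should supply their own id. One possible invocation (image tag and commands are illustrative):

# Generate a unique KRaft cluster id and hand it to the container instead of the default.
CLUSTER_ID="$(docker run --rm kafka:3.7.0 /opt/kafka/bin/kafka-storage.sh random-uuid)"
docker run -d -p 9092:9092 -e CLUSTER_ID="$CLUSTER_ID" kafka:3.7.0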