
[#3430] improvement(hdfs): Add a docker kerberized CI image to test kerberos authentication #3431

Closed
wants to merge 13 commits
3 changes: 2 additions & 1 deletion build.gradle.kts
@@ -485,7 +485,8 @@ tasks.rat {
"clients/client-python/.pytest_cache/*",
"clients/client-python/gravitino.egg-info/*",
"clients/client-python/gravitino/utils/exceptions.py",
"clients/client-python/gravitino/utils/http_client.py"
"clients/client-python/gravitino/utils/http_client.py",
"dev/docker/hdfs/*"
)

// Add .gitignore excludes to the Apache Rat exclusion list.
3 changes: 3 additions & 0 deletions dev/docker/build-docker.sh
@@ -74,6 +74,9 @@ fi
if [[ "${component_type}" == "hive" ]]; then
. ${script_dir}/hive/hive-dependency.sh
build_args="--build-arg HADOOP_PACKAGE_NAME=${HADOOP_PACKAGE_NAME} --build-arg HIVE_PACKAGE_NAME=${HIVE_PACKAGE_NAME} --build-arg JDBC_DIVER_PACKAGE_NAME=${JDBC_DIVER_PACKAGE_NAME}"
elif [[ "${component_type}" == "hdfs" ]]; then
. ${script_dir}/hdfs/hdfs-dependency.sh
build_args="--build-arg HADOOP_PACKAGE_NAME=${HADOOP_PACKAGE_NAME}"
elif [ "${component_type}" == "trino" ]; then
. ${script_dir}/trino/trino-dependency.sh
elif [ "${component_type}" == "gravitino" ]; then
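
With the new "hdfs" branch wired into build-docker.sh, the image can presumably be built through the existing entry script. A hedged example (the flag names are assumed from the script's current usage, not confirmed by this diff):

    # Hypothetical invocation; flags assumed from the existing build-docker.sh usage.
    ./dev/docker/build-docker.sh --platform linux/amd64 --type hdfs \
      --image datastrato/gravitino-ci-hdfs --tag 0.1.0
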
142 changes: 142 additions & 0 deletions dev/docker/hdfs/Dockerfile
@@ -0,0 +1,142 @@
#
# Copyright 2023 Datastrato Pvt Ltd.
# This software is licensed under the Apache License version 2.
#

FROM ubuntu:16.04
LABEL maintainer="[email protected]"

ARG HADOOP_PACKAGE_NAME
ARG DEBIAN_FRONTEND=noninteractive

WORKDIR /

################################################################################
# update and install basic tools
RUN apt-get update && apt-get upgrade -y && apt-get install --fix-missing -yq \
git \
libkrb5-dev \
libmysqlclient-dev \
libssl-dev \
libsasl2-dev \
libsasl2-modules-gssapi-mit \
libsqlite3-dev \
libtidy-0.99-0 \
libxml2-dev \
libxslt-dev \
libffi-dev \
libldap2-dev \
python-dev \
python-setuptools \
libgmp3-dev \
libz-dev \
curl \
software-properties-common \
vim \
openssh-server \
wget \
sudo \
openjdk-8-jdk \
krb5-kdc \
krb5-admin-server

#################################################################################
## setup ssh
RUN mkdir /root/.ssh
RUN cat /dev/zero | ssh-keygen -q -N "" > /dev/null && cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys

COPY packages /tmp/packages

###############################################################################
# Start kerberos related services

COPY krb5.conf /etc/krb5.conf
COPY kdc.conf /etc/krb5kdc/kdc.conf
COPY kadm5.acl /etc/krb5kdc/kadm5.acl

RUN krb5_newrealm <<EOF
masterkey
masterkey
EOF

EXPOSE 88 749

################################################################################
# set environment variables
ENV JAVA_HOME=/usr/local/jdk
ENV HADOOP_HOME=/usr/local/hadoop
ENV HADOOP_HEAPSIZE=128
ENV HADOOP_INSTALL=${HADOOP_HOME}
ENV HADOOP_MAPRED_HOME=${HADOOP_INSTALL}
ENV HADOOP_COMMON_HOME=${HADOOP_INSTALL}
ENV HADOOP_HDFS_HOME=${HADOOP_INSTALL}
ENV HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
ENV YARN_HOME=${HADOOP_INSTALL}

ENV PATH=${JAVA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_INSTALL}/sbin:${HIVE_HOME}/bin:${PATH}
ENV LD_LIBRARY_PATH=${HADOOP_HOME}/lib/native

################################################################################
# add the above env for all users
RUN ARCH=$(uname -m) && \
if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then \
ln -s /usr/lib/jvm/java-8-openjdk-arm64 ${JAVA_HOME}; \
else \
ln -s /usr/lib/jvm/java-8-openjdk-amd64 ${JAVA_HOME}; \
fi

RUN echo "JAVA_HOME=${JAVA_HOME}" >> /etc/environment
RUN echo "HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE}" >> /etc/environment
RUN echo "HADOOP_HOME=${HADOOP_HOME}" >> /etc/environment
RUN echo "HADOOP_INSTALL=${HADOOP_INSTALL}" >> /etc/environment
RUN echo "HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME}" >> /etc/environment
RUN echo "HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME}" >> /etc/environment
RUN echo "HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME}" >> /etc/environment
RUN echo "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}" >> /etc/environment
RUN echo "HADOOP_CLASSPATH=${JAVA_HOME}/lib/tools.jar" >> /etc/environment
RUN echo "YARN_HOME=${YARN_HOME}" >> /etc/environment
RUN echo "PATH=${PATH}" >> /etc/environment
RUN echo "CLASSPATH=${CLASSPATH}" >> /etc/environment
RUN echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" >> /etc/environment

################################################################################
# install hadoop
RUN mkdir ${HADOOP_HOME}
RUN tar -xz -C ${HADOOP_HOME} --strip-components 1 -f /tmp/packages/${HADOOP_PACKAGE_NAME}

# replace configuration templates
RUN rm -f ${HADOOP_CONF_DIR}/core-site.xml
RUN rm -f ${HADOOP_CONF_DIR}/hadoop-env.sh
RUN rm -f ${HADOOP_CONF_DIR}/yarn-env.sh
RUN rm -f ${HADOOP_CONF_DIR}/hdfs-site.xml
RUN rm -f ${HADOOP_CONF_DIR}/mapred-site.xml
RUN rm -f ${HADOOP_CONF_DIR}/yarn-site.xml

ADD core-site.xml ${HADOOP_CONF_DIR}/core-site.xml
ADD hadoop-env.sh ${HADOOP_CONF_DIR}/hadoop-env.sh
ADD yarn-env.sh ${HADOOP_CONF_DIR}/yarn-env.sh
ADD hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml
ADD mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml
ADD yarn-site.xml ${HADOOP_CONF_DIR}/yarn-site.xml
ADD check-status.sh /tmp/check-status.sh

################################################################################
# add users and groups
RUN groupadd hdfs && groupadd hadoop && groupadd mapred
RUN useradd -g hdfs hdfs
RUN chown -R hdfs:hdfs ${HADOOP_HOME}

################################################################################
# remove installed packages and cache
RUN rm -rf /tmp/packages
RUN rm -rf /var/lib/apt/lists/*

################################################################################
# expose port
EXPOSE 8088 9000 50070 50075 50010 10001

################################################################################
# create startup script and set ENTRYPOINT
WORKDIR /
ADD start.sh /usr/local/sbin
ENTRYPOINT ["/bin/bash", "/usr/local/sbin/start.sh"]
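
The ENTRYPOINT runs start.sh, which is not included in this excerpt. As a sketch only, a startup script for this image would typically start the KDC, create service principals and keytabs, and then bring up HDFS; the realm, hostnames, and paths below are illustrative assumptions, not taken from the PR:

    #!/bin/bash
    # Hypothetical start.sh sketch; realm, principals, and paths are assumptions.
    set -ex

    service krb5-kdc start
    service krb5-admin-server start

    REALM="HADOOPKRB"        # assumed realm name
    HOST="$(hostname -f)"

    # Create service principals and export a keytab (standard MIT Kerberos commands).
    kadmin.local -q "addprinc -randkey hdfs/${HOST}@${REALM}"
    kadmin.local -q "addprinc -randkey HTTP/${HOST}@${REALM}"
    kadmin.local -q "ktadd -k /etc/hadoop.keytab hdfs/${HOST}@${REALM} HTTP/${HOST}@${REALM}"
    chown hdfs:hdfs /etc/hadoop.keytab

    # Format the namenode on first boot, then start HDFS.
    if [ ! -d /tmp/hadoop-root/dfs/name ]; then
      ${HADOOP_HOME}/bin/hdfs namenode -format -nonInteractive
    fi
    ${HADOOP_HOME}/sbin/start-dfs.sh

    tail -f /dev/null   # keep the container in the foreground
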
16 changes: 16 additions & 0 deletions dev/docker/hdfs/check-status.sh
@@ -0,0 +1,16 @@
#!/bin/bash
#
# Copyright 2023 Datastrato Pvt Ltd.
# This software is licensed under the Apache License version 2.
#
set -ex

hdfs_ready=$(hdfs dfsadmin -report | grep "Live datanodes" | awk '{print $3}')
if [[ ${hdfs_ready} == "(1):" ]]; then
echo "HDFS is ready"
else
echo "HDFS is not ready"
exit 1
fi

exit 0
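
In CI this check is usually polled until the datanode registers; a minimal wait loop a workflow might use (the container name is illustrative):

    # Hypothetical CI wait loop; the container name is an assumption.
    for i in $(seq 1 30); do
      if docker exec gravitino-ci-hdfs bash /tmp/check-status.sh; then
        echo "HDFS is up"
        break
      fi
      sleep 10
    done
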
63 changes: 63 additions & 0 deletions dev/docker/hdfs/core-site.xml
@@ -0,0 +1,63 @@
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://0.0.0.0:9000</value>
</property>

<property>
<name>name</name>
<value>Development Cluster</value>
</property>

<property>
<name>hadoop.http.staticuser.user</name>
<value>hadoopuser</value>
</property>

<property>
<name>hadoop.proxyuser.hive.hosts</name>
<value>*</value>
</property>

<property>
<name>hadoop.proxyuser.hive.groups</name>
<value>*</value>
</property>

<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>

<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>

<!-- Added kerberos related configuration -->
<property>
<name>hadoop.proxyuser.hdfs.hosts</name>
<value>*</value>
</property>

<property>
<name>hadoop.proxyuser.hdfs.groups</name>
<value>*</value>
</property>

<property>
<name>hadoop.proxyuser.hdfs.users</name>
<value>*</value>
</property>

<property>
<name>hadoop.security.authorization</name>
<value>true</value>
</property>

<property>
<name>hadoop.security.authentication</name>
<value>kerberos</value>
</property>

</configuration>
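
Setting hadoop.security.authentication to kerberos here is only half of the configuration: the companion hdfs-site.xml (added by the Dockerfile but not shown in this diff) must map each daemon to a principal and keytab. A sketch of the usual entries, with illustrative values:

    <!-- Illustrative hdfs-site.xml fragment; principal and keytab values are assumptions. -->
    <property>
      <name>dfs.block.access.token.enable</name>
      <value>true</value>
    </property>
    <property>
      <name>dfs.namenode.kerberos.principal</name>
      <value>hdfs/[email protected]</value>
    </property>
    <property>
      <name>dfs.namenode.keytab.file</name>
      <value>/etc/hadoop.keytab</value>
    </property>
    <property>
      <name>dfs.datanode.kerberos.principal</name>
      <value>hdfs/[email protected]</value>
    </property>
    <property>
      <name>dfs.datanode.keytab.file</name>
      <value>/etc/hadoop.keytab</value>
    </property>
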
95 changes: 95 additions & 0 deletions dev/docker/hdfs/hadoop-env.sh
@@ -0,0 +1,95 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Set Hadoop-specific environment variables here.

# The only required environment variable is JAVA_HOME. All others are
# optional. When running a distributed configuration it is best to
# set JAVA_HOME in this file, so that it is correctly defined on
# remote nodes.

# The java implementation to use.
export JAVA_HOME=${JAVA_HOME}

# The jsvc implementation to use. Jsvc is required to run secure datanodes
# that bind to privileged ports to provide authentication of data transfer
# protocol. Jsvc is not required if SASL is configured for authentication of
# data transfer protocol using non-privileged ports.
#export JSVC_HOME=${JSVC_HOME}

export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop

# Extra Java CLASSPATH elements. Automatically insert capacity-scheduler.
for f in ${HADOOP_HOME}/contrib/capacity-scheduler/*.jar; do
if [ "${HADOOP_CLASSPATH}" ]; then
export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
else
export HADOOP_CLASSPATH=$f
fi
done

# The maximum amount of heap to use, in MB. Default is 1000.
export HADOOP_HEAPSIZE=128

# Extra Java runtime options. Empty by default.
export HADOOP_OPTS="${HADOOP_OPTS} -Djava.net.preferIPv4Stack=true -XX:MaxPermSize=128m"

# Command specific options appended to HADOOP_OPTS when specified
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} ${HADOOP_NAMENODE_OPTS}"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS ${HADOOP_DATANODE_OPTS}"

export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} ${HADOOP_SECONDARYNAMENODE_OPTS}"

export HADOOP_NFS3_OPTS="${HADOOP_NFS3_OPTS}"
export HADOOP_PORTMAP_OPTS="${HADOOP_PORTMAP_OPTS}"

# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
export HADOOP_CLIENT_OPTS="${HADOOP_CLIENT_OPTS}"

# On secure datanodes, user to run the datanode as after dropping privileges.
# This **MUST** be uncommented to enable secure HDFS if using privileged ports
# to provide authentication of data transfer protocol. This **MUST NOT** be
# defined if SASL is configured for authentication of data transfer protocol
# using non-privileged ports.
export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}

# Where log files are stored. ${HADOOP_HOME}/logs by default.

# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}

###
# HDFS Mover specific parameters
###
# Specify the JVM options to be used when starting the HDFS Mover.
# These options will be appended to the options specified as HADOOP_OPTS
# and therefore may override any similar flags set in HADOOP_OPTS
#
# export HADOOP_MOVER_OPTS=""

###
# Advanced Users Only!
###

# The directory where pid files are stored. /tmp by default.
# NOTE: this should be set to a directory that can only be written to by
# the user that will run the hadoop daemons. Otherwise there is the
# potential for a symlink attack.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}

# A string representing this instance of hadoop. ${USER} by default.
export HADOOP_IDENT_STRING=${USER}
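
As the comments above note, a secure datanode either drops privileges via jsvc on privileged ports (HADOOP_SECURE_DN_USER set) or authenticates the data-transfer protocol with SASL on non-privileged ports. The SASL route avoids jsvc entirely; a sketch of the standard hdfs-site.xml properties it needs (port values illustrative):

    <!-- SASL alternative to jsvc/privileged ports; port values are illustrative. -->
    <property>
      <name>dfs.data.transfer.protection</name>
      <value>authentication</value>
    </property>
    <property>
      <name>dfs.http.policy</name>
      <value>HTTPS_ONLY</value>
    </property>
    <!-- Datanode ports must be non-privileged (above 1023) in this mode. -->
    <property>
      <name>dfs.datanode.address</name>
      <value>0.0.0.0:10019</value>
    </property>
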
23 changes: 23 additions & 0 deletions dev/docker/hdfs/hdfs-dependency.sh
@@ -0,0 +1,23 @@
#!/bin/bash
#
# Copyright 2023 Datastrato Pvt Ltd.
# This software is licensed under the Apache License version 2.
#
set -ex
hadoop_dir="$(dirname "${BASH_SOURCE-$0}")"
hadoop_dir="$(cd "${hadoop_dir}">/dev/null; pwd)"

# Environment variables definition
HADOOP_VERSION="3.1.0"

HADOOP_PACKAGE_NAME="hadoop-${HADOOP_VERSION}.tar.gz" # Must export this variable for Dockerfile
HADOOP_DOWNLOAD_URL="https://archive.apache.org/dist/hadoop/core/hadoop-${HADOOP_VERSION}/${HADOOP_PACKAGE_NAME}"

# Prepare download packages
if [[ ! -d "${hadoop_dir}/packages" ]]; then
mkdir -p "${hadoop_dir}/packages"
fi

if [ ! -f "${hadoop_dir}/packages/${HADOOP_PACKAGE_NAME}" ]; then
curl --progress-bar -L -s -o "${hadoop_dir}/packages/${HADOOP_PACKAGE_NAME}" ${HADOOP_DOWNLOAD_URL}
fi
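
build-docker.sh sources this file and forwards HADOOP_PACKAGE_NAME as a Docker build argument, so a direct build that bypasses the wrapper would presumably look like this (the image tag is illustrative):

    # Hypothetical direct build; the image tag is an assumption.
    . dev/docker/hdfs/hdfs-dependency.sh
    docker build \
      --build-arg HADOOP_PACKAGE_NAME="${HADOOP_PACKAGE_NAME}" \
      -t datastrato/gravitino-ci-hdfs:test dev/docker/hdfs
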