HBASE-24049 use hadoop-2.10.0 for "packaging and integration" check #1368

Merged
5 changes: 4 additions & 1 deletion dev-support/Jenkinsfile
@@ -121,7 +121,7 @@ pipeline {
}
stage ('hadoop 2 cache') {
environment {
HADOOP2_VERSION="2.8.5"
HADOOP2_VERSION="2.10.0"
}
steps {
// directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
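For context, the cache stage above pins the Hadoop 2 release that the packaging and integration check downloads and reuses between runs. As a minimal sketch, fetching the same release by hand would look roughly like this, assuming the standard Apache archive layout (the URL and unpack location are illustrative, not taken from this change):

    # Hypothetical manual fetch of the pinned Hadoop 2 release.
    HADOOP2_VERSION="2.10.0"
    curl -fLO "https://archive.apache.org/dist/hadoop/common/hadoop-${HADOOP2_VERSION}/hadoop-${HADOOP2_VERSION}.tar.gz"
    tar xzf "hadoop-${HADOOP2_VERSION}.tar.gz"   # unpacks to hadoop-2.10.0/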
@@ -656,6 +656,7 @@ pipeline {
--hbase-client-install "hbase-client" \
"hbase-install" \
"hadoop-2/bin/hadoop" \
+hadoop-2/share/hadoop/yarn/timelineservice \
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-2/bin/mapred \
@@ -675,6 +676,7 @@
--hbase-client-install hbase-client \
hbase-install \
hadoop-3/bin/hadoop \
+hadoop-3/share/hadoop/yarn/timelineservice \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
@@ -690,6 +692,7 @@
--hbase-client-install hbase-client \
hbase-install \
hadoop-3/bin/hadoop \
+hadoop-3/share/hadoop/yarn/timelineservice \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
20 changes: 14 additions & 6 deletions dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -18,7 +18,7 @@

set -e
function usage {
echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/share/hadoop/yarn/timelineservice /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
echo ""
echo " --zookeeper-data /path/to/use Where the embedded zookeeper instance should write its data."
echo " defaults to 'zk-data' in the working-dir."
@@ -67,9 +67,10 @@ if [ $# -lt 5 ]; then
fi
component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
-yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
-mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
-mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+timeline_service_dir="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
+yarn_server_tests_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+mapred_jobclient_test_jar="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+mapred_exec="$(cd "$(dirname "$6")"; pwd)/$(basename "$6")"

if [ ! -x "${hadoop_exec}" ]; then
echo "hadoop cli does not appear to be executable." >&2
@@ -285,18 +286,25 @@ echo "Starting up Hadoop"
if [ "${hadoop_version%.*.*}" -gt 2 ]; then
"${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
else
HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
HADOOP_CLASSPATH="${timeline_service_dir}/*:${timeline_service_dir}/lib/*:${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
fi

echo "$!" > "${working_dir}/hadoop.pid"

+# 2 + 4 + 8 + .. + 256 ~= 8.5 minutes.
+max_sleep_time=512
sleep_time=2
-until [ -s "${working_dir}/hbase-conf/core-site.xml" ]; do
+until [[ -s "${working_dir}/hbase-conf/core-site.xml" || "${sleep_time}" -ge "${max_sleep_time}" ]]; do
printf '\twaiting for Hadoop to finish starting up.\n'
sleep "${sleep_time}"
sleep_time="$((sleep_time*2))"
done

if [ "${sleep_time}" -ge "${max_sleep_time}" ] ; then
echo "time out waiting for Hadoop to startup" >&2
exit 1
fi

if [ "${hadoop_version%.*.*}" -gt 2 ]; then
echo "Verifying configs"
"${hadoop_exec}" --config "${working_dir}/hbase-conf/" conftest
2 changes: 1 addition & 1 deletion dev-support/hbase_nightly_source-artifact.sh
@@ -182,7 +182,7 @@ if mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" cl
fi
done
fi
echo "Building a binary tarball from the source tarball failed. see srctarball_install.log for details."
echo "Building a binary tarball from the source tarball failed. see ${working_dir}/srctarball_install.log for details."
# Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
# rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done
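The ${NAME//\//_} expansion in the find loop above is plain bash pattern substitution: every / in the path is replaced by _, so rat.txt files from nested module directories land in the working dir under distinct, flattened names. A standalone sketch with an illustrative path:

    NAME="unpack-dir/hbase-server/target/rat.txt"
    # All slashes become underscores, avoiding collisions in working_dir.
    echo "${NAME//\//_}"   # prints: unpack-dir_hbase-server_target_rat.txt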