From d6975fdc0fd4fb4b54479eb3bf131d47bce500c2 Mon Sep 17 00:00:00 2001
From: zhangyifan27
Date: Mon, 28 Jun 2021 21:57:02 +0800
Subject: [PATCH 1/2] fix: move config_hdfs.sh to scripts directory

---
 .github/workflows/ci-pull-request.yaml |  2 +-
 config_hdfs.sh                         | 61 --------------------------
 rdsn                                   |  2 +-
 scripts/pack_server.sh                 |  2 +-
 scripts/travis.sh                      |  2 +-
 5 files changed, 4 insertions(+), 65 deletions(-)
 delete mode 100755 config_hdfs.sh

diff --git a/.github/workflows/ci-pull-request.yaml b/.github/workflows/ci-pull-request.yaml
index e27bf3de97..544640ab60 100644
--- a/.github/workflows/ci-pull-request.yaml
+++ b/.github/workflows/ci-pull-request.yaml
@@ -60,5 +60,5 @@ jobs:
         run: ./run.sh pack_tools
       - name: Unit Testing
         run: |
-          source ./config_hdfs.sh
+          source ./scripts/config_hdfs.sh
           ./run.sh test --on_travis
diff --git a/config_hdfs.sh b/config_hdfs.sh
deleted file mode 100755
index 32dc80f381..0000000000
--- a/config_hdfs.sh
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# This file should be sourced to set up LD_LIBRARY_PATH and CLASSPATH to
-# run Pegasus binaries which use libhdfs in the context of a dev environment.
-
-# Try to detect the system's JAVA_HOME
-# If javac exists, then the system has a Java SDK (JRE does not have javac).
-# Follow the symbolic links and use this to determine the system's JAVA_HOME.
-SYSTEM_JAVA_HOME="/usr/java/default"
-if [ -n "$(which javac)" ]; then
-    SYSTEM_JAVA_HOME=$(which javac | xargs readlink -f | sed "s:/bin/javac::")
-fi
-# Prefer the JAVA_HOME set in the environment, but use the system's JAVA_HOME otherwise.
-export JAVA_HOME="${JAVA_HOME:-${SYSTEM_JAVA_HOME}}"
-if [ ! -d "$JAVA_HOME" ]; then
-    echo "JAVA_HOME must be set to the location of your JDK!"
-    return 1
-fi
-# Link jvm library.
-JAVA_JVM_LIBRARY_DIR=$(dirname $(find "${JAVA_HOME}/" -name libjvm.so | head -1))
-export LD_LIBRARY_PATH=${JAVA_JVM_LIBRARY_DIR}:$LD_LIBRARY_PATH
-
-# Set CLASSPATH to all the Hadoop jars needed to run Hadoop itself as well as
-# the right configuration directory containing core-site.xml or hdfs-site.xml.
-PEGASUS_HADOOP_HOME=`pwd`/rdsn/thirdparty/build/Source/hadoop
-# Prefer the HADOOP_HOME set in the environment, but use the pegasus's hadoop dir otherwise.
-export HADOOP_HOME="${HADOOP_HOME:-${PEGASUS_HADOOP_HOME}}"
-if [ ! -d "$HADOOP_HOME/etc/hadoop" ] || [ ! -d "$HADOOP_HOME/share/hadoop" ]; then
-    echo "HADOOP_HOME must be set to the location of your Hadoop jars and core-site.xml."
-    return 1
-fi
-export CLASSPATH=$CLASSPATH:$HADOOP_HOME/etc/hadoop/
-for f in $HADOOP_HOME/share/hadoop/common/lib/*.jar; do
-    export CLASSPATH=$CLASSPATH:$f
-done
-for f in $HADOOP_HOME/share/hadoop/common/*.jar; do
-    export CLASSPATH=$CLASSPATH:$f
-done
-for f in $HADOOP_HOME/share/hadoop/hdfs/lib/*.jar; do
-    export CLASSPATH=$CLASSPATH:$f
-done
-for f in $HADOOP_HOME/share/hadoop/hdfs/*.jar; do
-    export CLASSPATH=$CLASSPATH:$f
-done
diff --git a/rdsn b/rdsn
index e52e9a0d94..76577b2a40 160000
--- a/rdsn
+++ b/rdsn
@@ -1 +1 @@
-Subproject commit e52e9a0d94471fad442c6d3f3d46ebe1f7ffa23f
+Subproject commit 76577b2a40e7a94bb4f7e4a0ab567ddef4700aad
diff --git a/scripts/pack_server.sh b/scripts/pack_server.sh
index e6126c24d8..40bc7a9f88 100755
--- a/scripts/pack_server.sh
+++ b/scripts/pack_server.sh
@@ -115,7 +115,7 @@ copy_file ./rdsn/thirdparty/output/lib/sasl2 ${pack}/bin
 copy_file ./scripts/sendmail.sh ${pack}/bin
 copy_file ./src/server/config.ini ${pack}/bin
 copy_file ./src/server/config.min.ini ${pack}/bin
-copy_file ./config_hdfs.sh ${pack}/bin
+copy_file ./scripts/config_hdfs.sh ${pack}/bin
 copy_file "$(get_stdcpp_lib $custom_gcc)" "${pack}/bin"

diff --git a/scripts/travis.sh b/scripts/travis.sh
index 5b851727fa..0dac2d33bb 100755
--- a/scripts/travis.sh
+++ b/scripts/travis.sh
@@ -32,7 +32,7 @@ if [ "$modified" ]; then
     exit 1
 fi

-source "${root}"/config_hdfs.sh
+source "${root}"/scripts/config_hdfs.sh
 "${root}"/run.sh build -c --skip_thirdparty --disable_gperf && ./run.sh test --on_travis
 ret=$?
 if [ $ret ]; then

From b3fcea65966dc0dfe72e2bd04d8474fd55ca9a84 Mon Sep 17 00:00:00 2001
From: zhangyifan27
Date: Tue, 29 Jun 2021 00:14:48 +0800
Subject: [PATCH 2/2] add config_hdfs.sh

---
 scripts/config_hdfs.sh | 61 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100755 scripts/config_hdfs.sh

diff --git a/scripts/config_hdfs.sh b/scripts/config_hdfs.sh
new file mode 100755
index 0000000000..32dc80f381
--- /dev/null
+++ b/scripts/config_hdfs.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This file should be sourced to set up LD_LIBRARY_PATH and CLASSPATH to
+# run Pegasus binaries which use libhdfs in the context of a dev environment.
+
+# Try to detect the system's JAVA_HOME
+# If javac exists, then the system has a Java SDK (JRE does not have javac).
+# Follow the symbolic links and use this to determine the system's JAVA_HOME.
+SYSTEM_JAVA_HOME="/usr/java/default"
+if [ -n "$(which javac)" ]; then
+    SYSTEM_JAVA_HOME=$(which javac | xargs readlink -f | sed "s:/bin/javac::")
+fi
+# Prefer the JAVA_HOME set in the environment, but use the system's JAVA_HOME otherwise.
+export JAVA_HOME="${JAVA_HOME:-${SYSTEM_JAVA_HOME}}"
+if [ ! -d "$JAVA_HOME" ]; then
+    echo "JAVA_HOME must be set to the location of your JDK!"
+    return 1
+fi
+# Link jvm library.
+JAVA_JVM_LIBRARY_DIR=$(dirname $(find "${JAVA_HOME}/" -name libjvm.so | head -1))
+export LD_LIBRARY_PATH=${JAVA_JVM_LIBRARY_DIR}:$LD_LIBRARY_PATH
+
+# Set CLASSPATH to all the Hadoop jars needed to run Hadoop itself as well as
+# the right configuration directory containing core-site.xml or hdfs-site.xml.
+PEGASUS_HADOOP_HOME=`pwd`/rdsn/thirdparty/build/Source/hadoop
+# Prefer the HADOOP_HOME set in the environment, but use the pegasus's hadoop dir otherwise.
+export HADOOP_HOME="${HADOOP_HOME:-${PEGASUS_HADOOP_HOME}}"
+if [ ! -d "$HADOOP_HOME/etc/hadoop" ] || [ ! -d "$HADOOP_HOME/share/hadoop" ]; then
+    echo "HADOOP_HOME must be set to the location of your Hadoop jars and core-site.xml."
+    return 1
+fi
+export CLASSPATH=$CLASSPATH:$HADOOP_HOME/etc/hadoop/
+for f in $HADOOP_HOME/share/hadoop/common/lib/*.jar; do
+    export CLASSPATH=$CLASSPATH:$f
+done
+for f in $HADOOP_HOME/share/hadoop/common/*.jar; do
+    export CLASSPATH=$CLASSPATH:$f
+done
+for f in $HADOOP_HOME/share/hadoop/hdfs/lib/*.jar; do
+    export CLASSPATH=$CLASSPATH:$f
+done
+for f in $HADOOP_HOME/share/hadoop/hdfs/*.jar; do
+    export CLASSPATH=$CLASSPATH:$f
+done