[SKV-748][SD] add hdfs dir when run build_package.sh
liguohao authored and acelyc111 committed Jan 31, 2024
1 parent 8abd1e0 commit e875142
Showing 4 changed files with 29 additions and 10 deletions.
4 changes: 3 additions & 1 deletion build_package.sh
@@ -20,7 +20,7 @@ function build() {

 local target_server_dir=PACK_OUT/skv_server
 local target_tools_dir=${target_server_dir}/tools
-mkdir -p ${target_server_dir}/{bin,conf,lib,tools}
+mkdir -p ${target_server_dir}/{bin,conf,lib,tools,hadoop}

 cp -a ${source_server_dir}/bin/meta_server ${target_server_dir}/bin/
 cp -a ${source_server_dir}/bin/replica_server ${target_server_dir}/bin/
@@ -31,6 +31,8 @@ function build() {
 cp -a ${source_server_dir}/META_SERVER_VERSION ${target_server_dir}/
 cp -a ${source_server_dir}/REPLICA_SERVER_VERSION ${target_server_dir}/

+cp -r ${source_server_dir}/hadoop ${target_server_dir}/
+
 mkdir -p ${target_tools_dir}/DSN_ROOT/bin/pegasus_shell ${target_tools_dir}/src/shell
 cp -r ${source_tools_dir}/DSN_ROOT/bin/pegasus_shell/pegasus_shell ${target_tools_dir}/DSN_ROOT/bin/pegasus_shell/
 cp -r ${source_tools_dir}/src/shell/config.ini ${target_tools_dir}/src/shell
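With the two hunks above, the packaged server directory gains a hadoop/ subdirectory next to the existing ones. A sketch of the expected layout after this change (only hadoop/ is new; the annotations are ours, not from the commit):

PACK_OUT/skv_server/
    bin/        # meta_server, replica_server
    conf/
    lib/
    tools/
    hadoop/     # copied wholesale from ${source_server_dir}/hadoop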
19 changes: 19 additions & 0 deletions scripts/get_hdfs_access_conf.py
@@ -0,0 +1,19 @@
+#! /usr/bin/env python3
+# coding=utf-8
+
+# Get the HDFS connection info, i.e. the path to core-site.xml and hdfs-site.xml.
+
+import os
+import sys
+
+sys.path.append(os.path.join(os.environ['SKV_HOME'], 'pynest'))
+from pyguidance.hadoop_config.GuidanceHadoopConfig import get_access_conf
+
+# Corresponds to the resource requested in construction_blueprint/blueprint_2_1/declarative_desc/platform_resources.yaml of the skv_ecosystem repository.
+resource_owner = 'skv'
+resource_name = 'skv_backup'
+os.environ['SSDT_SYS_PRODUCT_COMPONENT_NAME'] = 'skv'
+os.environ['SSDT_SYS_MODULE_NAME'] = 'skv_offline'
+
+# Print the HDFS connection info.
+print(get_access_conf(resource_owner, resource_name, 'hdfs_dir')['access_conf']['connection_info']['conf_path'])
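For reference, a hypothetical invocation of the new script (the SKV_HOME value and the printed path are illustrative assumptions, not taken from this commit):

export SKV_HOME=/opt/skv                            # assumed install root
${MODULE_DIR}/tools/scripts/get_hdfs_access_conf.py # invocation path used by start_server.sh below
# prints a single conf directory path, e.g. /opt/skv/hadoop_conf (illustrative)

start_server.sh consumes that single line of stdout and appends it to CLASSPATH.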
1 change: 1 addition & 0 deletions scripts/pack_server.sh
@@ -133,6 +133,7 @@ copy_file ./thirdparty/output/lib/libhdfs* ${pack}/lib
 copy_file ./thirdparty/output/lib/libsasl*.so.* ${pack}/lib
 copy_file ./thirdparty/output/lib/libcom_err*.so.* ${pack}/lib
 copy_file ./scripts/config_hdfs.sh ${pack}/bin
+copy_file ./scripts/get_hdfs_access_conf.py ${pack}/bin

 copy_file "$(get_stdcpp_lib $custom_gcc)" "${pack}/lib"

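copy_file is a helper defined earlier in pack_server.sh (not shown in this diff). A minimal sketch of what such a wrapper plausibly does, assuming it is a thin cp wrapper that aborts packaging on failure:

function copy_file()
{
    # Sketch only: the repository's actual helper may log or retry differently.
    cp -v "$@"
    if [ $? -ne 0 ]; then
        echo "ERROR: copy file failed: $*"
        exit 1
    fi
}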
15 changes: 6 additions & 9 deletions src/server/bin/start_server.sh
@@ -50,18 +50,15 @@ if [ ! -d "$JAVA_HOME" ]; then
     exit 1
 fi

-# Set CLASSPATH to all the Hadoop jars needed to run Hadoop itself as well as
-# the right configuration directory containing core-site.xml or hdfs-site.xml.
+# Set CLASSPATH to all the Hadoop jars needed to run Hadoop client.
 PEGASUS_HADOOP_HOME=${MODULE_DIR}/hadoop
-if [ ! -d "${PEGASUS_HADOOP_HOME}/core-site.xml" ] || [ ! -d "${PEGASUS_HADOOP_HOME}/hdfs-site.xml" ]; then
-    echo "There is no core-site.xml or hdfs-site.xml in ${PEGASUS_HADOOP_HOME}."
-    exit 1
-fi
-
 export CLASSPATH=${PEGASUS_HADOOP_HOME}
 for f in ${PEGASUS_HADOOP_HOME}/*.jar; do
-    export CLASSPATH=$CLASSPATH:$f
+    export CLASSPATH=${CLASSPATH}:${f}
 done
+# As well as the right configuration directory containing core-site.xml or hdfs-site.xml.
+HDFS_CONN_INFO_PATH=$(${MODULE_DIR}/tools/scripts/get_hdfs_access_conf.py)
+export CLASSPATH=$CLASSPATH:${HDFS_CONN_INFO_PATH}
+echo "CLASSPATH: ${CLASSPATH}"

 export LD_LIBRARY_PATH=${JAVA_HOME}/jre/lib/$ARCH_TYPE/server:${JAVA_HOME}/jre/lib/$ARCH_TYPE:${LIB_DIR}:${LD_LIBRARY_PATH}

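Net effect: instead of requiring core-site.xml and hdfs-site.xml to live inside ${PEGASUS_HADOOP_HOME}, the conf directory printed by get_hdfs_access_conf.py is appended to CLASSPATH at startup. The echoed value would look roughly like this (all paths illustrative):

CLASSPATH: /path/to/module/hadoop:/path/to/module/hadoop/a.jar:/path/to/module/hadoop/b.jar:/opt/skv/hadoop_conf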
