From 4e35305acbdae6ffe1aa494d055f5f783da76728 Mon Sep 17 00:00:00 2001
From: chaogefeng <673120261@163.com>
Date: Fri, 10 Jan 2020 19:51:39 +0800
Subject: [PATCH] 1.Automated installation enhancements. 2.Adapt to linkis
changes. 3.Add jdbc node type. 4.Resolving module dependency conflicts close
#79 #80 #81 #82
---
assembly/pom.xml | 45 +++
bin/checkEnv.sh | 6 +-
bin/{checkMicro.sh => checkServices.sh} | 59 ++-
bin/install.sh | 351 ++++++++++++------
bin/start-all.sh | 120 +++---
bin/stop-all.sh | 58 ++-
conf/config.sh | 9 +-
db/dss_dml.sql | 2 +-
.../ch1/DataSphereStudio_Compile_Manual.md | 2 +-
.../en_US/ch2/DSS Quick Installation Guide.md | 4 +-
...26\350\257\221\346\226\207\346\241\243.md" | 2 +-
...77\347\224\250\346\226\207\346\241\243.md" | 6 +-
dss-appjoint-auth/pom.xml | 5 +
dss-flow-execution-entrance/pom.xml | 18 +-
.../src/main/assembly/distribution.xml | 36 --
.../FlowExecutionAppJointSignalSharedJob.java | 16 +-
.../job/FlowExecutionJobSignalKeyCreator.java | 39 ++
.../FlowExecutionEntranceConfiguration.scala | 3 +-
.../execution/DefaultFlowExecution.scala | 9 +-
.../job/parser/FlowJobNodeParser.scala | 7 +-
.../entrance/node/AppJointJobBuilder.scala | 22 +-
.../entrance/node/DefaultNodeRunner.scala | 26 +-
dss-linkis-node-execution/pom.xml | 4 +-
.../node/execution/WorkflowContextImpl.java | 5 +-
.../conf/LinkisJobExecutionConfiguration.java | 2 +-
.../impl/LinkisNodeExecutionImpl.java | 12 +-
.../job/AbstractAppJointLinkisJob.java | 2 +-
.../job/AbstractCommonLinkisJob.java | 2 +-
.../dss/linkis/node/execution/job/Job.java | 2 +-
.../execution/job/JobSignalKeyCreator.java | 25 ++
.../node/execution/job/SignalSharedJob.java | 6 +-
.../execution/parser/JobParamsParser.java | 24 +-
dss-scheduler-appjoint-core/pom.xml | 5 +
dss-server/pom.xml | 2 +-
dss-server/src/main/assembly/distribution.xml | 4 +
.../job/AzkabanAppJointSignalSharedJob.java | 17 +-
.../job/AzkabanJobSignalKeyCreator.java | 39 ++
.../linkis/jobtype/job/AzkanbanBuilder.java | 18 +-
.../linkis/linkis-appjoint-entrance/pom.xml | 17 +-
.../src/main/assembly/distribution.xml | 41 +-
pom.xml | 2 +-
visualis-appjoint/appjoint/pom.xml | 8 +
.../execution/VisualisNodeExecution.scala | 32 +-
43 files changed, 801 insertions(+), 313 deletions(-)
rename bin/{checkMicro.sh => checkServices.sh} (50%)
create mode 100644 dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java
create mode 100644 dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/JobSignalKeyCreator.java
create mode 100644 plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanJobSignalKeyCreator.java
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 4ec1c8b647..29934071c0 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -105,6 +105,51 @@
jackson-core
2.9.6
+
+ net.databinder.dispatch
+ dispatch-core_2.11
+ 0.11.2
+
+
+ net.databinder.dispatch
+ dispatch-json4s-jackson_2.11
+ 0.11.2
+
+
+ org.apache.htrace
+ htrace-core
+ 3.1.0-incubating
+
+
+ org.apache.commons
+ commons-math3
+ 3.1.1
+
+
+ org.apache.httpcomponents
+ httpclient
+ 4.5.4
+
+
+ org.apache.httpcomponents
+ httpcore
+ 4.4.7
+
+
+ com.ning
+ async-http-client
+ 1.8.10
+
+
+ commons-beanutils
+ commons-beanutils
+ 1.7.0
+
+
+ commons-beanutils
+ commons-beanutils-core
+ 1.8.0
+
dss-assembly
diff --git a/bin/checkEnv.sh b/bin/checkEnv.sh
index bdf48659ae..d51bd5ca21 100644
--- a/bin/checkEnv.sh
+++ b/bin/checkEnv.sh
@@ -1,3 +1,4 @@
+#!/bin/sh
#
# Copyright 2019 WeBank
#
@@ -13,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-#!/bin/sh
say() {
printf 'check command fail \n %s\n' "$1"
}
@@ -32,7 +32,7 @@ need_cmd() {
err "need '$1' (your linux command not found)"
fi
}
-echo "<-----start to check linux cmd:yum java mysql unzip expect telnet sed tar---->"
+echo "<-----start checking required commands---->"
need_cmd yum
need_cmd java
need_cmd mysql
@@ -42,4 +42,4 @@ need_cmd telnet
need_cmd tar
need_cmd sed
need_cmd dos2unix
-echo "<-----end to check linux cmd:yum java mysql unzip expect telnet sed tar------>"
+echo "<-----end of required command check---->"
diff --git a/bin/checkMicro.sh b/bin/checkServices.sh
similarity index 50%
rename from bin/checkMicro.sh
rename to bin/checkServices.sh
index 3f8ff95fa9..a540d525fa 100644
--- a/bin/checkMicro.sh
+++ b/bin/checkServices.sh
@@ -20,18 +20,59 @@ shellDir=`dirname $0`
workDir=`cd ${shellDir}/..;pwd`
##load config
-source ${workDir}/conf/config.sh
-source ${workDir}/conf/db.sh
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${workDir}/conf/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"}
+source ${LINKIS_DSS_CONF_FILE}
+source ${DISTRIBUTION}
MICRO_SERVICE_NAME=$1
MICRO_SERVICE_IP=$2
MICRO_SERVICE_PORT=$3
-echo "<--------------------------------------------------------------------------->"
-echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet"
-echo ""
-if [ ! -d $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME ];then
- echo "$MICRO_SERVICE_NAME is not installed,the check steps will be skipped"
- exit 0
+
+local_host="`hostname --fqdn`"
+
+ipaddr="`hostname -i`"
+
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
+
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ echo "Is local "$flag
+ if [ $flag == "0" ];then
+ eval $2
+ else
+ ssh -p $SSH_PORT $1 $2
+ fi
+
+}
+
+#echo "<--------------------------------------------------------------------------->"
+#echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet"
+#echo ""
+#if ! executeCMD $SERVER_IP "test -e $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME"; then
+# echo "$MICRO_SERVICE_NAME is not installed,the check steps will be skipped"
+# exit 0
+#fi
+echo "==========================================================="
+echo $MICRO_SERVICE_NAME
+echo $MICRO_SERVICE_IP
+echo $MICRO_SERVICE_PORT
+echo "==========================================================="
+
+if [ $MICRO_SERVICE_NAME == "visualis-server" ]||[ $MICRO_SERVICE_IP == "127.0.0.1" ]; then
+ MICRO_SERVICE_IP="`hostname -i`"
fi
result=`echo -e "\n" | telnet $MICRO_SERVICE_IP $MICRO_SERVICE_PORT 2>/dev/null | grep Connected | wc -l`
@@ -42,7 +83,7 @@ else
echo "ERROR your $MICRO_SERVICE_NAME microservice is not start successful !!! ERROR logs as follows :"
echo "PLEAESE CHECK DETAIL LOG,LOCATION:$DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
echo '<------------------------------------------------------------->'
- tail -n 50 $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/*.out
+ executeCMD $MICRO_SERVICE_IP "tail -n 50 $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/*.out"
echo '<-------------------------------------------------------------->'
echo "PLEAESE CHECK DETAIL LOG,LOCATION:$DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
exit 1
diff --git a/bin/install.sh b/bin/install.sh
index 5961dfdbd8..01b8c3a04e 100644
--- a/bin/install.sh
+++ b/bin/install.sh
@@ -15,6 +15,7 @@
#
#!/bin/sh
#Actively load user env
+
source ~/.bash_profile
shellDir=`dirname $0`
@@ -45,17 +46,13 @@ fi
function isSuccess(){
if [ $? -ne 0 ]; then
- echo "ERROR to " + $1
+ echo "Failed to $1"
exit 1
else
- echo "SUCESS to" + $1
+ echo "Succeed to $1"
fi
}
-#check env
-sh ${workDir}/bin/checkEnv.sh
-isSuccess "check env"
-
function checkJava(){
java -version
isSuccess "execute java --version"
@@ -72,46 +69,108 @@ else
fi
}
+
+say() {
+ printf 'check command fail \n %s\n' "$1"
+}
+
+err() {
+ say "$1" >&2
+ exit 1
+}
+
+check_cmd() {
+ command -v "$1" > /dev/null 2>&1
+}
+
+need_cmd() {
+ if ! check_cmd "$1"; then
+ err "need '$1' (command not found)"
+ fi
+}
+
+#check env
+sh ${workDir}/bin/checkEnv.sh
+isSuccess "check env"
+
##load config
echo "step1:load config"
-source ${workDir}/conf/config.sh
-source ${workDir}/conf/db.sh
+export DSS_CONFIG_PATH=${DSS_CONFIG_PATH:-"${workDir}/conf/config.sh"}
+export DSS_DB_CONFIG_PATH=${DSS_DB_CONFIG_PATH:-"${workDir}/conf/db.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"}
+source ${DSS_CONFIG_PATH}
+source ${DSS_DB_CONFIG_PATH}
+source ${DISTRIBUTION}
isSuccess "load config"
-local_host="`hostname -i`"
+local_host="`hostname --fqdn`"
+ipaddr="`hostname -i`"
+
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
+
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ if [ $flag == "0" ];then
+ echo "Is local execution:$2"
+ eval $2
+ else
+ echo "Is remote execution:$2"
+ ssh -p $SSH_PORT $1 $2
+ fi
+}
-##env check
-echo "Please enter the mode selection such as: 1"
-echo " 1: lite"
-echo " 2: sample"
-echo " 3: Standard"
-echo ""
+function copyFile(){
+ isLocal $1
+ flag=$?
+ src=$2
+ dest=$3
+ if [ $flag == "0" ];then
+ echo "Is local cp "
+ eval "cp -r $src $dest"
+ else
+ echo "Is remote cp "
+ scp -r -P $SSH_PORT $src $1:$dest
+ fi
+}
-INSTALL_MODE=1
+##install mode choice
+if [ "$INSTALL_MODE" == "" ];then
+ echo "Please enter the mode selection such as: 1"
+ echo " 1: Lite"
+ echo " 2: Sample"
+ echo " 3: Standard"
+ echo ""
+ read -p "Please input the choice:" idx
+ INSTALL_MODE=$idx
+fi
-read -p "Please input the choice:" idx
-if [[ '1' = "$idx" ]];then
- INSTALL_MODE=1
+if [[ '1' = "$INSTALL_MODE" ]];then
echo "You chose lite installation mode"
- #check for Java
checkJava
- #check for mysql
SERVER_NAME=MYSQL
EXTERNAL_SERVER_IP=$MYSQL_HOST
EXTERNAL_SERVER_PORT=$MYSQL_PORT
checkExternalServer
-elif [[ '2' = "$idx" ]];then
- INSTALL_MODE=2
+elif [[ '2' = "$INSTALL_MODE" ]];then
echo "You chose sample installation mode"
- #check for Java
checkJava
- #check for mysql
SERVER_NAME=MYSQL
EXTERNAL_SERVER_IP=$MYSQL_HOST
EXTERNAL_SERVER_PORT=$MYSQL_PORT
-
-elif [[ '3' = "$idx" ]];then
- INSTALL_MODE=3
+ checkExternalServer
+elif [[ '3' = "$INSTALL_MODE" ]];then
echo "You chose Standard installation mode"
#check for Java
checkJava
@@ -124,13 +183,16 @@ elif [[ '3' = "$idx" ]];then
SERVER_NAME=Qualitis
EXTERNAL_SERVER_IP=$QUALITIS_ADRESS_IP
EXTERNAL_SERVER_PORT=$QUALITIS_ADRESS_PORT
+ if [[ $IGNORECHECK = "" ]];then
checkExternalServer
+ fi
#check azkaban serivice
SERVER_NAME=AZKABAN
EXTERNAL_SERVER_IP=$AZKABAN_ADRESS_IP
EXTERNAL_SERVER_PORT=$AZKABAN_ADRESS_PORT
+ if [[ $IGNORECHECK = "" ]];then
checkExternalServer
-
+ fi
else
echo "no choice,exit!"
exit 1
@@ -156,40 +218,97 @@ else
exit 1
fi
+echo "create hdfs directory and local directory"
+if [ "$WORKSPACE_USER_ROOT_PATH" != "" ]
+then
+ localRootDir=$WORKSPACE_USER_ROOT_PATH
+ if [[ $WORKSPACE_USER_ROOT_PATH == file://* ]];then
+ localRootDir=${WORKSPACE_USER_ROOT_PATH#file://}
+ mkdir -p $localRootDir/$deployUser
+ sudo chmod -R 775 $localRootDir/$deployUser
+ elif [[ $WORKSPACE_USER_ROOT_PATH == hdfs://* ]];then
+ localRootDir=${WORKSPACE_USER_ROOT_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir/$deployUser
+ hdfs dfs -chmod -R 775 $localRootDir/$deployUser
+ else
+ echo "unsupported filesystem type in $WORKSPACE_USER_ROOT_PATH"
+ fi
+isSuccess "create $WORKSPACE_USER_ROOT_PATH directory"
+fi
+
+
+if [ "$RESULT_SET_ROOT_PATH" != "" ]
+then
+ localRootDir=$RESULT_SET_ROOT_PATH
+ if [[ $RESULT_SET_ROOT_PATH == file://* ]];then
+ localRootDir=${RESULT_SET_ROOT_PATH#file://}
+ mkdir -p $localRootDir/$deployUser
+ sudo chmod -R 775 $localRootDir/$deployUser
+ elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then
+ localRootDir=${RESULT_SET_ROOT_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir/$deployUser
+ hdfs dfs -chmod -R 775 $localRootDir/$deployUser
+ else
+ echo "unsupported filesystem type in $RESULT_SET_ROOT_PATH"
+ fi
+isSuccess "create $RESULT_SET_ROOT_PATH directory"
+fi
+
+
+if [ "$WDS_SCHEDULER_PATH" != "" ]
+then
+ localRootDir=$WDS_SCHEDULER_PATH
+ if [[ $WDS_SCHEDULER_PATH == file://* ]];then
+ localRootDir=${WDS_SCHEDULER_PATH#file://}
+ mkdir -p $localRootDir
+ sudo chmod -R 775 $localRootDir
+ elif [[ $WDS_SCHEDULER_PATH == hdfs://* ]];then
+ localRootDir=${WDS_SCHEDULER_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir
+ hdfs dfs -chmod -R 775 $localRootDir
+ else
+ echo "unsupported filesystem type in $WDS_SCHEDULER_PATH"
+ fi
+isSuccess "create $WDS_SCHEDULER_PATH directory"
+fi
+
+
##init db
if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_ddl.sql"
- isSuccess "source dss_ddl.sql"
- LOCAL_IP="`hostname -i`"
- if [ $GATEWAY_INSTALL_IP == "127.0.0.1" ];then
- echo "GATEWAY_INSTALL_IP is equals 127.0.0.1 ,we will change it to ip address"
- GATEWAY_INSTALL_IP_2=$LOCAL_IP
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_ddl.sql"
+ isSuccess "source dss_ddl.sql"
+ LOCAL_IP="`hostname -i`"
+ if [ $GATEWAY_INSTALL_IP == "127.0.0.1" ];then
+ echo "GATEWAY_INSTALL_IP is equals 127.0.0.1 ,we will change it to ip address"
+ GATEWAY_INSTALL_IP_2=$LOCAL_IP
else
- GATEWAY_INSTALL_IP_2=$GATEWAY_INSTALL_IP
+ GATEWAY_INSTALL_IP_2=$GATEWAY_INSTALL_IP
fi
#echo $GATEWAY_INSTALL_IP_2
sed -i "s/GATEWAY_INSTALL_IP_2/$GATEWAY_INSTALL_IP_2/g" ${workDir}/db/dss_dml.sql
sed -i "s/GATEWAY_PORT/$GATEWAY_PORT/g" ${workDir}/db/dss_dml.sql
mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_dml.sql"
- isSuccess "source dss_dml.sql"
+ isSuccess "source dss_dml.sql"
- if [ '2' = "$INSTALL_MODE" ]||[ '3' = "$INSTALL_MODE" ];then
- echo "visualis support "
- if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ];then
- echo "VISUALIS_NGINX_IP is equals $VISUALIS_NGINX_IP ,we will change it to ip address"
- VISUALIS_NGINX_IP_2=$LOCAL_IP
+ if [[ '2' = "$INSTALL_MODE" ]] || [[ '3' = "$INSTALL_MODE" ]];then
+ echo "visualis support,visualis database will be initialized !"
+ if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ];then
+ echo "VISUALIS_NGINX_IP is equals $VISUALIS_NGINX_IP ,we will change it to ip address"
+ VISUALIS_NGINX_IP_2=$LOCAL_IP
else
- VISUALIS_NGINX_IP_2=$VISUALIS_NGINX_IP
+ VISUALIS_NGINX_IP_2=$VISUALIS_NGINX_IP
fi
#echo $VISUALIS_NGINX_IP_2
sed -i "s/VISUALIS_NGINX_IP_2/$VISUALIS_NGINX_IP_2/g" ${workDir}/db/visualis.sql
sed -i "s/VISUALIS_NGINX_PORT/$VISUALIS_NGINX_PORT/g" ${workDir}/db/visualis.sql
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/visualis.sql"
- isSuccess "source visualis.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/visualis.sql"
+ isSuccess "source visualis.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/davinci.sql"
+ isSuccess "source davinci.sql"
fi
- if [[ '3' = "$INSTALL_MODE" ]];then
- echo "start to update azkaban and qualitis table info "
+ if [[ '3' = "$INSTALL_MODE" ]];then
+ echo "azkaban and qualitis support, azkaban and qualitis database will be initialized !"
#azkaban
if [ $AZKABAN_ADRESS_IP == "127.0.0.1" ];then
echo "AZKABAN_ADRESS_IP is equals 127.0.0.1 ,we will change it to ip address"
@@ -217,28 +336,9 @@ if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
fi
fi
-## davinci db init
-echo "Do you want to clear davinci table information in the database ? If you have not installed davinci environment,you must input '2',if you have davinci installed,choice 1."
-echo " 1: Do not execute table-building statements"
-echo "WARN:"
-echo " 2: Dangerous! Clear all data and rebuild the tables."
-echo ""
-DAVINCI_INSTALL_MODE=1
-read -p "Please input the choice:" idx
-if [[ '2' = "$idx" ]];then
- DAVINCI_INSTALL_MODE=2
- echo "You chose rebuild davinci's table !!! start rebuild all tables"
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/davinci.sql"
- isSuccess "source davinci.sql"
- echo ""
-elif [[ '1' = "$idx" ]];then
- DAVINCI_INSTALL_MODE=1
- echo "You chose not execute table-building statements"
- echo ""
-else
- echo "no choice,exit!"
- exit 1
-fi
+##Deal special symbol '#'
+HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'})
+MYSQL_PASSWORD=$(echo ${MYSQL_PASSWORD//'#'/'\#'})
###linkis Eurkea info
SERVER_IP=$EUREKA_INSTALL_IP
@@ -260,24 +360,30 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
isSuccess "create the dir of $SERVERNAME"
fi
echo "$SERVERNAME-step2:copy install package"
-scp -P $SSH_PORT ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_IP:$SERVER_HOME
+copyFile $SERVER_IP ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME
+
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME/lib"; then
+ copyFile $SERVER_IP ${workDir}/lib $SERVER_HOME
+fi
+
+#copyFile $SERVER_IP ${workDir}/lib $SERVER_HOME
isSuccess "copy ${SERVERNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
-ssh -p $SSH_PORT $SERVER_IP "cd $workDir/;scp -r lib/* $SERVER_HOME/$SERVERNAME/lib"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;scp -r lib/* $SERVER_HOME/$SERVERNAME/lib"
isSuccess "unzip ${SERVERNAME}.zip"
echo "$SERVERNAME-step3:subsitution conf"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port:.*#port: $SERVER_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#hostname:.*#hostname: $SERVER_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port:.*#port: $SERVER_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#hostname:.*#hostname: $SERVER_IP#g\" $SERVER_CONF_PATH"
isSuccess "subsitution conf of $SERVERNAME"
}
##function end
@@ -291,16 +397,16 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
isSuccess "create the dir of $SERVERNAME"
fi
echo "$SERVERNAME-step2:copy install package"
-scp -P $SSH_PORT ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_IP:$SERVER_HOME
+copyFile $SERVER_IP ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME
isSuccess "copy ${SERVERNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
isSuccess "unzip ${SERVERNAME}.zip"
}
##function end
@@ -315,15 +421,15 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME/$APPJOINTPARENT; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME/$APPJOINTPARENT;sudo chown -R $deployUser:$deployUser $SERVER_HOME/$APPJOINTPARENT"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME/$APPJOINTPARENT"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME/$APPJOINTPARENT;sudo chown -R $deployUser:$deployUser $SERVER_HOME/$APPJOINTPARENT"
isSuccess "create the dir of $SERVER_HOME/$APPJOINTPARENT;"
fi
echo "$APPJOINTNAME-step2:copy install package"
-scp -P $SSH_PORT $workDir/share/appjoints/$APPJOINTNAME/*.zip $SERVER_IP:$SERVER_HOME/$APPJOINTPARENT
+copyFile $SERVER_IP $workDir/share/appjoints/$APPJOINTNAME/*.zip $SERVER_HOME/$APPJOINTPARENT
isSuccess "copy ${APPJOINTNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/$APPJOINTPARENT/;unzip -o dss-*-appjoint.zip > /dev/null;rm -rf dss-*-appjoint.zip"
+executeCMD $SERVER_IP "cd $SERVER_HOME/$APPJOINTPARENT/;unzip -o dss-*-appjoint.zip > /dev/null;rm -rf dss-*-appjoint.zip"
isSuccess "install ${APPJOINTNAME}.zip"
}
##function end
@@ -339,18 +445,18 @@ installPackage
###update Dss-Server linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.azkaban.address.*#wds.dss.appjoint.scheduler.azkaban.address=http://${AZKABAN_ADRESS_IP}:${AZKABAN_ADRESS_PORT}#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.project.store.dir.*#wds.dss.appjoint.scheduler.project.store.dir=$WDS_SCHEDULER_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.azkaban.address.*#wds.dss.appjoint.scheduler.azkaban.address=http://${AZKABAN_ADRESS_IP}:${AZKABAN_ADRESS_PORT}#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.project.store.dir.*#wds.dss.appjoint.scheduler.project.store.dir=$WDS_SCHEDULER_PATH#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
-if [ '2' = "$INSTALL_MODE" ]||[ '3' = "$INSTALL_MODE" ];then
+if [[ '2' = "$INSTALL_MODE" ]]||[[ '3' = "$INSTALL_MODE" ]];then
##Flow execution Install
PACKAGE_DIR=dss/dss-flow-execution-entrance
SERVERNAME=dss-flow-execution-entrance
@@ -362,9 +468,9 @@ installPackage
###Update flow execution linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -379,8 +485,8 @@ installPackage
###Update appjoint entrance linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -396,22 +502,25 @@ installVisualis
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ]; then
- VISUALIS_NGINX_IP=$local_host
+ VISUALIS_NGINX_IP=$ipaddr
+fi
+if [ $VISUALIS_SERVER_INSTALL_IP == "127.0.0.1" ]||[ $VISUALIS_SERVER_INSTALL_IP == "0.0.0.0" ]; then
+ VISUALIS_SERVER_INSTALL_IP=$ipaddr
fi
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.ip.*#wds.dss.visualis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.port.*#wds.dss.visualis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.ip.*#wds.dss.visualis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.port.*#wds.dss.visualis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#address: 127.0.0.1#address: $VISUALIS_SERVER_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port: 9007#port: $VISUALIS_SERVER_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#url: http://0.0.0.0:0000/dss/visualis#url: http://$VISUALIS_NGINX_IP:$VISUALIS_NGINX_PORT/dss/visualis#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#address: 0.0.0.0#address: $VISUALIS_NGINX_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port: 0000#port: $VISUALIS_NGINX_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#defaultZone: http://127.0.0.1:20303/eureka/#defaultZone: http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#url: jdbc:mysql://127.0.0.1:3306/xxx?characterEncoding=UTF-8#url: jdbc:mysql://$MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#username: xxx#username: $MYSQL_USER#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#password: xxx#password: $MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#address: 127.0.0.1#address: $VISUALIS_SERVER_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port: 9007#port: $VISUALIS_SERVER_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#url: http://0.0.0.0:0000/dss/visualis#url: http://$VISUALIS_NGINX_IP:$VISUALIS_NGINX_PORT/dss/visualis#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#address: 0.0.0.0#address: $VISUALIS_NGINX_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port: 0000#port: $VISUALIS_NGINX_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#defaultZone: http://127.0.0.1:20303/eureka/#defaultZone: http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#url: jdbc:mysql://127.0.0.1:3306/xxx?characterEncoding=UTF-8#url: jdbc:mysql://$MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#username: xxx#username: $MYSQL_USER#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#password: xxx#password: $MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -423,9 +532,9 @@ APPJOINTNAME=datachecker
installAppjoints
echo "$APPJOINTNAME:subsitution conf"
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.url.*#job.datachecker.jdo.option.url=$HIVE_META_URL#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.username.*#job.datachecker.jdo.option.username=$HIVE_META_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.password.*#job.datachecker.jdo.option.password=$HIVE_META_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.url.*#job.datachecker.jdo.option.url=$HIVE_META_URL#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.username.*#job.datachecker.jdo.option.username=$HIVE_META_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.password.*#job.datachecker.jdo.option.password=$HIVE_META_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of datachecker"
echo "<----------------datachecker appjoint install end------------------->"
echo ""
@@ -436,9 +545,9 @@ APPJOINTNAME=eventchecker
installAppjoints
echo "$APPJOINTNAME:subsitution conf"
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.url.*#msg.eventchecker.jdo.option.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.username.*#msg.eventchecker.jdo.option.username=$MYSQL_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.password.*#msg.eventchecker.jdo.option.password=$MYSQL_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.url.*#msg.eventchecker.jdo.option.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.username.*#msg.eventchecker.jdo.option.username=$MYSQL_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.password.*#msg.eventchecker.jdo.option.password=$MYSQL_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of eventchecker"
echo "<----------------$APPJOINTNAME:end------------------->"
echo ""
@@ -459,7 +568,7 @@ APPJOINTNAME=qualitis
#qualitis appjoint install
installAppjoints
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#baseUrl=http://127.0.0.1:8090#baseUrl=http://$QUALITIS_ADRESS_IP:$QUALITIS_ADRESS_PORT#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#baseUrl=http://127.0.0.1:8090#baseUrl=http://$QUALITIS_ADRESS_IP:$QUALITIS_ADRESS_PORT#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of qualitis"
echo "<----------------$APPJOINTNAME:end------------------->"
echo ""
diff --git a/bin/start-all.sh b/bin/start-all.sh
index 8f5b04e2ca..14016d0200 100644
--- a/bin/start-all.sh
+++ b/bin/start-all.sh
@@ -15,21 +15,33 @@
# limitations under the License.
#
-
-
# Start all dss applications
info="We will start all dss applications, it will take some time, please wait"
echo ${info}
#Actively load user env
+source /etc/profile
source ~/.bash_profile
workDir=`dirname "${BASH_SOURCE-$0}"`
workDir=`cd "$workDir"; pwd`
-
CONF_DIR="${workDir}"/../conf
-CONF_FILE=${CONF_DIR}/config.sh
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"}
+source $LINKIS_DSS_CONF_FILE
+source ${DISTRIBUTION}
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "ERROR: " + $1
+ exit 1
+else
+ echo "INFO:" + $1
+fi
+}
+local_host="`hostname --fqdn`"
+
+ipaddr="`hostname -i`"
function isLocal(){
if [ "$1" == "127.0.0.1" ];then
@@ -56,28 +68,14 @@ function executeCMD(){
}
-function isSuccess(){
-if [ $? -ne 0 ]; then
- echo "ERROR: " + $1
- exit 1
-else
- echo "INFO:" + $1
-fi
-}
-
-sudo yum -y install dos2unix
-
-
-local_host="`hostname --fqdn`"
-
#if there is no LINKIS_INSTALL_HOME,we need to source config again
if [ -z ${DSS_INSTALL_HOME} ];then
echo "Warning: DSS_INSTALL_HOME does not exist, we will source config"
- if [ ! -f "${CONF_FILE}" ];then
+ if [ ! -f "${LINKIS_DSS_CONF_FILE}" ];then
echo "Error: can not find config file, start applications failed"
exit 1
else
- source ${CONF_FILE}
+ source ${LINKIS_DSS_CONF_FILE}
fi
fi
@@ -85,19 +83,29 @@ function startApp(){
echo "<-------------------------------->"
echo "Begin to start $SERVER_NAME"
SERVER_BIN=${DSS_INSTALL_HOME}/${SERVER_NAME}/bin
-SERVER_START_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1;sh start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+#echo $SERVER_BIN
+SERVER_LOCAL_START_CMD="dos2unix ${SERVER_BIN}/* > /dev/null 2>&1; dos2unix ${SERVER_BIN}/../conf/* > /dev/null 2>&1;sh ${SERVER_BIN}/start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+SERVER_REMOTE_START_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh start-${SERVER_NAME}.sh > /dev/null 2>&1"
+
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
-if [ ! -d $SERVER_BIN ];then
+if ! executeCMD $SERVER_IP "test -e $SERVER_BIN"; then
echo "<-------------------------------->"
echo "$SERVER_NAME is not installed,the start steps will be skipped"
echo "<-------------------------------->"
return
fi
-if [ -n "${SERVER_IP}" ];then
- ssh ${SERVER_IP} "${SERVER_START_CMD}"
+isLocal $SERVER_IP
+flag=$?
+echo "Is local "$flag
+if [ $flag == "0" ];then
+ eval $SERVER_LOCAL_START_CMD
else
- ssh ${local_host} "${SERVER_START_CMD}"
+ ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_START_CMD
fi
isSuccess "End to start $SERVER_NAME"
echo "<-------------------------------->"
@@ -119,6 +127,7 @@ SERVER_NAME=linkis-appjoint-entrance
SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
startApp
+#visualis-server
SERVER_NAME=visualis-server
SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
startApp
@@ -126,34 +135,53 @@ startApp
echo ""
echo "Start to check all dss microservice"
echo ""
+
+function checkServer(){
+echo "<-------------------------------->"
+echo "Begin to check $SERVER_NAME"
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
+
+SERVER_BIN=${SERVER_HOME}/${SERVER_NAME}/bin
+
+if ! executeCMD $SERVER_IP "test -e ${DSS_INSTALL_HOME}/${SERVER_NAME}"; then
+ echo "$SERVER_NAME is not installed,the checkServer steps will be skipped"
+ return
+fi
+
+sh $workDir/checkServices.sh $SERVER_NAME $SERVER_IP $SERVER_PORT
+isSuccess "start $SERVER_NAME "
+sleep 3
+echo "<-------------------------------->"
+}
+
#check dss-server
-MICRO_SERVICE_NAME=dss-server
-MICRO_SERVICE_IP=$DSS_SERVER_INSTALL_IP
-MICRO_SERVICE_PORT=$DSS_SERVER_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=dss-server
+SERVER_IP=$DSS_SERVER_INSTALL_IP
+SERVER_PORT=$DSS_SERVER_PORT
+checkServer
#check dss-flow-execution-entrance
-MICRO_SERVICE_NAME=dss-flow-execution-entrance
-MICRO_SERVICE_IP=$FLOW_EXECUTION_INSTALL_IP
-MICRO_SERVICE_PORT=$FLOW_EXECUTION_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=dss-flow-execution-entrance
+SERVER_IP=$FLOW_EXECUTION_INSTALL_IP
+SERVER_PORT=$FLOW_EXECUTION_PORT
+checkServer
#check linkis-appjoint-entrance
-MICRO_SERVICE_NAME=linkis-appjoint-entrance
-MICRO_SERVICE_IP=$APPJOINT_ENTRANCE_INSTALL_IP
-MICRO_SERVICE_PORT=$APPJOINT_ENTRANCE_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=linkis-appjoint-entrance
+SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
+SERVER_PORT=$APPJOINT_ENTRANCE_PORT
+checkServer
#check visualis-server
-sleep 10 #for visualis-server
-MICRO_SERVICE_NAME=visualis-server
-MICRO_SERVICE_IP=$VISUALIS_SERVER_INSTALL_IP
-MICRO_SERVICE_PORT=$VISUALIS_SERVER_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+sleep 10 #visualis service need more time to register
+SERVER_NAME=visualis-server
+SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
+SERVER_PORT=$VISUALIS_SERVER_PORT
+checkServer
+echo "DSS started successfully"
diff --git a/bin/stop-all.sh b/bin/stop-all.sh
index 82252c7b55..af158c9b78 100644
--- a/bin/stop-all.sh
+++ b/bin/stop-all.sh
@@ -29,7 +29,12 @@ workDir=`cd "$workDir"; pwd`
CONF_DIR="${workDir}"/../conf
-CONF_FILE=${CONF_DIR}/config.sh
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"}
+source ${DISTRIBUTION}
+
+local_host="`hostname --fqdn`"
+ipaddr="`hostname -i`"
function isSuccess(){
if [ $? -ne 0 ]; then
@@ -40,18 +45,40 @@ else
fi
}
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ echo "Is local "$flag
+ if [ $flag == "0" ];then
+ eval $2
+ else
+ ssh -p $SSH_PORT $1 $2
+ fi
+
+}
-local_host="`hostname --fqdn`"
#if there is no LINKIS_INSTALL_HOME,we need to source config again
if [ -z ${DSS_INSTALL_HOME} ];then
echo "Warning: DSS_INSTALL_HOME does not exist, we will source config"
- if [ ! -f "${CONF_FILE}" ];then
+ if [ ! -f "${LINKIS_DSS_CONF_FILE}" ];then
echo "Error: can not find config file, stop applications failed"
exit 1
else
- source ${CONF_FILE}
+ source ${LINKIS_DSS_CONF_FILE}
fi
fi
@@ -59,18 +86,26 @@ function stopAPP(){
echo "<-------------------------------->"
echo "Begin to stop $SERVER_NAME"
SERVER_BIN=${DSS_INSTALL_HOME}/${SERVER_NAME}/bin
-SERVER_STOP_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh stop-${SERVER_NAME}.sh"
-if [ ! -d ${DSS_INSTALL_HOME}/${SERVER_NAME} ];then
+SERVER_LOCAL_STOP_CMD="sh ${SERVER_BIN}/stop-${SERVER_NAME}.sh"
+SERVER_REMOTE_STOP_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; sh stop-${SERVER_NAME}.sh "
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
+
+if ! executeCMD $SERVER_IP "test -e ${DSS_INSTALL_HOME}/${SERVER_NAME}"; then
echo "$SERVER_NAME is not installed,the stop steps will be skipped"
return
fi
-if [ -n "${SERVER_IP}" ];then
- ssh -p $SSH_PORT ${SERVER_IP} "${SERVER_STOP_CMD}"
+isLocal $SERVER_IP
+flag=$?
+echo "Is local "$flag
+if [ $flag == "0" ];then
+ eval $SERVER_LOCAL_STOP_CMD
else
- ssh -p $SSH_PORT ${local_host} "${SERVER_STOP_CMD}"
+ ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_STOP_CMD
fi
-isSuccess "End to stop $SERVER_NAME"
echo "<-------------------------------->"
sleep 3
}
@@ -89,7 +124,10 @@ stopAPP
SERVER_NAME=linkis-appjoint-entrance
SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
stopAPP
+
#visualis-server
SERVER_NAME=visualis-server
SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
stopAPP
+
+echo "stop-all shell script executed completely"
diff --git a/conf/config.sh b/conf/config.sh
index 2d0172d23a..5499f42c1d 100644
--- a/conf/config.sh
+++ b/conf/config.sh
@@ -1,8 +1,13 @@
+#!/bin/sh
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
### deploy user
deployUser=hadoop
### The install home path of DSS,Must provided
-DSS_INSTALL_HOME=/appcom/Install/DSS
+DSS_INSTALL_HOME=$workDir
### Specifies the user workspace, which is used to store the user's script files and log files.
### Generally local directory
@@ -72,4 +77,4 @@ AZKABAN_ADRESS_PORT=8091
QUALITIS_ADRESS_IP=127.0.0.1
QUALITIS_ADRESS_PORT=8090
-DSS_VERSION=0.7.0
\ No newline at end of file
+DSS_VERSION=0.7.0
diff --git a/db/dss_dml.sql b/db/dss_dml.sql
index 1d48c18b20..79de16e9af 100644
--- a/db/dss_dml.sql
+++ b/db/dss_dml.sql
@@ -9,7 +9,7 @@ INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `s
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.spark.sql', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.spark.scala', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.hive.hql', @linkis_appid, '1', '1', '0', '1', NULL);
-INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.jdbc', @linkis_appid, '1', '1', '0', '1', NULL);
+INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.jdbc.jdbc', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.control.empty', @linkis_appid, '1', '1', '0', '0', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.appjoint.sendemail', @linkis_appid, '1', '1', '0', '0', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.appjoint.eventchecker.eventsender', @linkis_appid, '1', '1', '0', '0', NULL);
diff --git a/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md b/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
index d3dde44ae7..6a24df612d 100644
--- a/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
+++ b/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
@@ -7,7 +7,7 @@
```xml
0.7.0
- 0.9.1
+ 0.9.3
2.11.8
1.8
3.3.3
diff --git a/docs/en_US/ch2/DSS Quick Installation Guide.md b/docs/en_US/ch2/DSS Quick Installation Guide.md
index f8837393dc..f4b8cd1595 100644
--- a/docs/en_US/ch2/DSS Quick Installation Guide.md
+++ b/docs/en_US/ch2/DSS Quick Installation Guide.md
@@ -17,7 +17,7 @@ DSS also implements the integration of many external systems, such as [Qualitis]
DSS environment configuration can be divided into three steps, including basic software installation, backend environment configuration, and frontend environment configuration. The details are as below:
### 2.1 Frontend and backend basic software installation
-Linkis standard version (above 0.9.1). How to install [Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
+Linkis standard version (above 0.9.3). How to install [Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
JDK (above 1.8.0_141). How to install [JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -111,7 +111,7 @@ The environment is ready, click me to enter ****[4. Installation and use](https:
## Three Standard DSS environment configuration preparation
The standard DSS environment preparation is also divided into three parts, the frontEnd-end and back-end basic software installation, back-end environment preparation, and frontEnd-end environment preparation. The details are as follows:
### 3.1 frontEnd and BackEnd basic software installation
-Linkis standard version (above 0.9.1), [How to install Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
+Linkis standard version (above 0.9.3), [How to install Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
JDK (above 1.8.0_141), How to install [JDK](https://www.runoob.com/java/java-environment-setup.html)
diff --git "a/docs/zh_CN/ch1/DSS\347\274\226\350\257\221\346\226\207\346\241\243.md" "b/docs/zh_CN/ch1/DSS\347\274\226\350\257\221\346\226\207\346\241\243.md"
index 26f89d3dc4..0b4d82f0b8 100644
--- "a/docs/zh_CN/ch1/DSS\347\274\226\350\257\221\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/ch1/DSS\347\274\226\350\257\221\346\226\207\346\241\243.md"
@@ -7,7 +7,7 @@
```xml
0.7.0
- 0.9.1
+ 0.9.3
2.11.8
1.8
3.3.3
diff --git "a/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md" "b/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
index d3d96b212a..23c3008bc2 100644
--- "a/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
@@ -32,7 +32,7 @@
## 二、精简版DSS环境配置准备
DSS环境配置准备分为三部分,前后端基础软件安装、后端环境配置准备和前端环配置境准备,详细介绍如下:
### 2.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis简单版(0.9.3及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -143,7 +143,7 @@ dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/
## 三、简单版DSS环境配置准备
DSS环境配置准备分为三部分,前后端基础软件安装、后端环境配置准备和前端环配置境准备,详细介绍如下:
### 3.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis简单版(0.9.3及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -251,7 +251,7 @@ dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/
## 四、标准版DSS环境配置准备
标准版DSS环境准备也分为三部分,前后端基础软件安装、后端环境准备和前端环境准备,详细介绍如下:
### 4.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis简单版(0.9.3及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
diff --git a/dss-appjoint-auth/pom.xml b/dss-appjoint-auth/pom.xml
index 04144de5c6..595ad36f0b 100644
--- a/dss-appjoint-auth/pom.xml
+++ b/dss-appjoint-auth/pom.xml
@@ -33,6 +33,11 @@
linkis-gateway-httpclient-support
${linkis.version}
+
+ com.webank.wedatasphere.linkis
+ linkis-common
+ ${linkis.version}
+
diff --git a/dss-flow-execution-entrance/pom.xml b/dss-flow-execution-entrance/pom.xml
index 12218e310e..97d12b24be 100644
--- a/dss-flow-execution-entrance/pom.xml
+++ b/dss-flow-execution-entrance/pom.xml
@@ -33,12 +33,28 @@
com.webank.wedatasphere.linkis
linkis-ujes-entrance
${linkis.version}
+
+
+ org.apache.poi
+ ooxml-schemas
+
+
+
+
+ com.webank.wedatasphere.linkis
+ linkis-cloudRPC
+ ${linkis.version}
-
com.webank.wedatasphere.dss
dss-linkis-node-execution
${dss.version}
+
+
+ com.ibm.icu
+ icu4j
+
+
diff --git a/dss-flow-execution-entrance/src/main/assembly/distribution.xml b/dss-flow-execution-entrance/src/main/assembly/distribution.xml
index c080c0c09f..d59e84970f 100644
--- a/dss-flow-execution-entrance/src/main/assembly/distribution.xml
+++ b/dss-flow-execution-entrance/src/main/assembly/distribution.xml
@@ -84,7 +84,6 @@
com.google.code.gson:gson:jar
com.google.guava:guava:jar
com.google.inject:guice:jar
- com.google.protobuf:protobuf-java:jar
com.netflix.archaius:archaius-core:jar
com.netflix.eureka:eureka-client:jar
com.netflix.eureka:eureka-core:jar
@@ -100,7 +99,6 @@
com.netflix.ribbon:ribbon-loadbalancer:jar
com.netflix.ribbon:ribbon-transport:jar
com.netflix.servo:servo-core:jar
- com.ning:async-http-client:jar
com.sun.jersey.contribs:jersey-apache-client4:jar
com.sun.jersey:jersey-client:jar
com.sun.jersey:jersey-core:jar
@@ -113,15 +111,10 @@
com.webank.wedatasphere.linkis:linkis-common:jar
com.webank.wedatasphere.linkis:linkis-module:jar
commons-beanutils:commons-beanutils:jar
- commons-beanutils:commons-beanutils-core:jar
- commons-cli:commons-cli:jar
commons-codec:commons-codec:jar
commons-collections:commons-collections:jar
commons-configuration:commons-configuration:jar
- commons-daemon:commons-daemon:jar
commons-dbcp:commons-dbcp:jar
- commons-digester:commons-digester:jar
- commons-httpclient:commons-httpclient:jar
commons-io:commons-io:jar
commons-jxpath:commons-jxpath:jar
commons-lang:commons-lang:jar
@@ -129,7 +122,6 @@
commons-net:commons-net:jar
commons-pool:commons-pool:jar
io.micrometer:micrometer-core:jar
- io.netty:netty:jar
io.netty:netty-all:jar
io.netty:netty-buffer:jar
io.netty:netty-codec:jar
@@ -146,41 +138,21 @@
javax.annotation:javax.annotation-api:jar
javax.inject:javax.inject:jar
javax.servlet:javax.servlet-api:jar
- javax.servlet.jsp:jsp-api:jar
javax.validation:validation-api:jar
javax.websocket:javax.websocket-api:jar
javax.ws.rs:javax.ws.rs-api:jar
javax.xml.bind:jaxb-api:jar
javax.xml.stream:stax-api:jar
joda-time:joda-time:jar
- log4j:log4j:jar
mysql:mysql-connector-java:jar
- net.databinder.dispatch:dispatch-core_2.11:jar
- net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar
org.antlr:antlr-runtime:jar
org.antlr:stringtemplate:jar
- org.apache.commons:commons-compress:jar
org.apache.commons:commons-math:jar
- org.apache.commons:commons-math3:jar
- org.apache.curator:curator-client:jar
- org.apache.curator:curator-framework:jar
- org.apache.curator:curator-recipes:jar
- org.apache.directory.api:api-asn1-api:jar
- org.apache.directory.api:api-util:jar
- org.apache.directory.server:apacheds-i18n:jar
- org.apache.directory.server:apacheds-kerberos-codec:jar
- org.apache.hadoop:hadoop-annotations:jar
- org.apache.hadoop:hadoop-auth:jar
- org.apache.hadoop:hadoop-common:jar
- org.apache.hadoop:hadoop-hdfs:jar
- org.apache.htrace:htrace-core:jar
org.apache.httpcomponents:httpclient:jar
- org.apache.httpcomponents:httpcore:jar
org.apache.logging.log4j:log4j-api:jar
org.apache.logging.log4j:log4j-core:jar
org.apache.logging.log4j:log4j-jul:jar
org.apache.logging.log4j:log4j-slf4j-impl:jar
- org.apache.zookeeper:zookeeper:jar
org.aspectj:aspectjweaver:jar
org.bouncycastle:bcpkix-jdk15on:jar
org.bouncycastle:bcprov-jdk15on:jar
@@ -194,7 +166,6 @@
org.eclipse.jetty:jetty-continuation:jar
org.eclipse.jetty:jetty-http:jar
org.eclipse.jetty:jetty-io:jar
- org.eclipse.jetty:jetty-jndi:jar
org.eclipse.jetty:jetty-plus:jar
org.eclipse.jetty:jetty-security:jar
org.eclipse.jetty:jetty-server:jar
@@ -210,7 +181,6 @@
org.eclipse.jetty.websocket:websocket-common:jar
org.eclipse.jetty.websocket:websocket-server:jar
org.eclipse.jetty.websocket:websocket-servlet:jar
- org.fusesource.leveldbjni:leveldbjni-all:jar
org.glassfish.hk2:class-model:jar
org.glassfish.hk2:config-types:jar
org.glassfish.hk2.external:aopalliance-repackaged:jar
@@ -243,13 +213,10 @@
org.json4s:json4s-ast_2.11:jar
org.json4s:json4s-core_2.11:jar
org.json4s:json4s-jackson_2.11:jar
- org.jsoup:jsoup:jar
org.jvnet.mimepull:mimepull:jar
org.jvnet:tiger-types:jar
org.latencyutils:LatencyUtils:jar
org.mortbay.jasper:apache-el:jar
- org.mortbay.jetty:jetty:jar
- org.mortbay.jetty:jetty-util:jar
org.ow2.asm:asm-analysis:jar
org.ow2.asm:asm-commons:jar
org.ow2.asm:asm-tree:jar
@@ -296,11 +263,8 @@
org.springframework:spring-jcl:jar
org.springframework:spring-web:jar
org.springframework:spring-webmvc:jar
- org.tukaani:xz:jar
org.yaml:snakeyaml:jar
software.amazon.ion:ion-java:jar
- xerces:xercesImpl:jar
- xmlenc:xmlenc:jar
xmlpull:xmlpull:jar
xpp3:xpp3_min:jar
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
index 8d0735e581..6cfcf877be 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
@@ -19,15 +19,27 @@
import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
import java.util.Map;
/**
- * Created by peacewong on 2019/11/14.
+ * Created by johnnwang on 2019/11/14.
*/
-public class FlowExecutionAppJointSignalSharedJob extends FlowExecutionAppJointLinkisSharedJob implements SignalSharedJob {
+public class FlowExecutionAppJointSignalSharedJob extends FlowExecutionAppJointLinkisJob implements SignalSharedJob {
+ private JobSignalKeyCreator signalKeyCreator;
+
+ @Override
+ public JobSignalKeyCreator getSignalKeyCreator() {
+ return this.signalKeyCreator;
+ }
+
+ @Override
+ public void setSignalKeyCreator(JobSignalKeyCreator signalKeyCreator) {
+ this.signalKeyCreator = signalKeyCreator;
+ }
@Override
public String getMsgSaveKey() {
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java
new file mode 100644
index 0000000000..e284b44d8e
--- /dev/null
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.flow.execution.entrance.job;
+
+import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.Job;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
+
+public class FlowExecutionJobSignalKeyCreator implements JobSignalKeyCreator {
+
+ @Override
+ public String getSignalKeyByJob(Job job) {
+ String projectId = job.getJobProps().get(FlowExecutionEntranceConfiguration.PROJECT_NAME());
+ String flowId = job.getJobProps().get(FlowExecutionEntranceConfiguration.FLOW_NAME());
+ String flowExecId = job.getJobProps().get(FlowExecutionEntranceConfiguration.FLOW_EXEC_ID());
+ return projectId + "." + flowId + "." + flowExecId;
+ }
+
+ @Override
+ public String getSignalKeyBySignalSharedJob(SignalSharedJob job) {
+ return getSignalKeyByJob((Job)job);
+ }
+}
diff --git a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/conf/FlowExecutionEntranceConfiguration.scala b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/conf/FlowExecutionEntranceConfiguration.scala
index 91cdeedd6d..245a23687f 100644
--- a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/conf/FlowExecutionEntranceConfiguration.scala
+++ b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/conf/FlowExecutionEntranceConfiguration.scala
@@ -44,9 +44,10 @@ object FlowExecutionEntranceConfiguration {
val NODE_STATUS_POLLER_THREAD_SIZE = CommonVars("wds.dds.flow.node.status.poller.thread.size", 20)
- val NODE_STATUS_POLLER_SCHEDULER_TIME = CommonVars("wds.dds.flow.node.status.poller.scheduler.time", 2)
+ val NODE_STATUS_POLLER_SCHEDULER_TIME = CommonVars("wds.dds.flow.node.status.poller.scheduler.time", 5)
val FLOW_EXECUTION_SCHEDULER_POOL_SIZE = CommonVars("wds.linkis.flow.execution.pool.size", 30)
+ val NODE_STATUS_INTERVAL = CommonVars("wds.dds.flow.node.status.poller.interval.time", 3000)
val COMMAND = "command"
diff --git a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/execution/DefaultFlowExecution.scala b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/execution/DefaultFlowExecution.scala
index 04b56936e8..d5a9cbf797 100644
--- a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/execution/DefaultFlowExecution.scala
+++ b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/execution/DefaultFlowExecution.scala
@@ -16,7 +16,7 @@
*/
package com.webank.wedatasphere.dss.flow.execution.entrance.execution
-import java.util
+
import java.util.concurrent.{Executors, LinkedBlockingQueue, TimeUnit}
import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration
@@ -31,13 +31,11 @@ import scala.collection.mutable.ArrayBuffer
/**
- * Created by peacewong on 2019/11/5.
- */
+ * Created by johnnwang on 2019/11/5.
+ */
@Service
class DefaultFlowExecution extends FlowExecution with Logging {
- private val executeService = Utils.newCachedThreadPool(FlowExecutionEntranceConfiguration.FLOW_EXECUTION_POOL_SIZE.getValue,
- "DefaultFlowExecution",true)
private val nodeRunnerQueue: LinkedBlockingQueue[NodeRunner] = new LinkedBlockingQueue[NodeRunner]()
@@ -74,6 +72,7 @@ class DefaultFlowExecution extends FlowExecution with Logging {
if (pollerCount < FlowExecutionEntranceConfiguration.NODE_STATUS_POLLER_THREAD_SIZE.getValue){
scheduledThreadPool.scheduleAtFixedRate(new NodeExecutionStatusPoller(nodeRunnerQueue), 1,
FlowExecutionEntranceConfiguration.NODE_STATUS_POLLER_SCHEDULER_TIME.getValue ,TimeUnit.SECONDS)
+ pollerCount = pollerCount + 1
}
}
}
diff --git a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/job/parser/FlowJobNodeParser.scala b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/job/parser/FlowJobNodeParser.scala
index 158a8aebde..6447e45eb2 100644
--- a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/job/parser/FlowJobNodeParser.scala
+++ b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/job/parser/FlowJobNodeParser.scala
@@ -19,6 +19,7 @@ package com.webank.wedatasphere.dss.flow.execution.entrance.job.parser
import java.util
+import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration
import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration._
import com.webank.wedatasphere.dss.flow.execution.entrance.exception.FlowExecutionErrorException
import com.webank.wedatasphere.dss.flow.execution.entrance.job.FlowEntranceJob
@@ -34,8 +35,8 @@ import org.springframework.core.annotation.Order
import org.springframework.stereotype.Component
/**
- * Created by peacewong on 2019/11/6.
- */
+ * Created by johnnwang on 2019/11/6.
+ */
@Order(2)
@Component
@@ -76,6 +77,8 @@ class FlowJobNodeParser extends FlowEntranceJobParser with Logging{
}
}
+ propsMap.put(FlowExecutionEntranceConfiguration.FLOW_EXEC_ID, flowEntranceJob.getId)
+
params.put(PROPS_MAP, propsMap)
params.put(FLOW_VAR_MAP, flowVar)
params.put(PROJECT_RESOURCES, project.getProjectResources)
diff --git a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/AppJointJobBuilder.scala b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/AppJointJobBuilder.scala
index 02505695a8..440e6a4aa4 100644
--- a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/AppJointJobBuilder.scala
+++ b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/AppJointJobBuilder.scala
@@ -26,13 +26,28 @@ import com.webank.wedatasphere.dss.flow.execution.entrance.job._
import com.webank.wedatasphere.dss.flow.execution.entrance.utils.FlowExecutionUtils
import com.webank.wedatasphere.dss.linkis.node.execution.conf.LinkisJobExecutionConfiguration
import com.webank.wedatasphere.dss.linkis.node.execution.entity.BMLResource
+import com.webank.wedatasphere.dss.linkis.node.execution.execution.impl.LinkisNodeExecutionImpl
import com.webank.wedatasphere.dss.linkis.node.execution.job._
+import com.webank.wedatasphere.dss.linkis.node.execution.parser.JobParamsParser
import org.apache.commons.lang.StringUtils
/**
- * Created by peacewong on 2019/11/5.
- */
+ * Created by johnnwang on 2019/11/5.
+ */
object AppJointJobBuilder {
+
+ val signalKeyCreator = new FlowExecutionJobSignalKeyCreator
+
+ init()
+
+ def init(): Unit ={
+ val jobParamsParser = new JobParamsParser
+
+ jobParamsParser.setSignalKeyCreator(signalKeyCreator)
+
+ LinkisNodeExecutionImpl.getLinkisNodeExecution.asInstanceOf[LinkisNodeExecutionImpl].registerJobParser(jobParamsParser)
+ }
+
def builder():FlowBuilder = new FlowBuilder
class FlowBuilder extends Builder {
@@ -95,9 +110,10 @@ object AppJointJobBuilder {
override protected def createSignalSharedJob(isLinkisType: Boolean): SignalSharedJob = {
if(isLinkisType){
- null
+ null
} else {
val signalJob = new FlowExecutionAppJointSignalSharedJob
+ signalJob.setSignalKeyCreator(signalKeyCreator)
signalJob.setJobProps(this.jobProps)
signalJob
}
diff --git a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/DefaultNodeRunner.scala b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/DefaultNodeRunner.scala
index a9462dda8a..89ddc910a4 100644
--- a/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/DefaultNodeRunner.scala
+++ b/dss-flow-execution-entrance/src/main/scala/com/webank/wedatasphere/dss/flow/execution/entrance/node/DefaultNodeRunner.scala
@@ -27,11 +27,12 @@ import com.webank.wedatasphere.dss.flow.execution.entrance.log.FlowExecutionLog
import com.webank.wedatasphere.dss.flow.execution.entrance.node.NodeExecutionState.NodeExecutionState
import com.webank.wedatasphere.dss.linkis.node.execution.execution.impl.LinkisNodeExecutionImpl
import com.webank.wedatasphere.dss.linkis.node.execution.listener.LinkisExecutionListener
+import com.webank.wedatasphere.linkis.common.exception.ErrorException
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
/**
- * Created by peacewong on 2019/11/5.
- */
+ * Created by johnnwang on 2019/11/5.
+ */
class DefaultNodeRunner extends NodeRunner with Logging {
private var node: SchedulerNode = _
@@ -48,6 +49,8 @@ class DefaultNodeRunner extends NodeRunner with Logging {
private var startTime: Long = _
+ private var lastGetStatusTime: Long = 0
+
override def getNode: SchedulerNode = this.node
def setNode(schedulerNode: SchedulerNode): Unit = {
@@ -64,6 +67,15 @@ class DefaultNodeRunner extends NodeRunner with Logging {
}
override def isLinkisJobCompleted: Boolean = Utils.tryCatch{
+
+ val interval = System.currentTimeMillis() - lastGetStatusTime
+
+ if ( interval < FlowExecutionEntranceConfiguration.NODE_STATUS_INTERVAL.getValue){
+ return false
+ }
+
+ lastGetStatusTime = System.currentTimeMillis()
+
if(NodeExecutionState.isCompleted(getStatus)) return true
val toState = NodeExecutionState.withName(LinkisNodeExecutionImpl.getLinkisNodeExecution.getState(this.linkisJob))
if (NodeExecutionState.isCompleted(toState)) {
@@ -75,9 +87,11 @@ class DefaultNodeRunner extends NodeRunner with Logging {
} else {
false
}
- }{ t =>
- warn(s"Failed to get ${this.node.getName} linkis job states", t)
- false
+ }{
+ case e:ErrorException => logger.warn(s"failed to get ${this.node.getName} state", e)
+ false
+ case t :Throwable => logger.error(s"failed to get ${this.node.getName} state", t)
+ true
}
override def setNodeRunnerListener(nodeRunnerListener: NodeRunnerListener): Unit = this.nodeRunnerListener = nodeRunnerListener
@@ -102,7 +116,7 @@ class DefaultNodeRunner extends NodeRunner with Logging {
}
LinkisNodeExecutionImpl.getLinkisNodeExecution.runJob(this.linkisJob)
- info(s"Finished to run node of ${node.getName}")
+ info(s"start to run node of ${node.getName}")
/*LinkisNodeExecutionImpl.getLinkisNodeExecution.waitForComplete(this.linkisJob)
val listener = LinkisNodeExecutionImpl.getLinkisNodeExecution.asInstanceOf[LinkisExecutionListener]
val toState = LinkisNodeExecutionImpl.getLinkisNodeExecution.getState(this.linkisJob)
diff --git a/dss-linkis-node-execution/pom.xml b/dss-linkis-node-execution/pom.xml
index ecea3e8bff..b98c33987a 100644
--- a/dss-linkis-node-execution/pom.xml
+++ b/dss-linkis-node-execution/pom.xml
@@ -33,13 +33,13 @@
             <groupId>com.webank.wedatasphere.linkis</groupId>
             <artifactId>linkis-ujes-client</artifactId>
-            <version>0.9.1</version>
+            <version>0.9.3</version>
             <groupId>com.webank.wedatasphere.linkis</groupId>
             <artifactId>linkis-workspace-httpclient</artifactId>
-            <version>0.9.1</version>
+            <version>0.9.3</version>
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/WorkflowContextImpl.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/WorkflowContextImpl.java
index e928813569..8f2ff84fac 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/WorkflowContextImpl.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/WorkflowContextImpl.java
@@ -21,12 +21,13 @@
import com.google.common.cache.CacheBuilder;
import com.webank.wedatasphere.dss.linkis.node.execution.conf.LinkisJobExecutionConfiguration;
import com.webank.wedatasphere.dss.linkis.node.execution.entity.ContextInfo;
+import org.apache.commons.lang.StringUtils;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
- * Created by peacewong on 2019/9/26.
+ * Created by johnnwang on 2019/9/26.
*/
public class WorkflowContextImpl implements WorkflowContext {
@@ -79,7 +80,7 @@ public Map getSubMapByPrefix(String keyPrefix) {
while (keys.hasNext()) {
String key = keys.next();
if (key.startsWith(keyPrefix)) {
- map.put(key, getValue(key));
+ map.put(StringUtils.substringAfter(key, keyPrefix), getValue(key));
}
}
return map;
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/conf/LinkisJobExecutionConfiguration.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/conf/LinkisJobExecutionConfiguration.java
index 852262bb57..9b75d73a01 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/conf/LinkisJobExecutionConfiguration.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/conf/LinkisJobExecutionConfiguration.java
@@ -68,7 +68,7 @@ public class LinkisJobExecutionConfiguration {
public final static CommonVars LINKIS_CONNECTION_TIMEOUT = CommonVars.apply("wds.linkis.flow.connection.timeout",30000);
- public final static CommonVars LINKIS_JOB_REQUEST_STATUS_TIME = CommonVars.apply("wds.linkis.flow.connection.timeout",1000);
+ public final static CommonVars LINKIS_JOB_REQUEST_STATUS_TIME = CommonVars.apply("wds.linkis.flow.connection.timeout",3000);
public final static CommonVars LINKIS_ADMIN_USER = CommonVars.apply("wds.linkis.client.flow.adminuser","ws");
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/execution/impl/LinkisNodeExecutionImpl.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/execution/impl/LinkisNodeExecutionImpl.java
index 635ce6ac03..264e8288c4 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/execution/impl/LinkisNodeExecutionImpl.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/execution/impl/LinkisNodeExecutionImpl.java
@@ -23,7 +23,6 @@
import com.webank.wedatasphere.dss.linkis.node.execution.job.SharedJob;
import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
import com.webank.wedatasphere.dss.linkis.node.execution.listener.LinkisExecutionListener;
-import com.webank.wedatasphere.dss.linkis.node.execution.parser.JobParamsParser;
import com.webank.wedatasphere.dss.linkis.node.execution.parser.JobRuntimeParamsParser;
import com.webank.wedatasphere.dss.linkis.node.execution.service.impl.BuildJobActionImpl;
import com.webank.wedatasphere.dss.linkis.node.execution.conf.LinkisJobExecutionConfiguration;
@@ -51,7 +50,7 @@ public class LinkisNodeExecutionImpl implements LinkisNodeExecution , LinkisExec
private LinkisNodeExecutionImpl() {
registerJobParser(new CodeParser());
- registerJobParser(new JobParamsParser());
+ /*registerJobParser(new JobParamsParser());*/
registerJobParser(new JobRuntimeParamsParser());
}
@@ -107,7 +106,7 @@ public String getLog(Job job) {
job.getLogFromLine(),
LinkisJobExecutionConfiguration.LOG_SIZE.getValue());
- job.setLogFromLint(jobLogResult.fromLine());
+ job.setLogFromLine(jobLogResult.fromLine());
ArrayList logArray = jobLogResult.getLog();
@@ -191,12 +190,7 @@ public void onStatusChanged(String fromState, String toState, Job job) {
if (job instanceof SignalSharedJob){
SignalSharedJob signalSharedJob = (SignalSharedJob) job;
String result = getResult(job, 0, -1);
- String msgSaveKey = signalSharedJob.getMsgSaveKey();
- String key = SignalSharedJob.PREFIX ;
- if (StringUtils.isNotEmpty(msgSaveKey)){
- key = key + msgSaveKey;
- }
- WorkflowContext.getAppJointContext().setValue(key, result , -1);
+ WorkflowContext.getAppJointContext().setValue(signalSharedJob.getSharedKey(), result , -1);
} else if(job instanceof SharedJob){
String taskId = job.getJobExecuteResult().getTaskID();
job.getLogObj().info("Set shared info:" + taskId);
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractAppJointLinkisJob.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractAppJointLinkisJob.java
index 1bce73c8af..7d9e47f069 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractAppJointLinkisJob.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractAppJointLinkisJob.java
@@ -146,7 +146,7 @@ public int getLogFromLine() {
}
@Override
- public void setLogFromLint(int index) {
+ public void setLogFromLine(int index) {
this.logFromLine = index;
}
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractCommonLinkisJob.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractCommonLinkisJob.java
index 2d836eaacc..c7f74f2348 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractCommonLinkisJob.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/AbstractCommonLinkisJob.java
@@ -154,7 +154,7 @@ public int getLogFromLine() {
}
@Override
- public void setLogFromLint(int index) {
+ public void setLogFromLine(int index) {
this.logFromLine = index;
}
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/Job.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/Job.java
index ac25c724b4..1811ad105e 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/Job.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/Job.java
@@ -63,6 +63,6 @@ public interface Job {
int getLogFromLine();
- void setLogFromLint(int index);
+ void setLogFromLine(int index);
}
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/JobSignalKeyCreator.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/JobSignalKeyCreator.java
new file mode 100644
index 0000000000..ce64bf9017
--- /dev/null
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/JobSignalKeyCreator.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.linkis.node.execution.job;
+
+public interface JobSignalKeyCreator {
+
+ String getSignalKeyByJob(Job job);
+
+ String getSignalKeyBySignalSharedJob(SignalSharedJob job);
+}
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/SignalSharedJob.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/SignalSharedJob.java
index a71a12555a..13a9b8b444 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/SignalSharedJob.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/job/SignalSharedJob.java
@@ -24,6 +24,10 @@ public interface SignalSharedJob extends SharedJob {
String PREFIX = "signal.";
+ JobSignalKeyCreator getSignalKeyCreator();
+
+ void setSignalKeyCreator(JobSignalKeyCreator signalKeyCreator);
+
@Override
default int getSharedNum() {
return -1;
@@ -31,7 +35,7 @@ default int getSharedNum() {
@Override
default String getSharedKey() {
- return PREFIX + getMsgSaveKey();
+ return PREFIX + getSignalKeyCreator().getSignalKeyBySignalSharedJob(this) + "." + getMsgSaveKey();
}
String getMsgSaveKey();
diff --git a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/parser/JobParamsParser.java b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/parser/JobParamsParser.java
index fca769d8a6..5c9e013c6b 100644
--- a/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/parser/JobParamsParser.java
+++ b/dss-linkis-node-execution/src/main/java/com/webank/wedatasphere/dss/linkis/node/execution/parser/JobParamsParser.java
@@ -19,19 +19,31 @@
import com.google.gson.reflect.TypeToken;
import com.webank.wedatasphere.dss.linkis.node.execution.WorkflowContext;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
import com.webank.wedatasphere.dss.linkis.node.execution.job.LinkisJob;
import com.webank.wedatasphere.dss.linkis.node.execution.job.Job;
import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
import com.webank.wedatasphere.dss.linkis.node.execution.utils.LinkisJobExecutionUtils;
+import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
- * Created by peacewong on 2019/11/3.
+ * Created by johnnwang on 2019/11/3.
*/
public class JobParamsParser implements JobParser {
+ private JobSignalKeyCreator signalKeyCreator;
+
+ public JobSignalKeyCreator getSignalKeyCreator() {
+ return signalKeyCreator;
+ }
+
+ public void setSignalKeyCreator(JobSignalKeyCreator signalKeyCreator) {
+ this.signalKeyCreator = signalKeyCreator;
+ }
+
@Override
public void parseJob(Job job) throws Exception {
@@ -44,11 +56,15 @@ public void parseJob(Job job) throws Exception {
Map flowVariables = linkisJob.getVariables();
putParamsMap(job.getParams(), "variable", flowVariables);
//put signal info
- Map sharedValue = WorkflowContext.getAppJointContext().getSubMapByPrefix(SignalSharedJob.PREFIX);
+ Map sharedValue = WorkflowContext.getAppJointContext()
+ .getSubMapByPrefix(SignalSharedJob.PREFIX + this.getSignalKeyCreator().getSignalKeyByJob(job));
if (sharedValue != null) {
- putParamsMap(job.getParams(), "variable", sharedValue);
+ Collection
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.8.5</version>
+        </dependency>
diff --git a/dss-server/pom.xml b/dss-server/pom.xml
index f42dbb0a44..b0627677bc 100644
--- a/dss-server/pom.xml
+++ b/dss-server/pom.xml
@@ -117,7 +117,7 @@
             <groupId>com.webank.wedatasphere.linkis</groupId>
-            <version>0.9.1</version>
+            <version>0.9.3</version>
diff --git a/dss-server/src/main/assembly/distribution.xml b/dss-server/src/main/assembly/distribution.xml
index ffa6656d8c..560fc34e34 100644
--- a/dss-server/src/main/assembly/distribution.xml
+++ b/dss-server/src/main/assembly/distribution.xml
@@ -176,7 +176,9 @@
org.eclipse.jetty:jetty-continuation:jar
org.eclipse.jetty:jetty-http:jar
org.eclipse.jetty:jetty-io:jar
+
org.eclipse.jetty:jetty-plus:jar
org.eclipse.jetty:jetty-security:jar
org.eclipse.jetty:jetty-server:jar
@@ -225,7 +227,9 @@
org.json4s:json4s-ast_2.11:jar
org.json4s:json4s-core_2.11:jar
org.json4s:json4s-jackson_2.11:jar
+
org.jvnet.mimepull:mimepull:jar
org.jvnet:tiger-types:jar
org.latencyutils:LatencyUtils:jar
diff --git a/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanAppJointSignalSharedJob.java b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanAppJointSignalSharedJob.java
index 01ebbf0b69..a24adcb69d 100644
--- a/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanAppJointSignalSharedJob.java
+++ b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanAppJointSignalSharedJob.java
@@ -17,17 +17,30 @@
package com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.job;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
import com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.conf.LinkisJobTypeConf;
import java.util.Map;
/**
- * Created by peacewong on 2019/11/14.
+ * Created by johnnwang on 2019/11/14.
*/
-public class AzkabanAppJointSignalSharedJob extends AzkabanAppJointLinkisSharedJob implements SignalSharedJob {
+public class AzkabanAppJointSignalSharedJob extends AzkabanAppJointLinkisJob implements SignalSharedJob {
+ private JobSignalKeyCreator signalKeyCreator;
+
+ @Override
+ public JobSignalKeyCreator getSignalKeyCreator() {
+ return this.signalKeyCreator;
+ }
+
+ @Override
+ public void setSignalKeyCreator(JobSignalKeyCreator signalKeyCreator) {
+ this.signalKeyCreator = signalKeyCreator;
+ }
+
@Override
public String getMsgSaveKey() {
Map configuration = this.getConfiguration();
diff --git a/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanJobSignalKeyCreator.java b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanJobSignalKeyCreator.java
new file mode 100644
index 0000000000..30c6a6415f
--- /dev/null
+++ b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkabanJobSignalKeyCreator.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.job;
+
+import com.webank.wedatasphere.dss.linkis.node.execution.job.Job;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
+import com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.conf.LinkisJobTypeConf;
+
+public class AzkabanJobSignalKeyCreator implements JobSignalKeyCreator {
+
+ @Override
+ public String getSignalKeyByJob(Job job) {
+ String projectId = job.getJobProps().get(LinkisJobTypeConf.PROJECT_ID);
+ String flowId = job.getJobProps().get(LinkisJobTypeConf.FLOW_NAME);
+ String flowExecId = job.getJobProps().get(LinkisJobTypeConf.FLOW_EXEC_ID);
+ return projectId + "." + flowId + "." + flowExecId ;
+ }
+
+ @Override
+ public String getSignalKeyBySignalSharedJob(SignalSharedJob job) {
+ return getSignalKeyByJob((Job)job);
+ }
+}
diff --git a/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkanbanBuilder.java b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkanbanBuilder.java
index bcd88097bd..c38fac2440 100644
--- a/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkanbanBuilder.java
+++ b/plugins/azkaban/linkis-jobtype/src/main/java/com/webank/wedatasphere/dss/plugins/azkaban/linkis/jobtype/job/AzkanbanBuilder.java
@@ -20,7 +20,10 @@
import com.webank.wedatasphere.dss.linkis.node.execution.conf.LinkisJobExecutionConfiguration;
import com.webank.wedatasphere.dss.linkis.node.execution.entity.BMLResource;
import com.webank.wedatasphere.dss.linkis.node.execution.exception.LinkisJobExecutionErrorException;
+import com.webank.wedatasphere.dss.linkis.node.execution.execution.LinkisNodeExecution;
+import com.webank.wedatasphere.dss.linkis.node.execution.execution.impl.LinkisNodeExecutionImpl;
import com.webank.wedatasphere.dss.linkis.node.execution.job.*;
+import com.webank.wedatasphere.dss.linkis.node.execution.parser.JobParamsParser;
import com.webank.wedatasphere.dss.linkis.node.execution.utils.LinkisJobExecutionUtils;
import com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.conf.LinkisJobTypeConf;
import com.webank.wedatasphere.dss.plugins.azkaban.linkis.jobtype.utils.LinkisJobTypeUtils;
@@ -29,17 +32,29 @@
import java.util.*;
/**
- * Created by peacewong on 2019/11/3.
+ * Created by johnnwang on 2019/11/3.
*/
public class AzkanbanBuilder extends Builder{
private Map jobProps;
+ private JobSignalKeyCreator jobSignalKeyCreator = new AzkabanJobSignalKeyCreator();
+
public AzkanbanBuilder setJobProps(Map jobProps) {
this.jobProps = jobProps;
return this;
}
+ {
+ init();
+ }
+
+ private void init(){
+ JobParamsParser jobParamsParser = new JobParamsParser();
+ jobParamsParser.setSignalKeyCreator(jobSignalKeyCreator);
+ LinkisNodeExecutionImpl linkisNodeExecution = (LinkisNodeExecutionImpl)LinkisNodeExecutionImpl.getLinkisNodeExecution();
+ linkisNodeExecution.registerJobParser(jobParamsParser);
+ }
@Override
protected String getJobType() {
@@ -99,6 +114,7 @@ protected SignalSharedJob createSignalSharedJob(boolean isLinkisType) {
return null;
} else {
AzkabanAppJointSignalSharedJob signalSharedJob = new AzkabanAppJointSignalSharedJob();
+ signalSharedJob.setSignalKeyCreator(jobSignalKeyCreator);
signalSharedJob.setJobProps(this.jobProps);
return signalSharedJob;
}
diff --git a/plugins/linkis/linkis-appjoint-entrance/pom.xml b/plugins/linkis/linkis-appjoint-entrance/pom.xml
index 26b80a750c..0651c396ef 100644
--- a/plugins/linkis/linkis-appjoint-entrance/pom.xml
+++ b/plugins/linkis/linkis-appjoint-entrance/pom.xml
@@ -54,8 +54,21 @@
-
-
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-cloudRPC</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-storage</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-httpclient</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
diff --git a/plugins/linkis/linkis-appjoint-entrance/src/main/assembly/distribution.xml b/plugins/linkis/linkis-appjoint-entrance/src/main/assembly/distribution.xml
index d4a200502a..83c6ee1a07 100644
--- a/plugins/linkis/linkis-appjoint-entrance/src/main/assembly/distribution.xml
+++ b/plugins/linkis/linkis-appjoint-entrance/src/main/assembly/distribution.xml
@@ -84,7 +84,7 @@
com.google.code.gson:gson:jar
com.google.guava:guava:jar
com.google.inject:guice:jar
- com.google.protobuf:protobuf-java:jar
+
com.netflix.archaius:archaius-core:jar
com.netflix.eureka:eureka-client:jar
com.netflix.eureka:eureka-core:jar
@@ -100,7 +100,6 @@
com.netflix.ribbon:ribbon-loadbalancer:jar
com.netflix.ribbon:ribbon-transport:jar
com.netflix.servo:servo-core:jar
- com.ning:async-http-client:jar
com.sun.jersey.contribs:jersey-apache-client4:jar
com.sun.jersey:jersey-client:jar
com.sun.jersey:jersey-core:jar
@@ -112,15 +111,15 @@
com.thoughtworks.xstream:xstream:jar
com.webank.wedatasphere.linkis:linkis-common:jar
com.webank.wedatasphere.linkis:linkis-module:jar
- commons-beanutils:commons-beanutils:jar
- commons-beanutils:commons-beanutils-core:jar
- commons-cli:commons-cli:jar
- commons-codec:commons-codec:jar
- commons-collections:commons-collections:jar
- commons-configuration:commons-configuration:jar
- commons-daemon:commons-daemon:jar
- commons-dbcp:commons-dbcp:jar
- commons-digester:commons-digester:jar
+
+
+
+
+
+
+
+
+
commons-httpclient:commons-httpclient:jar
commons-io:commons-io:jar
commons-jxpath:commons-jxpath:jar
@@ -155,11 +154,9 @@
joda-time:joda-time:jar
log4j:log4j:jar
mysql:mysql-connector-java:jar
- net.databinder.dispatch:dispatch-core_2.11:jar
- net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar
org.antlr:antlr-runtime:jar
org.antlr:stringtemplate:jar
- org.apache.commons:commons-compress:jar
+
org.apache.commons:commons-math:jar
org.apache.commons:commons-math3:jar
org.apache.curator:curator-client:jar
@@ -169,13 +166,13 @@
org.apache.directory.api:api-util:jar
org.apache.directory.server:apacheds-i18n:jar
org.apache.directory.server:apacheds-kerberos-codec:jar
- org.apache.hadoop:hadoop-annotations:jar
- org.apache.hadoop:hadoop-auth:jar
- org.apache.hadoop:hadoop-common:jar
- org.apache.hadoop:hadoop-hdfs:jar
- org.apache.htrace:htrace-core:jar
- org.apache.httpcomponents:httpclient:jar
- org.apache.httpcomponents:httpcore:jar
+
+
+
+
+
+
+
org.apache.logging.log4j:log4j-api:jar
org.apache.logging.log4j:log4j-core:jar
org.apache.logging.log4j:log4j-jul:jar
@@ -194,7 +191,6 @@
org.eclipse.jetty:jetty-continuation:jar
org.eclipse.jetty:jetty-http:jar
org.eclipse.jetty:jetty-io:jar
- org.eclipse.jetty:jetty-jndi:jar
org.eclipse.jetty:jetty-plus:jar
org.eclipse.jetty:jetty-security:jar
org.eclipse.jetty:jetty-server:jar
@@ -243,7 +239,6 @@
org.json4s:json4s-ast_2.11:jar
org.json4s:json4s-core_2.11:jar
org.json4s:json4s-jackson_2.11:jar
- org.jsoup:jsoup:jar
org.jvnet.mimepull:mimepull:jar
org.jvnet:tiger-types:jar
org.latencyutils:LatencyUtils:jar
diff --git a/pom.xml b/pom.xml
index c93448dc71..76737321d2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -48,7 +48,7 @@
0.7.0
-        <linkis.version>0.9.1</linkis.version>
+        <linkis.version>0.9.3</linkis.version>
2.11.8
1.8
3.3.3
diff --git a/visualis-appjoint/appjoint/pom.xml b/visualis-appjoint/appjoint/pom.xml
index 9c6016de16..ab48a6148b 100644
--- a/visualis-appjoint/appjoint/pom.xml
+++ b/visualis-appjoint/appjoint/pom.xml
@@ -45,6 +45,14 @@
true
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-httpclient</artifactId>
+            <version>${linkis.version}</version>
+            <scope>provided</scope>
+            <optional>true</optional>
+        </dependency>
+
diff --git a/visualis-appjoint/appjoint/src/main/scala/com/webank/wedatasphere/dss/appjoint/visualis/execution/VisualisNodeExecution.scala b/visualis-appjoint/appjoint/src/main/scala/com/webank/wedatasphere/dss/appjoint/visualis/execution/VisualisNodeExecution.scala
index 9de32921cf..fa25757263 100644
--- a/visualis-appjoint/appjoint/src/main/scala/com/webank/wedatasphere/dss/appjoint/visualis/execution/VisualisNodeExecution.scala
+++ b/visualis-appjoint/appjoint/src/main/scala/com/webank/wedatasphere/dss/appjoint/visualis/execution/VisualisNodeExecution.scala
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.dss.appjoint.visualis.execution
-import java.io.ByteArrayOutputStream
+import java.io.{ByteArrayOutputStream, InputStream}
import java.util
import java.util.Base64
@@ -28,25 +28,34 @@ import com.webank.wedatasphere.dss.appjoint.service.session.Session
import com.webank.wedatasphere.dss.appjoint.visualis.execution.VisualisNodeExecutionConfiguration._
import com.webank.wedatasphere.linkis.common.exception.ErrorException
import com.webank.wedatasphere.linkis.common.log.LogUtils
-import com.webank.wedatasphere.linkis.common.utils.{HttpClient, Logging, Utils}
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.storage.{LineMetaData, LineRecord}
import org.apache.commons.io.IOUtils
import scala.collection.JavaConversions.mapAsScalaMap
-import scala.concurrent.ExecutionContext
+import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, ExecutionContext}
+import dispatch._
+import org.json4s.{DefaultFormats, Formats}
/**
* Created by enjoyyin on 2019/10/12.
*/
-class VisualisNodeExecution extends NodeExecution with HttpClient with Logging {
-
- override protected implicit val executors: ExecutionContext = Utils.newCachedExecutionContext(VISUALIS_THREAD_MAX.getValue, getName + "-NodeExecution-Thread", true)
+class VisualisNodeExecution extends NodeExecution with Logging {
private val DISPLAY = "display"
private val DASHBOARD = "dashboard"
var basicUrl:String = _
+ protected implicit val executors: ExecutionContext = Utils.newCachedExecutionContext(VISUALIS_THREAD_MAX.getValue, getName + "-NodeExecution-Thread", true)
+ protected implicit val formats: Formats = DefaultFormats
+
+ private implicit def svc(url: String): Req =
+ dispatch.url(url)
+
+
+
override def getBaseUrl: String = this.basicUrl
override def setBaseUrl(basicUrl: String): Unit = this.basicUrl = basicUrl
@@ -95,6 +104,17 @@ class VisualisNodeExecution extends NodeExecution with HttpClient with Logging {
appJointResponse
}
+ def download(url: String, queryParams: Map[String, String], headerParams: Map[String, String],
+ write: InputStream => Unit,
+ paths: String*): Unit = {
+ var req = url.GET
+ if(headerParams != null && headerParams.nonEmpty) req = req <:< headerParams
+ if(queryParams != null) queryParams.foreach{ case (k, v) => req = req.addQueryParameter(k, v)}
+ if(paths != null) paths.filter(_ != null).foreach(p => req = req / p)
+ val response = Http(req OK as.Response(_.getResponseBodyAsStream)).map(write)
+ Await.result(response, Duration.Inf)
+ }
+
private def getRealId(displayId:String):Int = {
Utils.tryCatch{
val f = java.lang.Float.parseFloat(displayId)