ppml docker 2.1.0 #5949

Merged · 1 commit · Sep 26, 2022
2 changes: 1 addition & 1 deletion ppml/services/kms-utils/docker/Dockerfile
@@ -1,6 +1,6 @@
ARG JDK_VERSION=8u192
ARG SPARK_VERSION=3.1.2
-ARG BIGDL_VERSION=2.1.0-SNAPSHOT
+ARG BIGDL_VERSION=2.1.0

# stage.1 java & spark
FROM ubuntu:20.04 AS builder
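For context, a minimal build sketch for this Dockerfile; the image tag is illustrative and any additional build args (proxy settings, JDK download URL) are assumptions, not part of this change:

sudo docker build \
  --build-arg BIGDL_VERSION=2.1.0 \
  -t <your-registry>/kms-utils:2.1.0 \
  -f ./Dockerfile .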
24 changes: 12 additions & 12 deletions ppml/services/kms-utils/docker/entrypoint.sh
@@ -21,7 +21,7 @@ elif [ "$action" = "generatekeys" ]; then
if [ "$KMS_TYPE" = "ehsm" ]; then
appid=$2
apikey=$3
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.GenerateKeys \
--primaryKeyPath /home/key/ehsm_encrypted_primary_key \
--dataKeyPath /home/key/ehsm_encrypted_data_key \
@@ -33,7 +33,7 @@ elif [ "$action" = "generatekeys" ]; then
elif [ "$KMS_TYPE" = "simple" ]; then
appid=$2
apikey=$3
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.GenerateKeys \
--primaryKeyPath /home/key/simple_encrypted_primary_key \
--dataKeyPath /home/key/simple_encrypted_data_key \
@@ -42,7 +42,7 @@ elif [ "$action" = "generatekeys" ]; then
--simpleAPIKEY $apikey
elif [ "$KMS_TYPE" = "azure" ]; then
keyVaultName=$2
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.GenerateKeys \
--primaryKeyPath /home/key/simple_encrypted_primary_key \
--dataKeyPath /home/key/simple_encrypted_data_key \
@@ -59,7 +59,7 @@ elif [ "$action" = "encrypt" ]; then
if [ "$KMS_TYPE" = "ehsm" ]; then
appid=$2
apikey=$3
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Encrypt \
--inputPath $input_path \
--primaryKeyPath /home/key/ehsm_encrypted_primary_key \
@@ -72,7 +72,7 @@ elif [ "$action" = "encrypt" ]; then
elif [ "$KMS_TYPE" = "simple" ]; then
appid=$2
apikey=$3
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Encrypt \
--inputPath $input_path \
--primaryKeyPath /home/key/simple_encrypted_primary_key \
@@ -82,7 +82,7 @@ elif [ "$action" = "encrypt" ]; then
--simpleAPIKEY $apikey
elif [ "$KMS_TYPE" = "azure" ]; then
keyVaultName=$2
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Encrypt \
--inputPath $input_path \
--primaryKeyPath /home/key/simple_encrypted_primary_key \
@@ -99,7 +99,7 @@ elif [ "$action" = "encryptwithrepartition" ]; then
apikey=$3
input_path=$4
output_path=$input_path.encrypted
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.EncryptWithRepartition \
--inputPath $input_path \
--outputPath $output_path \
@@ -118,7 +118,7 @@ elif [ "$action" = "encryptwithrepartition" ]; then
apikey=$3
input_path=$4
output_path=$input_path.encrypted
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.EncryptWithRepartition \
--inputPath $input_path \
--outputPath $output_path \
@@ -134,7 +134,7 @@ elif [ "$action" = "encryptwithrepartition" ]; then
keyVaultName=$2
input_path=$3
output_path=$input_path.encrypted
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.EncryptWithRepartition \
--inputPath $input_path \
--outputPath $output_path \
@@ -154,7 +154,7 @@ elif [ "$action" = "decrypt" ]; then
appid=$2
apikey=$3
input_path=$4
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Decrypt \
--inputPath $input_path \
--inputPartitionNum 8 \
@@ -172,7 +172,7 @@ elif [ "$action" = "decrypt" ]; then
appid=$2
apikey=$3
input_path=$4
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Decrypt \
--inputPath $input_path \
--inputPartitionNum 8 \
@@ -187,7 +187,7 @@ elif [ "$action" = "decrypt" ]; then
elif [ "$KMS_TYPE" = "azure" ]; then
keyVaultName=$2
input_path=$3
-java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0-SNAPSHOT.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
+java -cp $BIGDL_HOME/jars/bigdl-ppml-spark_3.1.2-2.1.0.jar:$SPARK_HOME/jars/*:$SPARK_HOME/examples/jars/*:$BIGDL_HOME/jars/* \
com.intel.analytics.bigdl.ppml.examples.Decrypt \
--inputPath $input_path \
--inputPartitionNum 8 \
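For context, the argument order visible in these hunks (the action, then appid/apikey or keyVaultName, then an input path) suggests invocations roughly like the following; the container name and script path are assumptions, not shown in this diff:

docker exec <kms-utils-container> bash /ppml/entrypoint.sh generatekeys <appid> <apikey>
docker exec <kms-utils-container> bash /ppml/entrypoint.sh encrypt <appid> <apikey> <input_path>
docker exec <kms-utils-container> bash /ppml/entrypoint.sh decrypt <appid> <apikey> <encrypted_input_path>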
4 changes: 2 additions & 2 deletions ppml/tdx/docker/client-image/tdx-client.yaml
@@ -6,7 +6,7 @@ spec:
runtimeClassName: kata-cc
containers:
- name: spark-local-k8s-client-kata-cc
-image: intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT
+image: intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0
imagePullPolicy: Always
securityContext:
privileged: true
@@ -33,7 +33,7 @@ spec:
- name: RUNTIME_K8S_SERVICE_ACCOUNT
value: "spark"
- name: RUNTIME_K8S_SPARK_IMAGE
-value: "intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT"
+value: "intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0"
- name: RUNTIME_DRIVER_HOST
value: "x.x.x.x"
- name: RUNTIME_DRIVER_PORT
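A deployment sketch for the updated client pod spec, assuming a cluster where the kata-cc runtime class is available; the x.x.x.x driver host in the spec must be filled in first:

kubectl apply -f ppml/tdx/docker/client-image/tdx-client.yaml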
2 changes: 1 addition & 1 deletion ppml/trusted-big-data-ml/python/docker-gramine/Dockerfile
@@ -1,4 +1,4 @@
-ARG BIGDL_VERSION=2.1.0-SNAPSHOT
+ARG BIGDL_VERSION=2.1.0
ARG SPARK_VERSION=3.1.2
ARG TINI_VERSION=v0.18.0
ARG JDK_VERSION=8u192
@@ -17,15 +17,15 @@ Proxy_Modified="sudo docker build \
--build-arg JDK_URL=${SPARK_JAR_REPO_URL} \
--build-arg SPARK_JAR_REPO_URL=${SPARK_JAR_REPO_URL} \
--build-arg no_proxy=x.x.x.x \
--t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-gramine:2.1.0-SNAPSHOT -f ./Dockerfile ."
+-t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-gramine:2.1.0 -f ./Dockerfile ."

No_Proxy_Modified="sudo docker build \
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg JDK_URL=${SPARK_JAR_REPO_URL} \
--build-arg SPARK_JAR_REPO_URL=${SPARK_JAR_REPO_URL} \
--build-arg no_proxy=x.x.x.x \
--t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-gramine:2.1.0-SNAPSHOT -f ./Dockerfile ."
+-t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-gramine:2.1.0 -f ./Dockerfile ."

if [[ "$JDK_URL" == "http://your-http-url-to-download-jdk" ]] || [[ "$SPARK_JAR_REPO_URL" == "http://your_spark_jar_repo_url" ]]
then
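A usage sketch only (the build script's filename is not shown in this diff and is assumed here): the guard above suggests JDK_URL and SPARK_JAR_REPO_URL must be exported to real values before running, after which the image is tagged 2.1.0:

export JDK_URL=<your-http-url-to-download-jdk>
export SPARK_JAR_REPO_URL=<your-spark-jar-repo-url>
bash ./build-docker-image.sh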
2 changes: 1 addition & 1 deletion ppml/trusted-big-data-ml/python/docker-graphene/Dockerfile
@@ -1,4 +1,4 @@
-ARG BIGDL_VERSION=2.1.0-SNAPSHOT
+ARG BIGDL_VERSION=2.1.0
ARG SPARK_VERSION=3.1.2
ARG TINI_VERSION=v0.18.0
ARG JDK_VERSION=8u192
@@ -1,7 +1,7 @@
#!/bin/bash

export LOCAL_IP=YOUR_LOCAL_IP
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0

sudo docker pull $DOCKER_IMAGE

@@ -32,7 +32,7 @@ export TF_MKL_ALLOC_MAX_BYTES=10737418240 && \
--conf spark.driver.defaultJavaOptions="-Dlog4j.configuration=/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/log4j2.xml" \
--conf spark.executor.defaultJavaOptions="-Dlog4j.configuration=/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/log4j2.xml" \
--conf spark.kubernetes.authenticate.driver.serviceAccountName=spark \
---conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT \
+--conf spark.kubernetes.container.image=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0 \
--conf spark.kubernetes.driver.podTemplateFile=/ppml/trusted-big-data-ml/azure/spark-driver-template-az.yaml \
--conf spark.kubernetes.executor.podTemplateFile=/ppml/trusted-big-data-ml/azure/spark-executor-template-az.yaml \
--conf spark.kubernetes.executor.deleteOnTermination=false \
@@ -16,14 +16,14 @@ Proxy_Modified="sudo docker build \
--build-arg JDK_URL=${JDK_URL} \
--build-arg SPARK_JAR_REPO_URL=${SPARK_JAR_REPO_URL} \
--build-arg no_proxy=x.x.x.x \
--t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT -f ./Dockerfile ."
+-t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0 -f ./Dockerfile ."

No_Proxy_Modified="sudo docker build \
--build-arg JDK_VERSION=8u192 \
--build-arg JDK_URL=${JDK_URL} \
--build-arg SPARK_JAR_REPO_URL=${SPARK_JAR_REPO_URL} \
--build-arg no_proxy=x.x.x.x \
--t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT -f ./Dockerfile ."
+-t intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0 -f ./Dockerfile ."

if [[ "$JDK_URL" == "http://your-http-url-to-download-jdk" ]] || [[ "$SPARK_JAR_REPO_URL" == "http://your_spark_jar_repo_url" ]]
then
@@ -4,7 +4,7 @@ export ENCLAVE_KEY_PATH=YOUR_LOCAL_ENCLAVE_KEY_PATH
export DATA_PATH=YOUR_LOCAL_DATA_PATH
export KEYS_PATH=YOUR_LOCAL_KEYS_PATH
export LOCAL_IP=YOUR_LOCAL_IP
-export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT
+export DOCKER_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0

sudo docker pull $DOCKER_IMAGE

@@ -3,4 +3,4 @@ name: bigdl-ppml-helm-chart
description: A Helm chart for submitting BigDL PPML Spark jobs to Kubernetes
type: application
version: 0.1.0
-appVersion: 2.1.0-SNAPSHOT
+appVersion: 2.1.0
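A minimal install sketch for the chart whose appVersion is bumped here; the release name and chart path are assumptions for illustration:

helm install <release-name> <path-to-bigdl-ppml-helm-chart>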
@@ -2,7 +2,7 @@
export MASTER=YOUR_MASTER_IP
export WORKERS=(YOUR_WORKER_IP_1 YOUR_WORKER_IP_2 YOUR_WORKER_IP_3)

-export TRUSTED_BIGDATA_ML_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0-SNAPSHOT
+export TRUSTED_BIGDATA_ML_IMAGE=intelanalytics/bigdl-ppml-trusted-big-data-ml-python-graphene:2.1.0

export SOURCE_ENCLAVE_KEY_PATH=YOUR_LOCAL_ENCLAVE_KEY_PATH
export SOURCE_KEYS_PATH=YOUR_LOCAL_KEYS_PATH
@@ -2,7 +2,7 @@
cd /ppml/trusted-big-data-ml

SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-'/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+'/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
-Xmx2g \
org.apache.spark.deploy.SparkSubmit \
--master 'local[4]' \
@@ -12,13 +12,13 @@ SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
--conf spark.rpc.message.maxSize=190 \
--conf spark.network.timeout=10000000 \
--conf spark.executor.heartbeatInterval=10000000 \
---properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/conf/spark-bigdl.conf \
---py-files local://${BIGDL_HOME}/python/bigdl-orca-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,local://${BIGDL_HOME}/python/bigdl-dllib-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+--properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0/conf/spark-bigdl.conf \
+--py-files local://${BIGDL_HOME}/python/bigdl-orca-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,local://${BIGDL_HOME}/python/bigdl-dllib-spark_${SPARK_VERSION}-${BIGDL_VERSION}-python-api.zip,/ppml/trusted-big-data-ml/work/bigdl-2.1.0/examples/dllib/lenet/lenet.py \
--driver-cores 2 \
--total-executor-cores 2 \
--executor-cores 2 \
--executor-memory 8g \
-/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/examples/dllib/lenet/lenet.py \
+/ppml/trusted-big-data-ml/work/bigdl-2.1.0/examples/dllib/lenet/lenet.py \
--dataPath /ppml/trusted-big-data-ml/work/data/mnist \
--maxEpoch 2" 2>&1 | tee test-bigdl-lenet-sgx.log && \
cat test-bigdl-lenet-sgx.log | egrep -a "Accuracy"
@@ -1,15 +1,15 @@
#!/bin/bash
SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-'/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+'/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
-Xmx2g \
org.apache.spark.deploy.SparkSubmit \
--master 'local[4]' \
--conf spark.driver.memory=2g \
---conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/conf/spark-analytics-zoo.conf \
---jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/python/bigdl-orca-spark_3.1.2-2.1.0-SNAPSHOT-python-api.zip \
+--conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0/conf/spark-analytics-zoo.conf \
+--jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0/python/bigdl-orca-spark_3.1.2-2.1.0-python-api.zip \
--executor-memory 2g \
/ppml/trusted-big-data-ml/work/examples/pyzoo/orca/data/spark_pandas.py \
-f path_of_nyc_taxi_csv" | tee test-orca-data-sgx.log
@@ -1,15 +1,15 @@
#!/bin/bash
SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-'/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+'/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
-Xmx3g \
org.apache.spark.deploy.SparkSubmit \
--master 'local[4]' \
--conf spark.driver.memory=3g \
---conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/conf/spark-analytics-zoo.conf \
---jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/python/bigdl-orca-spark_3.1.2-2.1.0-SNAPSHOT-python-api.zip \
+--conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0/conf/spark-analytics-zoo.conf \
+--jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0/python/bigdl-orca-spark_3.1.2-2.1.0-python-api.zip \
--executor-memory 3g \
--executor-cores 2 \
--driver-cores 2 \
@@ -1,15 +1,15 @@
#!/bin/bash
SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-'/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+'/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
-Xmx2g \
org.apache.spark.deploy.SparkSubmit \
--master 'local[4]' \
--conf spark.driver.memory=2g \
---conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/conf/spark-analytics-zoo.conf \
---jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/python/bigdl-orca-spark_3.1.2-2.1.0-SNAPSHOT-python-api.zip \
+--conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0/conf/spark-analytics-zoo.conf \
+--jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0/python/bigdl-orca-spark_3.1.2-2.1.0-python-api.zip \
--executor-memory 2g \
/ppml/trusted-big-data-ml/work/examples/pyzoo/xgboost/xgboost_classifier.py \
-f path_of_pima_indians_diabetes_data_csv" | tee test-xgboost-classifier-sgx.log
@@ -1,15 +1,15 @@
#!/bin/bash
SGX=1 ./pal_loader bash -c "/opt/jdk8/bin/java -cp \
-'/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
+'/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/*:/ppml/trusted-big-data-ml/work/spark-3.1.2/conf/:/ppml/trusted-big-data-ml/work/spark-3.1.2/jars/*' \
-Xmx2g \
org.apache.spark.deploy.SparkSubmit \
--master 'local[4]' \
--conf spark.driver.memory=2g \
---conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/conf/spark-analytics-zoo.conf \
---jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/jars/* \
---py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0-SNAPSHOT/python/bigdl-orca-spark_3.1.2-2.1.0-SNAPSHOT-python-api.zip \
+--conf spark.executor.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--conf spark.driver.extraClassPath=/ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--properties-file /ppml/trusted-big-data-ml/work/bigdl-2.1.0/conf/spark-analytics-zoo.conf \
+--jars /ppml/trusted-big-data-ml/work/bigdl-2.1.0/jars/* \
+--py-files /ppml/trusted-big-data-ml/work/bigdl-2.1.0/python/bigdl-orca-spark_3.1.2-2.1.0-python-api.zip \
--executor-memory 2g \
/ppml/trusted-big-data-ml/work/examples/pyzoo/xgboost/xgboost_example.py \
--file-path /ppml/trusted-big-data-ml/work/data/Boston_Housing.csv" | tee test-zoo-xgboost-regressor-sgx.log