From 3d427dadcfb24335e5672b416ffe22826c93543f Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Tue, 6 Sep 2022 09:04:59 +0800 Subject: [PATCH 1/6] tdx/docker/client-image --- ppml/tdx/docker/client-image/Dockerfile | 7 +++ .../spark-submit-with-ppml-tdx-k8s.sh | 60 +++++++++++++++++++ .../spark-submit-with-ppml-tdx-local.sh | 34 +++++++++++ 3 files changed, 101 insertions(+) create mode 100644 ppml/tdx/docker/client-image/Dockerfile create mode 100644 ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh create mode 100644 ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-local.sh diff --git a/ppml/tdx/docker/client-image/Dockerfile b/ppml/tdx/docker/client-image/Dockerfile new file mode 100644 index 00000000000..4f3325b1afa --- /dev/null +++ b/ppml/tdx/docker/client-image/Dockerfile @@ -0,0 +1,7 @@ +FROM intelanalytics/bigdl-k8s + +ADD ./spark-submit-with-ppml-tdx-local.sh /opt/spark/work-dir/ppml-tdx/spark-submit-with-ppml-tdx-local.sh +ADD ./spark-submit-with-ppml-tdx-k8s.sh /opt/spark/work-dir/ppml-tdx/spark-submit-with-ppml-tdx-k8s.sh +ADD ./spark-executor-template.yaml /opt/spark/work-dir/ppml-tdx/spark-executor-template.yaml + +ENTRYPOINT [ "/opt/entrypoint.sh" ] diff --git a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh new file mode 100644 index 00000000000..1faf5d38a43 --- /dev/null +++ b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +# Check environment variables +if [ -z "$SPARK_HOME" ]; then + echo "Please set SPARK_HOME environment variable" + exit 1 +fi + +if [ -z "$RUNTIME_K8S_SERVICE_ACCOUNT" ]; then + echo "Please set BIGDL_HOME environment variable" + exit 1 +fi + +if [ -z $RUNTIME_K8S_SPARK_IMAGE ]; then + echo "Please set BIGDL_HOME environment variable" + exit 1 +fi + +default_config="--conf spark.kubernetes.authenticate.driver.serviceAccountName=$RUNTIME_K8S_SERVICE_ACCOUNT \ + --conf 
spark.kubernetes.container.image=$RUNTIME_K8S_SPARK_IMAGE \ + --conf spark.kubernetes.executor.deleteOnTermination=false \ + --conf spark.network.timeout=10000000 \ + --conf spark.executor.heartbeatInterval=10000000 \ + --conf spark.python.use.daemon=false \ + --conf spark.python.worker.reuse=false" + +if [ $secure_password ]; then + SSL="--conf spark.authenticate=true \ + --conf spark.authenticate.secret=$secure_password \ + --conf spark.kubernetes.executor.secretKeyRef.SPARK_AUTHENTICATE_SECRET="spark-secret:secret" \ + --conf spark.kubernetes.driver.secretKeyRef.SPARK_AUTHENTICATE_SECRET="spark-secret:secret" \ + --conf spark.authenticate.enableSaslEncryption=true \ + --conf spark.network.crypto.enabled=true \ + --conf spark.network.crypto.keyLength=128 \ + --conf spark.network.crypto.keyFactoryAlgorithm=PBKDF2WithHmacSHA1 \ + --conf spark.io.encryption.enabled=true \ + --conf spark.io.encryption.keySizeBits=128 \ + --conf spark.io.encryption.keygen.algorithm=HmacSHA1 \ + --conf spark.ssl.enabled=true \ + --conf spark.ssl.port=8043 \ + --conf spark.ssl.keyPassword=$secure_password \ + --conf spark.ssl.keyStore=/opt/spark/work-dir/keys/keystore.jks \ + --conf spark.ssl.keyStorePassword=$secure_password \ + --conf spark.ssl.keyStoreType=JKS \ + --conf spark.ssl.trustStore=/opt/spark/work-dir/keys/keystore.jks \ + --conf spark.ssl.trustStorePassword=$secure_password \ + --conf spark.ssl.trustStoreType=JKS" +else + SSL="" +fi + +set -x + +spark_submit_command="${SPARK_HOME}/bin/spark-submit \ + $default_config \ + $SSL \ + $*" + +echo "spark_submit_command $spark_submit_command" +bash -c "$spark_submit_command" diff --git a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-local.sh b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-local.sh new file mode 100644 index 00000000000..ce51bdec356 --- /dev/null +++ b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-local.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# Check environment variables +if [ -z 
"${BIGDL_HOME}" ]; then + echo "Please set BIGDL_HOME environment variable" + exit 1 +fi + +if [ -z "${SPARK_HOME}" ]; then + echo "Please set SPARK_HOME environment variable" + exit 1 +fi + +#setup paths +export BIGDL_JAR_NAME=${BIGDL_HOME}/jars/* +export BIGDL_CONF=${BIGDL_HOME}/conf/spark-bigdl.conf + +# Check files +if [ ! -f ${BIGDL_CONF} ]; then + echo "Cannot find ${BIGDL_CONF}" + exit 1 +fi + +if [ ! -f $BIGDL_JAR ]; then + echo "Cannot find $BIGDL_JAR" + exit 1 +fi + + +${SPARK_HOME}/bin/spark-submit \ + --properties-file ${BIGDL_CONF} \ + --conf spark.driver.extraClassPath=${BIGDL_JAR} \ + --conf spark.executor.extraClassPath=${BIGDL_JAR} \ + $* From b0c872d78b065d9c2979c05b8e2591793387a1bb Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Wed, 14 Sep 2022 08:13:16 +0800 Subject: [PATCH 2/6] add README --- ppml/tdx/docker/client-image/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 ppml/tdx/docker/client-image/README.md diff --git a/ppml/tdx/docker/client-image/README.md b/ppml/tdx/docker/client-image/README.md new file mode 100644 index 00000000000..7f52f59dc15 --- /dev/null +++ b/ppml/tdx/docker/client-image/README.md @@ -0,0 +1,8 @@ +## Build bigdl-tdx image +```bash +docker build \ + --build-arg http_proxy=.. \ + --build-arg https_proxy=.. \ + --build-arg no_proxy=.. \ + --rm --no-cache -t bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT . 
+``` From b6c5500db04789ad7f339f6b5fc912706c6ed478 Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Wed, 14 Sep 2022 09:06:23 +0800 Subject: [PATCH 3/6] update --- ppml/tdx/docker/client-image/Dockerfile | 3 +- .../tdx/docker/client-image/pod-template.yaml | 34 +++++++ ppml/tdx/docker/client-image/tdx-client.yaml | 96 +++++++++++++++++++ 3 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 ppml/tdx/docker/client-image/pod-template.yaml create mode 100644 ppml/tdx/docker/client-image/tdx-client.yaml diff --git a/ppml/tdx/docker/client-image/Dockerfile b/ppml/tdx/docker/client-image/Dockerfile index 4f3325b1afa..5c694924c29 100644 --- a/ppml/tdx/docker/client-image/Dockerfile +++ b/ppml/tdx/docker/client-image/Dockerfile @@ -2,6 +2,7 @@ FROM intelanalytics/bigdl-k8s ADD ./spark-submit-with-ppml-tdx-local.sh /opt/spark/work-dir/ppml-tdx/spark-submit-with-ppml-tdx-local.sh ADD ./spark-submit-with-ppml-tdx-k8s.sh /opt/spark/work-dir/ppml-tdx/spark-submit-with-ppml-tdx-k8s.sh -ADD ./spark-executor-template.yaml /opt/spark/work-dir/ppml-tdx/spark-executor-template.yaml +ADD ./pod-template.yaml /opt/spark/work-dir/ppml-tdx/pod-template.yaml +ADD ./tdx-client.yaml /opt/spark/work-dir/ppml-tdx/tdx-client.yaml ENTRYPOINT [ "/opt/entrypoint.sh" ] diff --git a/ppml/tdx/docker/client-image/pod-template.yaml b/ppml/tdx/docker/client-image/pod-template.yaml new file mode 100644 index 00000000000..da7b5b86a66 --- /dev/null +++ b/ppml/tdx/docker/client-image/pod-template.yaml @@ -0,0 +1,34 @@ +# example pod template for driver/executor +apiVersion: v1 +kind: Pod +spec: + # runtimeClassName: default is kata-cc + runtimeClassName: kata-cc + containers: + - name: spark-executor + securityContext: + privileged: true + volumeMounts: + - name: data + mountPath: /ppml/trusted-big-data-ml/work/data + - name: kubeconf + mountPath: /root/.kube/config + - name: tpch-1g + mountPath: /TPCH-1G + volumeDevices: + - devicePath: "/dev/sdd" + name: datapath + command: 
["/opt/entrypoint.sh"] + volumes: + - name: data + hostPath: + path: /home/data + - name: tpch-1g + hostPath: + path: /home/1G + - name: kubeconf + hostPath: + path: /root/.kube/config + - name: datapath + persistentVolumeClaim: + claimName: busybox-lvm-block-pvc-pre-2 diff --git a/ppml/tdx/docker/client-image/tdx-client.yaml b/ppml/tdx/docker/client-image/tdx-client.yaml new file mode 100644 index 00000000000..dbd1f9e1884 --- /dev/null +++ b/ppml/tdx/docker/client-image/tdx-client.yaml @@ -0,0 +1,96 @@ +apiVersion: v1 +kind: Pod + +metadata: + # name: default is bigdl-tdx-client + name: bigdl-tdx-client + +spec: + # hostNetwork: true + # runtimeClassName: default is kata-cc + runtimeClassName: kata-cc + + # default config to create container + containers: + - name: spark-local-k8s-client-kata-cc + image: intelanalytics/bigdl-tdx-client-spark-3.1.2:latest + imagePullPolicy: Always + securityContext: + privileged: true + resources: + limits: + cpu: "2" + memory: "2G" + requests: + cpu: "2" + memory: "2G" + command: + - sh + - "-c" + - | + mkdir -p /run/data && + mount /dev/sdd /run/data + sleep 10000 + + volumeMounts: + - name: kubeconfig + mountPath: /root/.kube/config + - name: sda-data + mountPath: /ppml/trusted-big-data-ml/work/data + - name: sda-tpch-1g + mountPath: /TPCH-1G + volumeDevices: + - devicePath: "/dev/sdd" + name: data + env: + - name: RUNTIME_SPARK_MASTER + value: "k8s://https://x.x.x.x:6443" + - name: RUNTIME_K8S_SERVICE_ACCOUNT + value: "spark" + - name: RUNTIME_K8S_SPARK_IMAGE + value: "intelanalytics/bigdl-tdx-client-spark-3.1.2:latest" + - name: RUNTIME_DRIVER_HOST + value: "x.x.x.x" + - name: RUNTIME_DRIVER_PORT + value: "54321" + - name: RUNTIME_EXECUTOR_INSTANCES + value: "1" + - name: RUNTIME_EXECUTOR_CORES + value: "16" + - name: RUNTIME_EXECUTOR_MEMORY + value: "32g" + - name: RUNTIME_TOTAL_EXECUTOR_CORES + value: "16" + - name: RUNTIME_DRIVER_CORES + value: "16" + - name: RUNTIME_DRIVER_MEMORY + value: "32g" + - name: LOCAL_IP + value: 
"x.x.x.x" + - name: http_proxy + value: http://.. + - name: https_proxy + value: http://.. + - name: JAVA_HOME + value: /opt/jdk + - name: SPARK_HOME + value: /opt/spark + volumes: + - name: kubeconfig + hostPath: + path: /root/.kube/config + - name: nvme-data + hostPath: + path: /mnt/nvme0n1/data + - name: nvme-tpch-1g + hostPath: + path: /mnt/nvme0n1/TPCH-1G + - name: sda-tpch-1g + hostPath: + path: /home/1G + - name: sda-data + hostPath: + path: /home/data + - name: data + persistentVolumeClaim: + claimName: busybox-lvm-block-pvc-pre-1 From f0fcdcf2762ae1503e0f42926be0840d35fde210 Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Thu, 15 Sep 2022 14:43:42 +0800 Subject: [PATCH 4/6] remove virtio --- ppml/tdx/docker/client-image/Dockerfile | 3 +- ppml/tdx/docker/client-image/README.md | 2 +- .../tdx/docker/client-image/pod-template.yaml | 17 -------- .../client-image/runtimeclass_kata.yaml | 11 ++++++ .../spark-submit-with-ppml-tdx-k8s.sh | 9 +++-- ppml/tdx/docker/client-image/tdx-client.yaml | 39 +++---------------- 6 files changed, 25 insertions(+), 56 deletions(-) create mode 100644 ppml/tdx/docker/client-image/runtimeclass_kata.yaml diff --git a/ppml/tdx/docker/client-image/Dockerfile b/ppml/tdx/docker/client-image/Dockerfile index 5c694924c29..c3e777299c6 100644 --- a/ppml/tdx/docker/client-image/Dockerfile +++ b/ppml/tdx/docker/client-image/Dockerfile @@ -4,5 +4,4 @@ ADD ./spark-submit-with-ppml-tdx-local.sh /opt/spark/work-dir/ppml-tdx/spark-sub ADD ./spark-submit-with-ppml-tdx-k8s.sh /opt/spark/work-dir/ppml-tdx/spark-submit-with-ppml-tdx-k8s.sh ADD ./pod-template.yaml /opt/spark/work-dir/ppml-tdx/pod-template.yaml ADD ./tdx-client.yaml /opt/spark/work-dir/ppml-tdx/tdx-client.yaml - -ENTRYPOINT [ "/opt/entrypoint.sh" ] +ADD ./runtimeclass_kata.yaml /opt/spark/work-dir/ppml-tdx/runtimeclass_kata.yaml diff --git a/ppml/tdx/docker/client-image/README.md b/ppml/tdx/docker/client-image/README.md index 7f52f59dc15..da400eb9869 100644 --- 
a/ppml/tdx/docker/client-image/README.md +++ b/ppml/tdx/docker/client-image/README.md @@ -4,5 +4,5 @@ docker build \ --build-arg http_proxy=.. \ --build-arg https_proxy=.. \  --build-arg no_proxy=.. \ - --rm --no-cache -t bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT . + --rm --no-cache -t intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT . ``` diff --git a/ppml/tdx/docker/client-image/pod-template.yaml b/ppml/tdx/docker/client-image/pod-template.yaml index da7b5b86a66..50aaf3c24d8 100644 --- a/ppml/tdx/docker/client-image/pod-template.yaml +++ b/ppml/tdx/docker/client-image/pod-template.yaml @@ -9,26 +9,9 @@ spec: securityContext: privileged: true volumeMounts: - - name: data - mountPath: /ppml/trusted-big-data-ml/work/data - name: kubeconf mountPath: /root/.kube/config - - name: tpch-1g - mountPath: /TPCH-1G - volumeDevices: - - devicePath: "/dev/sdd" - name: datapath - command: ["/opt/entrypoint.sh"] volumes: - - name: data - hostPath: - path: /home/data - - name: tpch-1g - hostPath: - path: /home/1G - name: kubeconf hostPath: path: /root/.kube/config - - name: datapath - persistentVolumeClaim: - claimName: busybox-lvm-block-pvc-pre-2 diff --git a/ppml/tdx/docker/client-image/runtimeclass_kata.yaml b/ppml/tdx/docker/client-image/runtimeclass_kata.yaml new file mode 100644 index 00000000000..865044a5668 --- /dev/null +++ b/ppml/tdx/docker/client-image/runtimeclass_kata.yaml @@ -0,0 +1,11 @@ +# Copyright (c) 2020 Red Hat, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# +# Define the "kata" runtime class +--- +kind: RuntimeClass +apiVersion: node.k8s.io/v1 +metadata: + name: kata-cc +handler: kata diff --git a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh index 1faf5d38a43..7ca2f942e24 100644 --- a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh +++ b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh @@ -19,12 +19,15 @@ fi default_config="--conf spark.kubernetes.authenticate.driver.serviceAccountName=$RUNTIME_K8S_SERVICE_ACCOUNT \ --conf spark.kubernetes.container.image=$RUNTIME_K8S_SPARK_IMAGE \ --conf spark.kubernetes.executor.deleteOnTermination=false \ - --conf spark.network.timeout=10000000 \ --conf spark.executor.heartbeatInterval=10000000 \ --conf spark.python.use.daemon=false \ --conf spark.python.worker.reuse=false" if [ $secure_password ]; then + if [ -z $KEYS_PATH ]; then + echo "Please set BIGDL_HOME environment variable" + exit 1 + fi SSL="--conf spark.authenticate=true \ --conf spark.authenticate.secret=$secure_password \ --conf spark.kubernetes.executor.secretKeyRef.SPARK_AUTHENTICATE_SECRET="spark-secret:secret" \ @@ -39,10 +42,10 @@ if [ $secure_password ]; then --conf spark.ssl.enabled=true \ --conf spark.ssl.port=8043 \ --conf spark.ssl.keyPassword=$secure_password \ - --conf spark.ssl.keyStore=/opt/spark/work-dir/keys/keystore.jks \ + --conf spark.ssl.keyStore=$KEYS_PATH/keystore.jks \ --conf spark.ssl.keyStorePassword=$secure_password \ --conf spark.ssl.keyStoreType=JKS \ - --conf spark.ssl.trustStore=/opt/spark/work-dir/keys/keystore.jks \ + --conf spark.ssl.trustStore=$KEYS_PATH/keystore.jks \ --conf spark.ssl.trustStorePassword=$secure_password \ --conf spark.ssl.trustStoreType=JKS" else diff --git a/ppml/tdx/docker/client-image/tdx-client.yaml b/ppml/tdx/docker/client-image/tdx-client.yaml index dbd1f9e1884..5834bb399d4 100644 --- 
a/ppml/tdx/docker/client-image/tdx-client.yaml +++ b/ppml/tdx/docker/client-image/tdx-client.yaml @@ -1,19 +1,12 @@ apiVersion: v1 kind: Pod - metadata: - # name: default is bigdl-tdx-client name: bigdl-tdx-client - spec: - # hostNetwork: true - # runtimeClassName: default is kata-cc runtimeClassName: kata-cc - - # default config to create container containers: - name: spark-local-k8s-client-kata-cc - image: intelanalytics/bigdl-tdx-client-spark-3.1.2:latest + image: intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT imagePullPolicy: Always securityContext: privileged: true @@ -28,27 +21,19 @@ spec: - sh - "-c" - | - mkdir -p /run/data && - mount /dev/sdd /run/data sleep 10000 - volumeMounts: - name: kubeconfig mountPath: /root/.kube/config - - name: sda-data - mountPath: /ppml/trusted-big-data-ml/work/data - - name: sda-tpch-1g - mountPath: /TPCH-1G - volumeDevices: - - devicePath: "/dev/sdd" - name: data + - name: nfs-pvc + mountPath: "/bigdl2.0/data" env: - name: RUNTIME_SPARK_MASTER value: "k8s://https://x.x.x.x:6443" - name: RUNTIME_K8S_SERVICE_ACCOUNT value: "spark" - name: RUNTIME_K8S_SPARK_IMAGE - value: "intelanalytics/bigdl-tdx-client-spark-3.1.2:latest" + value: "intelanalytics/bigdl-tdx-client-spark-3.1.2:2.1.0-SNAPSHOT" - name: RUNTIME_DRIVER_HOST value: "x.x.x.x" - name: RUNTIME_DRIVER_PORT @@ -79,18 +64,6 @@ spec: - name: kubeconfig hostPath: path: /root/.kube/config - - name: nvme-data - hostPath: - path: /mnt/nvme0n1/data - - name: nvme-tpch-1g - hostPath: - path: /mnt/nvme0n1/TPCH-1G - - name: sda-tpch-1g - hostPath: - path: /home/1G - - name: sda-data - hostPath: - path: /home/data - - name: data + - name: nfs-pvc persistentVolumeClaim: - claimName: busybox-lvm-block-pvc-pre-1 + claimName: nfsvolumeclaim From 4eb4e2c30aacefc2691c5ff7e534f9e35cf3dc59 Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Thu, 15 Sep 2022 16:26:32 +0800 Subject: [PATCH 5/6] update --- .../docker/client-image/spark-submit-with-ppml-tdx-k8s.sh | 7 +++---- 1 file 
changed, 3 insertions(+), 4 deletions(-) diff --git a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh index 7ca2f942e24..b2f94773019 100644 --- a/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh +++ b/ppml/tdx/docker/client-image/spark-submit-with-ppml-tdx-k8s.sh @@ -7,25 +7,24 @@ if [ -z "$SPARK_HOME" ]; then fi if [ -z "$RUNTIME_K8S_SERVICE_ACCOUNT" ]; then - echo "Please set BIGDL_HOME environment variable" + echo "Please set RUNTIME_K8S_SERVICE_ACCOUNT environment variable" exit 1 fi if [ -z $RUNTIME_K8S_SPARK_IMAGE ]; then - echo "Please set BIGDL_HOME environment variable" + echo "Please set RUNTIME_K8S_SPARK_IMAGE environment variable" exit 1 fi default_config="--conf spark.kubernetes.authenticate.driver.serviceAccountName=$RUNTIME_K8S_SERVICE_ACCOUNT \ --conf spark.kubernetes.container.image=$RUNTIME_K8S_SPARK_IMAGE \ --conf spark.kubernetes.executor.deleteOnTermination=false \ - --conf spark.executor.heartbeatInterval=10000000 \ --conf spark.python.use.daemon=false \ --conf spark.python.worker.reuse=false" if [ $secure_password ]; then if [ -z $KEYS_PATH ]; then - echo "Please set BIGDL_HOME environment variable" + echo "Please set KEYS_PATH environment variable" exit 1 fi SSL="--conf spark.authenticate=true \ From c7372ba740f6595312056b9d05f5a089ece8af72 Mon Sep 17 00:00:00 2001 From: Le-Zheng Date: Thu, 15 Sep 2022 20:08:47 +0800 Subject: [PATCH 6/6] update --- ppml/tdx/docker/client-image/tdx-client.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ppml/tdx/docker/client-image/tdx-client.yaml b/ppml/tdx/docker/client-image/tdx-client.yaml index 5834bb399d4..ed3da88e2ff 100644 --- a/ppml/tdx/docker/client-image/tdx-client.yaml +++ b/ppml/tdx/docker/client-image/tdx-client.yaml @@ -26,7 +26,7 @@ spec: - name: kubeconfig mountPath: /root/.kube/config - name: nfs-pvc - mountPath: "/bigdl2.0/data" + mountPath: "/ppml/trusted-big-data-ml/work" 
env: - name: RUNTIME_SPARK_MASTER value: "k8s://https://x.x.x.x:6443"