
Merge pull request apache#324 from palantir/use-upstream-kubernetes
Rebase to upstream's version of Kubernetes support.
mccheah authored Mar 29, 2018
2 parents 4e7f4f0 + 1d60e38 commit 95cf5f7
Showing 153 changed files with 2,616 additions and 12,775 deletions.
bin/docker-image-tool.sh (9 changes: 7 additions & 2 deletions)
@@ -63,9 +63,11 @@ function build {
     error "Cannot find docker image. This script must be run from a runnable distribution of Apache Spark."
   fi
 
+  local DOCKERFILE=${DOCKERFILE:-"$IMG_PATH/spark/Dockerfile"}
+
   docker build "${BUILD_ARGS[@]}" \
     -t $(image_ref spark) \
-    -f "$IMG_PATH/spark/Dockerfile" .
+    -f "$DOCKERFILE" .
 }
 
 function push {
@@ -83,6 +85,7 @@ Commands:
   push        Push a pre-built image to a registry. Requires a repository address to be provided.
 
 Options:
+  -f file     Dockerfile to build. By default builds the Dockerfile shipped with Spark.
   -r repo     Repository address.
   -t tag      Tag to apply to the built image, or to identify the image to be pushed.
   -m          Use minikube's Docker daemon.
@@ -112,10 +115,12 @@ fi
 
 REPO=
 TAG=
-while getopts mr:t: option
+DOCKERFILE=
+while getopts f:mr:t: option
 do
  case "${option}"
  in
+ f) DOCKERFILE=${OPTARG};;
  r) REPO=${OPTARG};;
  t) TAG=${OPTARG};;
  m)
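
The new -f option threads a user-supplied Dockerfile through to docker build, falling back to the bundled one. A minimal usage sketch (repository address, tag, and Dockerfile path are hypothetical):

# Build the Spark image from a custom Dockerfile, then push it to a registry
./bin/docker-image-tool.sh -r docker.io/myrepo -t v2.3.0 -f ./Dockerfile.custom build
./bin/docker-image-tool.sh -r docker.io/myrepo -t v2.3.0 push
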
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala (39 changes: 12 additions & 27 deletions)
@@ -18,13 +18,13 @@
 package org.apache.spark.deploy
 
 import java.io._
-import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException}
+import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
 import java.net.URL
 import java.security.PrivilegedExceptionAction
 import java.text.ParseException
 
 import scala.annotation.tailrec
-import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
 import scala.util.{Properties, Try}
 
 import org.apache.commons.lang3.StringUtils
@@ -99,7 +99,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
   private[deploy] val REST_CLUSTER_SUBMIT_CLASS = classOf[RestSubmissionClientApp].getName()
   private[deploy] val STANDALONE_CLUSTER_SUBMIT_CLASS = classOf[ClientApp].getName()
   private[deploy] val KUBERNETES_CLUSTER_SUBMIT_CLASS =
-    "org.apache.spark.deploy.k8s.submit.Client"
+    "org.apache.spark.deploy.k8s.submit.KubernetesClientApplication"
 
   // scalastyle:off println
   private[spark] def printVersionAndExit(): Unit = {
@@ -310,10 +310,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
 
     // Fail fast, the following modes are not supported or applicable
     (clusterManager, deployMode) match {
-      case (KUBERNETES, CLIENT) =>
-        printErrorAndExit("Client mode is currently not supported for Kubernetes.")
-      case (KUBERNETES, CLUSTER) if args.isR =>
-        printErrorAndExit("Kubernetes does not currently support R applications.")
       case (STANDALONE, CLUSTER) if args.isPython =>
         printErrorAndExit("Cluster deploy mode is currently not supported for python " +
           "applications on standalone clusters.")
@@ -343,8 +339,8 @@
     }
     val isYarnCluster = clusterManager == YARN && deployMode == CLUSTER
     val isMesosCluster = clusterManager == MESOS && deployMode == CLUSTER
-    val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
     val isStandAloneCluster = clusterManager == STANDALONE && deployMode == CLUSTER
+    val isKubernetesCluster = clusterManager == KUBERNETES && deployMode == CLUSTER
 
     if (!isMesosCluster && !isStandAloneCluster) {
       // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
@@ -579,9 +575,6 @@ object SparkSubmit extends CommandLineUtils with Logging {
       OptionAssigner(args.principal, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.principal"),
       OptionAssigner(args.keytab, YARN, ALL_DEPLOY_MODES, confKey = "spark.yarn.keytab"),
 
-      OptionAssigner(args.kubernetesNamespace, KUBERNETES, ALL_DEPLOY_MODES,
-        confKey = "spark.kubernetes.namespace"),
-
       // Other options
       OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.executor.cores"),
@@ -649,9 +642,8 @@ object SparkSubmit extends CommandLineUtils with Logging {
 
     // Add the application jar automatically so the user doesn't have to call sc.addJar
     // For YARN cluster mode, the jar is already distributed on each node as "app.jar"
-    // In Kubernetes cluster mode, the jar will be uploaded by the client separately.
     // For python and R files, the primary resource is already distributed as a regular file
-    if (!isYarnCluster && !isKubernetesCluster && !args.isPython && !args.isR) {
+    if (!isYarnCluster && !args.isPython && !args.isR) {
       var jars = sparkConf.getOption("spark.jars").map(x => x.split(",").toSeq).getOrElse(Seq.empty)
       if (isUserJar(args.primaryResource)) {
         jars = jars ++ Seq(args.primaryResource)
@@ -733,21 +725,14 @@
 
     if (isKubernetesCluster) {
      childMainClass = KUBERNETES_CLUSTER_SUBMIT_CLASS
-      if (args.isPython) {
-        childArgs ++= Array("--primary-py-file", args.primaryResource)
-        childArgs ++= Array("--main-class", "org.apache.spark.deploy.PythonRunner")
-        if (args.pyFiles != null) {
-          childArgs ++= Array("--other-py-files", args.pyFiles)
-        }
-      } else {
-        if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
-          childArgs ++= Array("--primary-java-resource", args.primaryResource)
-        }
-        childArgs ++= Array("--main-class", args.mainClass)
-        args.childArgs.foreach { arg =>
-          childArgs += "--arg"
-          childArgs += arg
-        }
-      }
+      if (args.primaryResource != SparkLauncher.NO_RESOURCE) {
+        childArgs ++= Array("--primary-java-resource", args.primaryResource)
+      }
+      childArgs ++= Array("--main-class", args.mainClass)
+      if (args.childArgs != null) {
+        args.childArgs.foreach { arg =>
+          childArgs += ("--arg", arg)
+        }
+      }
     }
 
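With submission now routed through KubernetesClientApplication, a cluster-mode launch is driven entirely by spark-submit; the block above turns the primary resource, main class, and application arguments into the --primary-java-resource, --main-class, and --arg child arguments. A sketch of such a launch (API server address, namespace, image, and jar path are hypothetical; spark.kubernetes.container.image is the upstream image conf at the time of this rebase):

# Submit SparkPi in Kubernetes cluster mode against a hypothetical cluster
./bin/spark-submit \
  --master k8s://https://example.com:6443 \
  --deploy-mode cluster \
  --class org.apache.spark.examples.SparkPi \
  --conf spark.kubernetes.namespace=spark-jobs \
  --conf spark.kubernetes.container.image=docker.io/myrepo/spark:v2.3.0 \
  local:///opt/spark/examples/jars/spark-examples_2.11-2.3.0.jar 1000
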
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

@@ -74,9 +74,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var principal: String = null
   var keytab: String = null
 
-  // Kubernetes only
-  var kubernetesNamespace: String = null
-
   // Standalone cluster mode only
   var supervise: Boolean = false
   var driverCores: String = null
@@ -201,9 +198,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
     keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
     principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
-    kubernetesNamespace = Option(kubernetesNamespace)
-      .orElse(sparkProperties.get("spark.kubernetes.namespace"))
-      .orNull
 
     // Try to set main class from JAR if no --class argument is given
     if (mainClass == null && !isPython && !isR && primaryResource != null) {
@@ -460,9 +454,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case KEYTAB =>
         keytab = value
 
-      case KUBERNETES_NAMESPACE =>
-        kubernetesNamespace = value
-
       case HELP =>
         printUsageAndExit(0)
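
With the dedicated field and its KUBERNETES_NAMESPACE option removed, spark.kubernetes.namespace is handled like any other Spark conf, set via --conf (as in the submission sketch above) or spark-defaults.conf. A config sketch (namespace value hypothetical):

# conf/spark-defaults.conf
# The namespace now arrives through ordinary conf loading rather than a
# dedicated SparkSubmitArguments field.
spark.kubernetes.namespace   spark-jobs
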
dev/deps/spark-deps-hadoop-palantir (8 changes: 0 additions & 8 deletions)
@@ -20,7 +20,6 @@ aws-java-sdk-core-1.11.45.jar
 aws-java-sdk-kms-1.11.45.jar
 aws-java-sdk-s3-1.11.45.jar
 base64-2.3.8.jar
-bcpkix-jdk15on-1.58.jar
 bcprov-jdk15on-1.58.jar
 breeze-macros_2.11-0.13.2.jar
 breeze_2.11-0.13.2.jar
@@ -46,8 +45,6 @@ commons-math3-3.4.1.jar
 commons-net-2.2.jar
 commons-pool-1.6.jar
 compress-lzf-1.0.3.jar
-converter-jackson-2.3.0.jar
-converter-scalars-2.3.0.jar
 core-1.1.2.jar
 curator-client-2.7.1.jar
 curator-framework-2.7.1.jar
@@ -102,8 +99,6 @@ jackson-datatype-guava-2.6.7.jar
 jackson-datatype-jdk7-2.6.7.jar
 jackson-datatype-joda-2.6.7.jar
 jackson-jaxrs-1.9.13.jar
-jackson-jaxrs-base-2.6.7.jar
-jackson-jaxrs-json-provider-2.6.7.jar
 jackson-mapper-asl-1.9.13.jar
 jackson-module-afterburner-2.6.7.jar
 jackson-module-jaxb-annotations-2.6.7.jar
@@ -129,7 +124,6 @@ jersey-container-servlet-2.25.1.jar
 jersey-container-servlet-core-2.25.1.jar
 jersey-guava-2.25.1.jar
 jersey-media-jaxb-2.25.1.jar
-jersey-media-multipart-2.25.1.jar
 jersey-server-2.25.1.jar
 jets3t-0.9.4.jar
 jetty-6.1.26.jar
@@ -160,7 +154,6 @@ metrics-graphite-3.2.5.jar
 metrics-influxdb-1.1.8.jar
 metrics-json-3.2.5.jar
 metrics-jvm-3.2.5.jar
-mimepull-1.9.6.jar
 minlog-1.3.0.jar
 netty-3.10.6.Final.jar
 netty-all-4.1.17.Final.jar
@@ -184,7 +177,6 @@ parquet-jackson-1.9.1-palantir3.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.6.jar
 pyrolite-4.13.jar
-retrofit-2.3.0.jar
 scala-compiler-2.11.8.jar
 scala-library-2.11.8.jar
 scala-parser-combinators_2.11-1.0.4.jar

(Diff truncated: the remaining changed files are not shown.)
