Commit 0cad83a

Temporary commit to support running tests locally, should be part of a…
holdenk committed Mar 7, 2019
1 parent e770092 commit 0cad83a
Showing 2 changed files with 31 additions and 14 deletions.
@@ -59,50 +59,59 @@ while (( "$#" )); do
   shift
 done
 
-if [[ $SPARK_TGZ == "N/A" ]];
+rm -rf "$UNPACKED_SPARK_TGZ"
+if [[ $SPARK_TGZ == "N/A" && $IMAGE_TAG == "N/A" ]];
 then
-  echo "Must specify a Spark tarball to build Docker images against with --spark-tgz." && exit 1;
+  # If there is no spark image tag to test with and no src dir, build from current
+  SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+  SPARK_INPUT_DIR="$(cd "$SCRIPT_DIR/"../../../../ >/dev/null 2>&1 && pwd )"
+  DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/resource-managers/kubernetes/docker/src/main/dockerfiles/spark"
+elif [[ $IMAGE_TAG == "N/A" ]];
+then
+  # If there is a test src tarball and no image tag we will want to build from that
+  mkdir -p $UNPACKED_SPARK_TGZ
+  tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ;
+  SPARK_INPUT_DIR="$UNPACKED_SPARK_TGZ"
+  DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/kubernetes/dockerfiles/spark"
 fi
 
-rm -rf $UNPACKED_SPARK_TGZ
-mkdir -p $UNPACKED_SPARK_TGZ
-tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ;
 
+# If there is a specific Spark image skip building and extraction/copy
 if [[ $IMAGE_TAG == "N/A" ]];
 then
   IMAGE_TAG=$(uuidgen);
-  cd $UNPACKED_SPARK_TGZ
+  cd $SPARK_INPUT_DIR
 
   # Build PySpark image
-  LANGUAGE_BINDING_BUILD_ARGS="-p $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/python/Dockerfile"
+  LANGUAGE_BINDING_BUILD_ARGS="-p $DOCKER_FILE_BASE_PATH/bindings/python/Dockerfile"
 
   # Build SparkR image
-  LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/R/Dockerfile"
+  LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $DOCKER_FILE_BASE_PATH/bindings/R/Dockerfile"
 
   case $DEPLOY_MODE in
     cloud)
       # Build images
-      $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
+      $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
 
       # Push images appropriately
       if [[ $IMAGE_REPO == gcr.io* ]] ;
       then
         gcloud docker -- push $IMAGE_REPO/spark:$IMAGE_TAG
       else
-        $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push
+        $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push
       fi
       ;;
 
     docker-for-desktop)
       # Only need to build as this will place it in our local Docker repo which is all
       # we need for Docker for Desktop to work so no need to also push
-      $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
+      $SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
      ;;
 
     minikube)
       # Only need to build and if we do this with the -m option for minikube we will
       # build the images directly using the minikube Docker daemon so no need to push
-      $UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
+      $SPARK_INPUT_DIR/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
      ;;
     *)
       echo "Unrecognized deploy mode $DEPLOY_MODE" && exit 1
@@ -107,8 +107,16 @@ class KubernetesSuite extends SparkFunSuite
       System.clearProperty(key)
     }
 
-    val sparkDirProp = System.getProperty(CONFIG_KEY_UNPACK_DIR)
-    require(sparkDirProp != null, "Spark home directory must be provided in system properties.")
+    val possible_spark_dirs = List(
+      // If someone specified the tgz for the tests look at the extraction dir
+      System.getProperty(CONFIG_KEY_UNPACK_DIR),
+      // Try the spark test home
+      sys.props("spark.test.home")
+    )
+    val sparkDirProp = possible_spark_dirs.filter(x =>
+      new File(Paths.get(x).toFile, "bin/spark-submit").exists).headOption.getOrElse(null)
+    require(sparkDirProp != null,
+      s"Spark home directory must be provided in system properties tested $possible_spark_dirs")
     sparkHomeDir = Paths.get(sparkDirProp)
     require(sparkHomeDir.toFile.isDirectory,
       s"No directory found for spark home specified at $sparkHomeDir.")
