Merge pull request apache#67 from mesosphere/fix-build
Fix build
mgummelt authored Sep 28, 2016
2 parents 2893d8b + c2b7a5c commit 77da455
Showing 9 changed files with 144 additions and 135 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -10,4 +10,4 @@ test:
 dist:
 	bin/dist.sh
 
-.PHONY: build test
+.PHONY: docker universe test dist
9 changes: 7 additions & 2 deletions bin/dist.sh
@@ -10,12 +10,17 @@
 # AWS_SECRET_ACCESS_KEY
 # GIT_COMMIT
 
+set -e -x -o pipefail
+
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 SPARK_DIR="${DIR}/../../spark"
 
 source "${DIR}/jenkins.sh"
 
-make_distribution
+GIT_COMMIT_NAME="spark-${GIT_COMMIT}"
+DIST_NAME=${DIST_NAME:-$GIT_COMMIT_NAME} make_distribution
 upload_to_s3
 
-SPARK_DIST_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}spark-${GIT_COMMIT}.tgz"
+SPARK_FILENAME=$(basename $(ls ${SPARK_DIR}/spark*.tgz))
+SPARK_DIST_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}/${SPARK_FILENAME}"
 echo "SPARK_DIST_URI=${SPARK_DIST_URI}" > spark_dist_uri.properties
17 changes: 10 additions & 7 deletions bin/jenkins-dist-publish.sh
@@ -1,11 +1,14 @@
 #!/bin/bash
 
-export S3_BUCKET=downloads.mesosphere.io
-export S3_PREFIX=spark/assets
-export AWS_ACCESS_KEY_ID=${PROD_AWS_ACCESS_KEY_ID}
-export AWS_SECRET_ACCESS_KEY=${PROD_AWS_SECRET_ACCESS_KEY}
+# Env Vars:
+#   GIT_BRANCH (assumed to have prefix "refs/tags/custom-")
 
-source spark-build/bin/jenkins.sh
+set -e -x -o pipefail
 
-rename_dist
-upload_to_s3
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
+
+pushd "${SPARK_BUILD_DIR}"
+VERSION=${GIT_BRANCH#origin/tags/custom-}
+DIST_NAME="spark-${VERSION}" make dist
+popd
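VERSION=${GIT_BRANCH#origin/tags/custom-} uses the shell's "#" expansion, which removes the shortest matching prefix. A small sketch with a hypothetical tag ref:

    GIT_BRANCH="origin/tags/custom-1.6.2"        # hypothetical value supplied by Jenkins
    VERSION=${GIT_BRANCH#origin/tags/custom-}    # strips the literal prefix
    echo "${VERSION}"                            # -> 1.6.2
    echo "spark-${VERSION}"                      # -> spark-1.6.2, the DIST_NAME passed to make dist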
26 changes: 11 additions & 15 deletions bin/jenkins-dist-test.sh
@@ -2,22 +2,18 @@
 
 set -e -x -o pipefail
 
-# export S3_BUCKET=infinity-artifacts
-# export S3_PREFIX=spark/
-# export DOCKER_IMAGE=mesosphere/spark-dev:${GIT_COMMIT}
-# export AWS_ACCESS_KEY_ID=${DEV_AWS_ACCESS_KEY_ID}
-# export AWS_SECRET_ACCESS_KEY=${DEV_AWS_SECRET_ACCESS_KEY}
-
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-SPARK_BUILD_DIR=${DIR}../
-
-pushd "${SPARK_BUILD_DIR}"
+SPARK_BUILD_DIR=${DIR}/..
 
-source bin/jenkins.sh
-
-install_cli
-make dist
-$(cat spark_dist_uri.properties) make universe
-$(cat $stub-universe.properties) make test
+function run() {
+    source bin/jenkins.sh
+    install_cli
+    docker_login
+    make dist && export $(cat spark_dist_uri.properties)
+    make universe && export $(cat "${WORKSPACE}/stub-universe.properties")
+    make test
+}
+
+pushd "${SPARK_BUILD_DIR}"
+run
+popd
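The make dist && export $(cat spark_dist_uri.properties) step depends on the properties file holding newline-separated KEY=VALUE pairs: the unquoted $(cat ...) is word-split and each word becomes an argument to export. A sketch of the mechanism with a hypothetical URI (note it breaks if a value contains whitespace):

    echo "SPARK_DIST_URI=http://example.com/spark-test.tgz" > spark_dist_uri.properties  # hypothetical
    export $(cat spark_dist_uri.properties)      # expands to: export SPARK_DIST_URI=http://...
    echo "${SPARK_DIST_URI}"                     # -> http://example.com/spark-test.tgz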
20 changes: 13 additions & 7 deletions bin/jenkins-package-publish.sh
@@ -1,12 +1,18 @@
 #!/bin/bash
 
-export VERSION=${GIT_BRANCH#refs/tags/}
-export S3_BUCKET=downloads.mesosphere.io
-export S3_PREFIX=spark/assets
-export DEV_S3_BUCKET=infinity-artifacts
-export DEV_S3_PREFIX=autodelete7d/spark
-export DOCKER_IMAGE=mesosphere/spark:${VERSION}
+set -e -x -o pipefail
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
 
 source spark-build/bin/jenkins.sh
 
-spark_test
+pushd "${SPARK_BUILD_DIR}"
+install_cli
+docker_login
+make universe
+cp ../stub-universe.properties ../build.properties
+VERSION=${GIT_BRANCH#origin/tags/}
+echo "RELEASE_VERSION=${VERSION}" >> ../build.properties
+echo "RELEASE_DOCKER_IMAGE=mesosphere/spark:${VERSION}" >> ../build.properties
+popd
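The publish job hands release metadata to downstream jobs through ../build.properties: it seeds the file with the stub-universe output, then appends the release keys. A hypothetical trace, assuming the tag origin/tags/2.0.0 and that stub-universe.properties carries a STUB_UNIVERSE_URL line:

    cp ../stub-universe.properties ../build.properties        # assumed to contain STUB_UNIVERSE_URL=...
    echo "RELEASE_VERSION=2.0.0" >> ../build.properties
    echo "RELEASE_DOCKER_IMAGE=mesosphere/spark:2.0.0" >> ../build.properties
    cat ../build.properties                                   # three KEY=VALUE lines for the next job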
70 changes: 44 additions & 26 deletions bin/jenkins-package-test.sh
@@ -1,28 +1,46 @@
 #!/bin/bash
 
-export VERSION=${ghprbActualCommit}
-if [ -z "$VERSION" ]; then
-    export VERSION=${GIT_COMMIT}
-fi
-
-export DOCKER_IMAGE=mesosphere/spark-dev:${VERSION}
-
-export S3_BUCKET=infinity-artifacts
-export S3_PREFIX=autodelete7d/spark/${VERSION}
-# fill in any missing DEV_* AWS envvars required by test.sh:
-if [ -z "$DEV_S3_BUCKET" ]; then
-    export DEV_S3_BUCKET=$S3_BUCKET
-fi
-if [ -z "$DEV_S3_PREFIX" ]; then
-    export DEV_S3_PREFIX=$S3_PREFIX
-fi
-if [ -z "$DEV_AWS_ACCESS_KEY_ID" ]; then
-    export DEV_AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
-fi
-if [ -z "$DEV_AWS_SECRET_ACCESS_KEY" ]; then
-    export DEV_AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
-fi
-
-source spark-build/bin/jenkins.sh
-
-spark_test
+set -e -x -o pipefail
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
+
+function run() {
+    source bin/jenkins.sh
+    install_cli
+    docker_login
+    make universe && export $(cat "${WORKSPACE}/stub-universe.properties")
+    make test
+}
+
+pushd "${SPARK_BUILD_DIR}"
+run
+popd
+
+# #!/bin/bash
+
+# export VERSION=${ghprbActualCommit}
+# if [ -z "$VERSION" ]; then
+#     export VERSION=${GIT_COMMIT}
+# fi
+
+# export DOCKER_IMAGE=mesosphere/spark-dev:${VERSION}
+
+# export S3_BUCKET=infinity-artifacts
+# export S3_PREFIX=autodelete7d/spark/${VERSION}
+# # fill in any missing DEV_* AWS envvars required by test.sh:
+# if [ -z "$DEV_S3_BUCKET" ]; then
+#     export DEV_S3_BUCKET=$S3_BUCKET
+# fi
+# if [ -z "$DEV_S3_PREFIX" ]; then
+#     export DEV_S3_PREFIX=$S3_PREFIX
+# fi
+# if [ -z "$DEV_AWS_ACCESS_KEY_ID" ]; then
+#     export DEV_AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
+# fi
+# if [ -z "$DEV_AWS_SECRET_ACCESS_KEY" ]; then
+#     export DEV_AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
+# fi
+
+# source spark-build/bin/jenkins.sh
+
+# spark_test
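The if [ -z "$VAR" ] blocks in the commented-out script are the long form of a defaulting pattern; the := expansion is an equivalent shorthand. A sketch of the equivalence (not part of this commit):

    # Long form, as in the old script:
    if [ -z "$DEV_S3_BUCKET" ]; then
        export DEV_S3_BUCKET=$S3_BUCKET
    fi

    # Shorthand: ':' is a no-op command, := assigns only when unset or empty:
    : "${DEV_S3_BUCKET:=$S3_BUCKET}"
    export DEV_S3_BUCKET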
93 changes: 46 additions & 47 deletions bin/jenkins.sh
@@ -7,11 +7,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 SPARK_DIR="${DIR}/../../spark"
 SPARK_BUILD_DIR="${DIR}/.."
 
-export TEST_JAR_PATH=$(pwd)/mesos-spark-integration-tests-assembly-0.1.0.jar
-export COMMONS_TOOLS_DIR=$(pwd)/dcos-commons/tools/
-export CCM_TEMPLATE=single-master.cloudformation.json
-
 function make_distribution {
+    # Env var: DIST_NAME
+
     pushd "${SPARK_DIR}"
 
     if [[ -n "${SPARK_DIST_URI}" ]]; then
@@ -23,19 +21,18 @@ function make_distribution {
         ./dev/make-distribution.sh -Pmesos -Phadoop-2.6 -DskipTests
         fi
 
-        local DIST="spark-${GIT_COMMIT}"
-        mv dist ${DIST}
-        tar czf ${DIST}.tgz ${DIST}
+        mv dist ${DIST_NAME}
+        tar czf ${DIST_NAME}.tgz ${DIST_NAME}
     fi
 
     popd
 }
 
-# rename spark/spark-<SHA1>.tgz to spark/spark-<TAG>.tgz
+# rename spark/spark-*.tgz to spark/spark-<TAG>.tgz
 function rename_dist {
     pushd "${SPARK_DIR}"
 
-    local VERSION=${GIT_BRANCH#refs/tags/private-}
+    local VERSION=${GIT_BRANCH#refs/tags/custom-}
 
     # rename to spark-<tag>
     tar xvf spark-*.tgz
@@ -46,35 +43,37 @@ function rename_dist {
     popd
 }
 
+# uploads spark/spark-*.tgz to S3
 function upload_to_s3 {
     pushd "${SPARK_DIR}"
 
     env
     aws --debug s3 cp \
         --acl public-read \
         spark-*.tgz \
-        "s3://${S3_BUCKET}/${S3_PREFIX}"
+        "s3://${S3_BUCKET}/${S3_PREFIX}/"
 
     popd
 }
 
-function update_manifest {
-    pushd "${SPARK_BUILD_DIR}"
+# function update_manifest {
+#     pushd "${SPARK_BUILD_DIR}"
 
-    # update manifest.json with new spark dist:
-    SPARK_DIST=$(ls ../spark/spark*.tgz)
-    SPARK_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}$(basename ${SPARK_DIST})"
-    cat manifest.json | jq ".spark_uri=\"${SPARK_URI}\"" > manifest.json.tmp
-    mv manifest.json.tmp manifest.json
+#     # update manifest.json with new spark dist:
+#     SPARK_DIST=$(ls ../spark/spark*.tgz)
+#     SPARK_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}$(basename ${SPARK_DIST})"
+#     cat manifest.json | jq ".spark_uri=\"${SPARK_URI}\"" > manifest.json.tmp
+#     mv manifest.json.tmp manifest.json
 
-    popd
-}
+#     popd
+# }
 
 function install_cli {
     curl -O https://downloads.mesosphere.io/dcos-cli/install.sh
-    rm -rf cli/
-    mkdir cli
-    bash install.sh cli http://change.me --add-path no
-    source cli/bin/env-setup
+    rm -rf dcos-cli/
+    mkdir dcos-cli
+    bash install.sh dcos-cli http://change.me --add-path no
+    source dcos-cli/bin/env-setup
 
     # hack because the installer forces an old CLI version
     pip install -U dcoscli
@@ -87,27 +86,27 @@ function docker_login {
docker login [email protected] --username="${DOCKER_USERNAME}" --password="${DOCKER_PASSWORD}"
}

-function spark_test {
-    install_cli
-
-    pushd spark-build
-    docker_login
-    # build/upload artifacts: docker + cli + stub universe:
-    make build
-    # in CI environments, ci_upload.py creates a 'stub-universe.properties' file
-    # grab the STUB_UNIVERSE_URL from the file for use by test.sh:
-    export $(cat $WORKSPACE/stub-universe.properties)
-    # run tests against build artifacts:
-    CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
-        TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
-        S3_BUCKET=${DEV_S3_BUCKET} \
-        S3_PREFIX=${DEV_S3_PREFIX} \
-        make test
-    popd
-}
 
-function upload_distribution {
-    make_distribution
-    upload_to_s3
-    update_manifest
-}
+# function spark_test {
+#     install_cli
+
+#     pushd spark-build
+#     docker_login
+#     # build/upload artifacts: docker + cli + stub universe:
+#     make build
+#     # in CI environments, ci_upload.py creates a 'stub-universe.properties' file
+#     # grab the STUB_UNIVERSE_URL from the file for use by test.sh:
+#     export $(cat $WORKSPACE/stub-universe.properties)
+#     # run tests against build artifacts:
+#     CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
+#         TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
+#         S3_BUCKET=${DEV_S3_BUCKET} \
+#         S3_PREFIX=${DEV_S3_PREFIX} \
+#         make test
+#     popd
+# }
 
+# function upload_distribution {
+#     make_distribution
+#     upload_to_s3
+#     update_manifest
+# }
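Every script touched here resolves its own location with the same BASH_SOURCE idiom, so relative paths like ${DIR}/../../spark work regardless of the caller's working directory. A minimal sketch (the echoed path is hypothetical):

    DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"   # absolute directory of this script
    echo "${DIR}"               # e.g. /var/lib/jenkins/workspace/spark-build/bin (hypothetical)
    ls "${DIR}/.."              # the spark-build checkout, independent of the caller's cwd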
7 changes: 1 addition & 6 deletions bin/test.sh
@@ -99,12 +99,7 @@ run_tests() {
     fi
     source env/bin/activate
     pip install -r requirements.txt
-    AWS_ACCESS_KEY_ID=${DEV_AWS_ACCESS_KEY_ID} \
-    AWS_SECRET_ACCESS_KEY=${DEV_AWS_SECRET_ACCESS_KEY} \
-    S3_BUCKET=${DEV_S3_BUCKET} \
-    S3_PREFIX=${DEV_S3_PREFIX} \
-    TEST_JAR_PATH=${TEST_JAR_PATH} \
-    python test.py
+    python test.py
     if [ $? -ne 0 ]; then
         notify_github failure "Tests failed"
         exit 1
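The surviving $? check is one way to branch on an exit status; testing the command directly is equivalent and also keeps a set -e shell from exiting before the check runs. A sketch of the alternative (not part of this commit):

    if ! python test.py; then
        notify_github failure "Tests failed"
        exit 1
    fi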
