forked from apache/spark
Commit
Merge pull request apache#67 from mesosphere/fix-build
Fix build
Showing 9 changed files with 144 additions and 135 deletions.
@@ -10,4 +10,4 @@ test:
 dist:
 	bin/dist.sh
 
-.PHONY: build test
+.PHONY: docker universe test dist
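A note on the Makefile change: make skips any target whose name matches an existing file, so targets that are commands rather than files must be declared phony or they can silently stop running. A minimal sketch of the failure mode (the stray file is hypothetical):

    # If a file named "dist" ever appeared in the repo root, `make dist`
    # would print "'dist' is up to date" and skip bin/dist.sh entirely.
    # Listing the target in .PHONY forces the recipe to run every time:
    .PHONY: docker universe test dist
    dist:
    	bin/dist.sh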
@@ -1,11 +1,14 @@
 #!/bin/bash
 
-export S3_BUCKET=downloads.mesosphere.io
-export S3_PREFIX=spark/assets
-export AWS_ACCESS_KEY_ID=${PROD_AWS_ACCESS_KEY_ID}
-export AWS_SECRET_ACCESS_KEY=${PROD_AWS_SECRET_ACCESS_KEY}
+# Env Vars:
+# GIT_BRANCH (assumed to have prefix "refs/tags/custom-")
 
-source spark-build/bin/jenkins.sh
+set -e -x -o pipefail
 
-rename_dist
-upload_to_s3
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
+
+pushd "${SPARK_BUILD_DIR}"
+VERSION=${GIT_BRANCH#origin/tags/custom-}
+DIST_NAME="spark-${VERSION}" make dist
+popd
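The VERSION assignment relies on bash prefix stripping: ${var#pattern} removes the shortest match of pattern from the front of the value. A quick illustration with a made-up tag:

    GIT_BRANCH="origin/tags/custom-2.0.1"
    VERSION=${GIT_BRANCH#origin/tags/custom-}   # strip the fixed prefix
    echo "${VERSION}"                           # prints: 2.0.1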
@@ -1,12 +1,18 @@
 #!/bin/bash
 
-export VERSION=${GIT_BRANCH#refs/tags/}
-export S3_BUCKET=downloads.mesosphere.io
-export S3_PREFIX=spark/assets
-export DEV_S3_BUCKET=infinity-artifacts
-export DEV_S3_PREFIX=autodelete7d/spark
-export DOCKER_IMAGE=mesosphere/spark:${VERSION}
+set -e -x -o pipefail
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
 
 source spark-build/bin/jenkins.sh
 
-spark_test
+pushd "${SPARK_BUILD_DIR}"
+install_cli
+docker_login
+make universe
+cp ../stub-universe.properties ../build.properties
+VERSION=${GIT_BRANCH#origin/tags/}
+echo "RELEASE_VERSION=${VERSION}" >> ../build.properties
+echo "RELEASE_DOCKER_IMAGE=mesosphere/spark:${VERSION}" >> ../build.properties
+popd
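The build.properties written here holds plain KEY=VALUE lines, the format Jenkins properties files use. Assuming none of the values contain spaces, a downstream shell step could load it with the same export-from-file idiom the test script below uses for stub-universe.properties:

    # read KEY=VALUE pairs from the file and export them into the environment
    export $(cat build.properties)
    echo "${RELEASE_VERSION}"        # the tag-derived version
    echo "${RELEASE_DOCKER_IMAGE}"   # e.g. mesosphere/spark:<version>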
@@ -1,28 +1,46 @@
 #!/bin/bash
 
-export VERSION=${ghprbActualCommit}
-if [ -z "$VERSION" ]; then
-    export VERSION=${GIT_COMMIT}
-fi
-
-export DOCKER_IMAGE=mesosphere/spark-dev:${VERSION}
-
-export S3_BUCKET=infinity-artifacts
-export S3_PREFIX=autodelete7d/spark/${VERSION}
-# fill in any missing DEV_* AWS envvars required by test.sh:
-if [ -z "$DEV_S3_BUCKET" ]; then
-    export DEV_S3_BUCKET=$S3_BUCKET
-fi
-if [ -z "$DEV_S3_PREFIX" ]; then
-    export DEV_S3_PREFIX=$S3_PREFIX
-fi
-if [ -z "$DEV_AWS_ACCESS_KEY_ID" ]; then
-    export DEV_AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
-fi
-if [ -z "$DEV_AWS_SECRET_ACCESS_KEY" ]; then
-    export DEV_AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
-fi
-
-source spark-build/bin/jenkins.sh
-
-spark_test
+set -e -x -o pipefail
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SPARK_BUILD_DIR=${DIR}/..
+
+function run() {
+    source bin/jenkins.sh
+    install_cli
+    docker_login
+    make universe && export $(cat "${WORKSPACE}/stub-universe.properties")
+    make test
+}
+
+pushd "${SPARK_BUILD_DIR}"
+run
+popd
+# #!/bin/bash
+
+# export VERSION=${ghprbActualCommit}
+# if [ -z "$VERSION" ]; then
+#     export VERSION=${GIT_COMMIT}
+# fi
+
+# export DOCKER_IMAGE=mesosphere/spark-dev:${VERSION}
+
+# export S3_BUCKET=infinity-artifacts
+# export S3_PREFIX=autodelete7d/spark/${VERSION}
+# # fill in any missing DEV_* AWS envvars required by test.sh:
+# if [ -z "$DEV_S3_BUCKET" ]; then
+#     export DEV_S3_BUCKET=$S3_BUCKET
+# fi
+# if [ -z "$DEV_S3_PREFIX" ]; then
+#     export DEV_S3_PREFIX=$S3_PREFIX
+# fi
+# if [ -z "$DEV_AWS_ACCESS_KEY_ID" ]; then
+#     export DEV_AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
+# fi
+# if [ -z "$DEV_AWS_SECRET_ACCESS_KEY" ]; then
+#     export DEV_AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
+# fi
+
+# source spark-build/bin/jenkins.sh
+
+# spark_test
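Each rewritten script now opens with the same shell options, which is what makes a failing step abort the CI job instead of scrolling past unnoticed. A small self-contained demonstration (the commands are placeholders):

    #!/bin/bash
    set -e -x -o pipefail
    # -e          : exit as soon as any command returns non-zero
    # -x          : print each command before running it (handy in CI logs)
    # -o pipefail : a pipeline fails when ANY stage fails, not just the last
    false | true      # with pipefail set, this line aborts the script
    echo "never reached"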
@@ -7,11 +7,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 SPARK_DIR="${DIR}/../../spark"
 SPARK_BUILD_DIR="${DIR}/.."
 
-export TEST_JAR_PATH=$(pwd)/mesos-spark-integration-tests-assembly-0.1.0.jar
-export COMMONS_TOOLS_DIR=$(pwd)/dcos-commons/tools/
-export CCM_TEMPLATE=single-master.cloudformation.json
 
 function make_distribution {
+    # Env var: DIST_NAME
     pushd "${SPARK_DIR}"
 
     if [[ -n "${SPARK_DIST_URI}" ]]; then
@@ -23,19 +21,18 @@
         ./dev/make-distribution.sh -Pmesos -Phadoop-2.6 -DskipTests
         fi
 
-        local DIST="spark-${GIT_COMMIT}"
-        mv dist ${DIST}
-        tar czf ${DIST}.tgz ${DIST}
+        mv dist ${DIST_NAME}
+        tar czf ${DIST_NAME}.tgz ${DIST_NAME}
     fi
 
     popd
 }
 
-# rename spark/spark-<SHA1>.tgz to spark/spark-<TAG>.tgz
+# rename spark/spark-*.tgz to spark/spark-<TAG>.tgz
 function rename_dist {
     pushd "${SPARK_DIR}"
 
-    local VERSION=${GIT_BRANCH#refs/tags/private-}
+    local VERSION=${GIT_BRANCH#refs/tags/custom-}
 
     # rename to spark-<tag>
     tar xvf spark-*.tgz
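The DIST_NAME="spark-${VERSION}" make dist invocation added earlier in this commit depends on a standard bash feature: a leading VAR=value assignment applies only to the environment of that one command, so DIST_NAME reaches make_distribution without leaking into the calling shell. Sketch with a made-up version:

    DIST_NAME="spark-2.0.1" make dist   # visible inside make and its child processes
    echo "${DIST_NAME:-unset}"          # prints: unset, the assignment did not persist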
@@ -46,35 +43,37 @@
     popd
 }
 
+# uploads spark/spark-*.tgz to S3
 function upload_to_s3 {
     pushd "${SPARK_DIR}"
 
+    env
     aws --debug s3 cp \
         --acl public-read \
         spark-*.tgz \
-        "s3://${S3_BUCKET}/${S3_PREFIX}"
+        "s3://${S3_BUCKET}/${S3_PREFIX}/"
 
     popd
 }
 
-function update_manifest {
-    pushd "${SPARK_BUILD_DIR}"
+# function update_manifest {
+#     pushd "${SPARK_BUILD_DIR}"
 
-    # update manifest.json with new spark dist:
-    SPARK_DIST=$(ls ../spark/spark*.tgz)
-    SPARK_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}$(basename ${SPARK_DIST})"
-    cat manifest.json | jq ".spark_uri=\"${SPARK_URI}\"" > manifest.json.tmp
-    mv manifest.json.tmp manifest.json
+#     # update manifest.json with new spark dist:
+#     SPARK_DIST=$(ls ../spark/spark*.tgz)
+#     SPARK_URI="http://${S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}$(basename ${SPARK_DIST})"
+#     cat manifest.json | jq ".spark_uri=\"${SPARK_URI}\"" > manifest.json.tmp
+#     mv manifest.json.tmp manifest.json
 
-    popd
-}
+#     popd
+# }
 
 function install_cli {
     curl -O https://downloads.mesosphere.io/dcos-cli/install.sh
-    rm -rf cli/
-    mkdir cli
-    bash install.sh cli http://change.me --add-path no
-    source cli/bin/env-setup
+    rm -rf dcos-cli/
+    mkdir dcos-cli
+    bash install.sh dcos-cli http://change.me --add-path no
+    source dcos-cli/bin/env-setup
 
     # hack because the installer forces an old CLI version
     pip install -U dcoscli
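The functional fix in upload_to_s3 is the trailing slash on the destination: with aws s3 cp, a destination that does not end in "/" is treated as the full object key, while one that does gets the source filename appended. Sketch with a hypothetical tarball:

    # no trailing slash: the prefix itself becomes the object key
    aws s3 cp spark-2.0.1.tgz "s3://downloads.mesosphere.io/spark/assets"
    #   -> s3://downloads.mesosphere.io/spark/assets
    # trailing slash: the file lands under the prefix
    aws s3 cp spark-2.0.1.tgz "s3://downloads.mesosphere.io/spark/assets/"
    #   -> s3://downloads.mesosphere.io/spark/assets/spark-2.0.1.tgz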
@@ -87,27 +86,27 @@
     docker login [email protected] --username="${DOCKER_USERNAME}" --password="${DOCKER_PASSWORD}"
 }
 
-function spark_test {
-    install_cli
+# function spark_test {
+#     install_cli
 
-    pushd spark-build
-    docker_login
-    # build/upload artifacts: docker + cli + stub universe:
-    make build
-    # in CI environments, ci_upload.py creates a 'stub-universe.properties' file
-    # grab the STUB_UNIVERSE_URL from the file for use by test.sh:
-    export $(cat $WORKSPACE/stub-universe.properties)
-    # run tests against build artifacts:
-    CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
-    TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
-    S3_BUCKET=${DEV_S3_BUCKET} \
-    S3_PREFIX=${DEV_S3_PREFIX} \
-    make test
-    popd
-}
+#     pushd spark-build
+#     docker_login
+#     # build/upload artifacts: docker + cli + stub universe:
+#     make build
+#     # in CI environments, ci_upload.py creates a 'stub-universe.properties' file
+#     # grab the STUB_UNIVERSE_URL from the file for use by test.sh:
+#     export $(cat $WORKSPACE/stub-universe.properties)
+#     # run tests against build artifacts:
+#     CLUSTER_NAME=spark-package-${BUILD_NUMBER} \
+#     TEST_DIR=$(pwd)/../mesos-spark-integration-tests/ \
+#     S3_BUCKET=${DEV_S3_BUCKET} \
+#     S3_PREFIX=${DEV_S3_PREFIX} \
+#     make test
+#     popd
+# }
 
-function upload_distribution {
-    make_distribution
-    upload_to_s3
-    update_manifest
-}
+# function upload_distribution {
+#     make_distribution
+#     upload_to_s3
+#     update_manifest
+# }
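All of the rewritten entry points locate themselves with the same DIR= line, so they behave the same no matter what working directory Jenkins happens to invoke them from. A standalone sketch of the idiom:

    #!/bin/bash
    # Resolve the directory containing this script, even when it is invoked
    # via a relative path or from another directory:
    DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    SPARK_BUILD_DIR=${DIR}/..      # how these scripts reach the checkout root
    echo "script dir: ${DIR}"
    ls "${SPARK_BUILD_DIR}"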