CI: add spark-shell wrapper with dependencies
Signed-off-by: Alexander Bezzubov <[email protected]>
bzz committed Sep 22, 2017
1 parent abe8a40 commit d9bb0c5
Showing 4 changed files with 30 additions and 6 deletions.
3 changes: 1 addition & 2 deletions .travis.yml
@@ -27,9 +27,8 @@ before_script:

script:
- ./sbt ++$TRAVIS_SCALA_VERSION jacoco:cover || travis_terminate 1
-- ./sbt assembly
- ./_tools/getApacheSpark.sh "2.2.0" "2.7"
-- ./spark/bin/spark-shell --jars ./target/scala-2.11/spark-api-uber.jar -i src/test/resources/SparkShellScript.scala --properties-file src/main/resources/spark-config.properties
+- ./spark-shell -i src/test/resources/SparkShellScript.scala

after_success:
- bash <(curl -s https://codecov.io/bash)
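
Note: since the wrapper added below prefers a local ./spark/ checkout, the single CI line above effectively expands on Travis, where getApacheSpark.sh has already populated ./spark/, to something like the following (a sketch derived from the wrapper, not a command present in the repo):

    SPARK_HOME='' ./spark/bin/spark-shell \
      --jars ./target/scala-2.11/spark-api_2.11-0.1.0-SNAPSHOT.jar \
      --repositories "https://jitpack.io" \
      --packages "tech.sourced:enry-java:1.0,com.github.src-d:siva-java:master-SNAPSHOT,org.eclipse.jgit:org.eclipse.jgit:4.8.0.201706111038-r,com.github.bblfsh:client-scala:v0.1.0" \
      --exclude-packages "org.slf4j:slf4j-api" \
      --properties-file src/main/resources/spark-config.properties \
      -i src/test/resources/SparkShellScript.scala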
4 changes: 1 addition & 3 deletions build.sbt
@@ -13,16 +13,14 @@ lazy val root = (project in file(".")).
libraryDependencies += scalaTest % Test,
libraryDependencies += sparkSql % Provided,
libraryDependencies += newerHadoopClient % Provided, //due to newer v. of guava in bblfsh
-libraryDependencies += fixNettyForGrpc, // grpc for bblfsh/client-scala needs to be newer than in Spark
+libraryDependencies += fixNettyForGrpc % Compile, // grpc for bblfsh/client-scala needs to be newer than in Spark
libraryDependencies += jgit % Compile,
libraryDependencies += siva % Compile,
libraryDependencies += bblfsh % Compile,
libraryDependencies += commonsIO % Compile,
libraryDependencies += enry % Compile,

resolvers += "jitpack" at "https://jitpack.io",
-// TODO: remove this local resolver when enry-java becomes available from jitpack.
-resolvers += "Local Ivy repository" at "file://" + Path.userHome.absolutePath + "/.ivy2/repository",

test in assembly := {},
assemblyJarName in assembly := s"${name.value}-uber.jar"
28 changes: 28 additions & 0 deletions spark-shell
@@ -0,0 +1,28 @@
#!/bin/bash

E_NO_SPARK=1 # exit status used below when no Spark installation can be found

hash java >/dev/null 2>&1 || { echo "Please install Java" >&2; exit 1; }


sparkShell() {
if hash spark-shell 2>/dev/null; then
exec spark-shell "$@"
elif [[ -d "./spark" ]]; then
echo "Using spark-shell from ./spark/"
SPARK_HOME='' "./spark/bin/spark-shell" "$@"
elif [[ -n "${SPARK_HOME}" ]]; then
echo "Using spark-shell from ${SPARK_HOME}"
"${SPARK_HOME}/bin/spark-shell" "$@"
else
echo "Please, install and configure Apache Spark and set SPARK_HOME"
exit "${E_NO_SPARK}"
fi
}


sparkShell \
--jars ./target/scala-2.11/spark-api_2.11-0.1.0-SNAPSHOT.jar \
--repositories "https://jitpack.io" \
--packages "tech.sourced:enry-java:1.0,com.github.src-d:siva-java:master-SNAPSHOT,org.eclipse.jgit:org.eclipse.jgit:4.8.0.201706111038-r,com.github.bblfsh:client-scala:v0.1.0" \
--exclude-packages "org.slf4j:slf4j-api" \
--properties-file src/main/resources/spark-config.properties \
"$@"
1 change: 0 additions & 1 deletion src/main/scala/tech/sourced/api/udf/ExtractUASTsUDF.scala
@@ -20,7 +20,6 @@ object ExtractUASTsUDF extends CustomUDF {
}

def extractUASTsWithLang(path: String, content: Array[Byte], lang: String): Array[Byte] = {
-println(s"extractUASTsWithLang: $path, $content, $lang")
extractUAST(path, content, lang)
}

