From bdd5e298b95a2a0bb05bf520e787069dcba1d6e9 Mon Sep 17 00:00:00 2001 From: Ian O'Connell Date: Tue, 13 Sep 2016 12:01:02 -0700 Subject: [PATCH] Persistent/worker scala compiler (#91) * Squashed commit of the following: commit d3ed2080ba81d0b91c6c119fb1512976f4e912ed Merge: 61b3b57 cd24529 Author: Ian O'Connell Date: Mon Sep 12 21:21:53 2016 -0700 Merge pull request #3 from ianoc/oscar/worker-resources Add resource support, minor cleanups commit cd24529ab19a4ab1f124da6eefb9a90ce1b48511 Author: Oscar Boykin Date: Mon Sep 12 18:05:01 2016 -1000 Add resource support, minor cleanups commit 61b3b57362d29f2ee4729fbfd452441222ad756d Merge: 9bf9087 d0d6658 Author: P. Oscar Boykin Date: Mon Sep 12 16:53:40 2016 -1000 Merge pull request #2 from ianoc/oscar/javac_scala_worker Add javac support for mixed targets commit d0d6658d1f6ff948949ac47ea1ab7f9f128c8606 Author: Oscar Boykin Date: Mon Sep 12 14:29:18 2016 -1000 Add javac support for mixed targets commit 9bf9087750c6cc54075f018625aafee9dbc9124b Merge: 617885e 30b0efc Author: Ian O'Connell Date: Mon Sep 12 16:25:56 2016 -0700 Merge pull request #1 from ianoc/oscar/resident-compiler Factor out ScalaCInvoker option parsing commit 30b0efca3ad04ae4360b51fbb707ca96b1e6d55e Author: Oscar Boykin Date: Mon Sep 12 13:22:56 2016 -1000 address review commit b28dcd15176a9752205a26170ae30592b54ebc20 Author: Oscar Boykin Date: Mon Sep 12 12:35:14 2016 -1000 Factor out ScalaCInvoker option parsing commit 617885e4bcd271373917c0374dfd42842a9b4189 Author: Ian O Connell Date: Sun Sep 11 21:43:59 2016 -0700 Adding commit 821ab21013b452fd0520f3a92454c0dfc11b4948 Author: Ian O Connell Date: Sun Sep 11 19:47:11 2016 -0700 wip commit f6bebe65b7ed790b180c64f671f612e6d20bf7c4 Author: Ian O Connell Date: Sun Sep 11 19:38:41 2016 -0700 wip commit 4ab68100dc4200eccb84865e4b1cb37e569d9b90 Author: Ian O Connell Date: Sun Sep 11 19:33:02 2016 -0700 wip commit cdc4995d2b949e335cd1de9577161da751f06da2 Author: Ian O Connell Date: Sun Sep 11 17:55:40 2016 
-0700 wip commit 51b92510f1d4e19336feddbd07d5da46f5b11b2b Author: Ian O Connell Date: Sun Sep 11 17:45:45 2016 -0700 WIP commit e4b53aa5c08bf1c78222635759e7e15ddcaff85d Author: Ian O Connell Date: Sun Sep 11 17:29:09 2016 -0700 WIP commit 1cc779c4eefc39430f6b5a5249b691cc829c8e51 Author: Ian O Connell Date: Sun Sep 11 17:16:05 2016 -0700 Wip commit 1f9600c5110c7259a2b5fdf01260f0fdb9d5a809 Author: Ian O Connell Date: Sun Sep 11 14:13:33 2016 -0700 WIP commit d2dce8938c14a9ff14ddc09d1b27ef4143a2c702 Author: Ian O Connell Date: Sun Sep 11 14:11:14 2016 -0700 Undo commit ec05677f061dbdd11b9b56f6dac03512b5e58015 Author: Ian O Connell Date: Sun Sep 11 14:06:17 2016 -0700 WIP commit dcf82b40aeb9bc111d1c66f7757bdb9958fea97c Author: Ian O Connell Date: Sun Sep 11 12:19:19 2016 -0700 Some formatting so my editor is less cranky commit f11926aa8bbf4d259f05b2866d221ac7660ec9bb Author: Ian O Connell Date: Sun Sep 11 12:12:29 2016 -0700 WIP -- have the jar creator code inlined commit 0a1ad35ae26b42c3d8b0afb9ba30712ec71ccf4a Author: Ian O Connell Date: Sat Sep 10 21:01:44 2016 -0700 wip commit 3a0c1abb26c43ce34292e7592f35e39002d83f36 Author: Ian O Connell Date: Sat Sep 10 20:58:10 2016 -0700 WIP commit 15d02b1e981552b61d07a0f5ee369d1542c6b7f4 Author: Ian O Connell Date: Sat Sep 10 18:10:28 2016 -0700 Make deploy jar name easier to grep for commit d3823aa31735bc8d1a33f9ffac4a1ed98841fee0 Author: Ian O Connell Date: Tue Aug 9 11:40:40 2016 -0700 Use bind's for twitter scrooge so local repo's can override scrooge versions commit f11e814e4ea93d508fc6f94d2eb20273651a0d50 Merge: f82800b 4146c9e Author: Ian O Connell Date: Thu Jul 28 16:28:02 2016 -0700 Merge branch 'master' of github.com:bazelbuild/rules_scala commit f82800b9324626b8239213a68b85a5e48b222a88 Author: Ian O Connell Date: Thu Jul 28 15:32:14 2016 -0700 Fix repl test commit 9c3f37cf4a5aa0d7e38037b3040eaa1af813ee4a Author: Ian O Connell Date: Thu Jul 28 15:30:46 2016 -0700 Update test rules * Handle passing through jvm options 
correctly, fix up merges from master * Set bazel version to a tag * Bump bazel version * Remove cat * Review comments. Collapsing code, killing code * delete code, unused import statements. * Remove unused code/ unneeded comments * Add the jvm options back in for usage with javac * Review comments * Add comment describing the args to a worker cmd * Move compile java sources to its own method * Kill maven_jar of guava * Update readme * Readme edit --- .bazel-installer-linux-x86_64.sh.sha256 | 2 +- .travis.yml | 7 +- README.md | 15 +- WORKSPACE | 7 + scala/scala.bzl | 679 +++++++++--------- scala/support/BUILD | 8 +- src/java/io/bazel/rulesscala/jar/BUILD | 10 +- .../io/bazel/rulesscala/jar/JarCreator.java | 18 +- src/java/io/bazel/rulesscala/scalac/BUILD | 15 + .../rulesscala/scalac/CompileOptions.java | 139 ++++ .../rulesscala/scalac/ScalaCInvoker.java | 329 +++++++++ src/scala/scripts/BUILD | 18 +- .../scripts/TwitterScroogeGenerator.scala | 2 +- test/BUILD | 2 +- 14 files changed, 901 insertions(+), 350 deletions(-) create mode 100644 src/java/io/bazel/rulesscala/scalac/BUILD create mode 100644 src/java/io/bazel/rulesscala/scalac/CompileOptions.java create mode 100644 src/java/io/bazel/rulesscala/scalac/ScalaCInvoker.java diff --git a/.bazel-installer-linux-x86_64.sh.sha256 b/.bazel-installer-linux-x86_64.sh.sha256 index 9350addec..eaca0225c 100644 --- a/.bazel-installer-linux-x86_64.sh.sha256 +++ b/.bazel-installer-linux-x86_64.sh.sha256 @@ -1 +1 @@ -e0e4efe35b2c9f2b1f3c3929fc401e27c312090e6a305c046ecb59b9e3128e00 bazel-0.2.3-installer-linux-x86_64.sh +97dd53414e12da1c9a8a23911ebe732b4b278295ed6b226a5ddee4cd6775a01b bazel-0.3.1-installer-linux-x86_64.sh diff --git a/.travis.yml b/.travis.yml index 9eaa1bc8a..81d478ff2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,12 +16,11 @@ jdk: - oraclejdk8 before_install: - - wget 'https://github.com/bazelbuild/bazel/releases/download/0.2.3/bazel-0.2.3-installer-linux-x86_64.sh' + - wget 
'https://github.com/bazelbuild/bazel/releases/download/0.3.1/bazel-0.3.1-installer-linux-x86_64.sh' - sha256sum -c .bazel-installer-linux-x86_64.sh.sha256 - - chmod +x bazel-0.2.3-installer-linux-x86_64.sh - - ./bazel-0.2.3-installer-linux-x86_64.sh --user + - chmod +x bazel-0.3.1-installer-linux-x86_64.sh + - ./bazel-0.3.1-installer-linux-x86_64.sh --user - mv .bazelrc.travis .bazelrc - - cat ~/.bazelrc >> .bazelrc script: - bash test_run.sh diff --git a/README.md b/README.md index 8dffa9982..abbde3015 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ and `scala_test`. ## Getting started In order to use `scala_library`, `scala_macro_library`, and `scala_binary`, -you must have bazel 0.2.3 and add the following to your WORKSPACE file: +you must have bazel 0.3.1 and add the following to your WORKSPACE file: ```python git_repository( @@ -27,6 +27,12 @@ git_repository( remote = "https://github.com/bazelbuild/rules_scala.git", commit = "7b891adb975b4e3e6569b763d39ab6e9234196c9", # update this as needed ) +git_repository( + name = "io_bazel", + remote = "git://github.com/bazelbuild/bazel.git", + tag = "0.3.1", +) + load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories") scala_repositories() ``` @@ -40,6 +46,13 @@ load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library", "scala_binary", ``` You may wish to have these rules loaded by default using bazel's prelude. You can add the above to the file `tools/build_rules/prelude_bazel` in your repo (don't forget to have a, possibly empty, BUILD file there) and then it will be automatically prepended to every BUILD file in the workspace. +To run with a persistent worker (much faster), you need to add +```python +build --strategy=Scalac=worker +test --strategy=Scalac=worker +``` +to your command line, or to enable by default for building/testing add it to your .bazelrc. 
+ [scala]: http://www.scala-lang.org/ diff --git a/WORKSPACE b/WORKSPACE index f4c77007a..4ab995806 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -18,3 +18,10 @@ maven_jar( name = "org_psywerx_hairyfotr__linter", artifact = scala_mvn_artifact("org.psywerx.hairyfotr:linter:0.1.13"), sha1 = "e5b3e2753d0817b622c32aedcb888bcf39e275b4") + + +git_repository( + name = "io_bazel", + remote = "git://github.com/bazelbuild/bazel.git", + commit = "0.3.1", +) diff --git a/scala/scala.bzl b/scala/scala.bzl index 69d458209..2a9eae012 100644 --- a/scala/scala.bzl +++ b/scala/scala.bzl @@ -21,360 +21,379 @@ _srcjar_filetype = FileType([".srcjar"]) # TODO is there a way to derive this from the above? _scala_srcjar_filetype = FileType([".scala", ".srcjar", ".java"]) + def _adjust_resources_path(path): - dir_1, dir_2, rel_path = path.partition("resources") - if rel_path: - return dir_1 + dir_2, rel_path - (dir_1,dir_2,rel_path) = path.partition("java") - if rel_path: - return dir_1 + dir_2, rel_path - return "", path + # Here we are looking to find out the offset of this resource inside + # any resources folder. 
We want to return the root to the resources folder + # and then the sub path inside it + dir_1, dir_2, rel_path = path.partition("resources") + if rel_path: + return dir_1 + dir_2, rel_path + + # The same as the above but just looking for java + (dir_1, dir_2, rel_path) = path.partition("java") + if rel_path: + return dir_1 + dir_2, rel_path + return "", path + def _add_resources_cmd(ctx, dest): - res_cmd = "" - for f in ctx.files.resources: - c_dir, res_path = _adjust_resources_path(f.path) - target_path = res_path - if res_path[0] != "/": - target_path = "/" + res_path - res_cmd += "\nmkdir -p $(dirname {out_dir}{target_path})\ncp {c_dir}{res_path} {out_dir}{target_path}".format( - out_dir=dest, - res_path=res_path, - target_path=target_path, - c_dir=c_dir) - return res_cmd + res_cmd = "" + for f in ctx.files.resources: + c_dir, res_path = _adjust_resources_path(f.path) + target_path = res_path + if res_path[0] != "/": + target_path = "/" + res_path + res_cmd += """ + mkdir -p $(dirname {out_dir}{target_path}) + cp {c_dir}{res_path} {out_dir}{target_path} + """.format( + out_dir=dest, + res_path=res_path, + target_path=target_path, + c_dir=c_dir) + return res_cmd + def _get_jar_path(paths): - for p in paths: - path = p.path - if path.endswith("/jar_deploy.jar"): - return path - return None + for p in paths: + path = p.path + if path.endswith("/binary_deploy.jar"): + return path + return None + def _build_nosrc_jar(ctx, buildijar): - cp_resources = _add_resources_cmd(ctx, "{out}_tmp".format(out=ctx.outputs.jar.path)) - ijar_cmd = "" - if buildijar: - ijar_cmd = "\ncp {out} {ijar_out}".format( - out=ctx.outputs.jar.path, - ijar_out=ctx.outputs.ijar.path) - cmd = """ -rm -rf {out}_tmp -set -e -mkdir -p {out}_tmp -# copy any resources -{cp_resources} -{java} -jar {jar} -m {manifest} {out} -""" + ijar_cmd - cmd = cmd.format( - cp_resources=cp_resources, - out=ctx.outputs.jar.path, - manifest=ctx.outputs.manifest.path, - java=ctx.file._java.path, - 
jar=_get_jar_path(ctx.files._jar)) - outs = [ctx.outputs.jar] - if buildijar: - outs.extend([ctx.outputs.ijar]) - ctx.action( - inputs= - ctx.files.resources + - ctx.files._jdk + - ctx.files._jar + - [ctx.outputs.manifest, ctx.file._java], - outputs=outs, - command=cmd, - progress_message="scala %s" % ctx.label, - arguments=[]) + cp_resources = _add_resources_cmd(ctx, "{out}_tmp".format( + out=ctx.outputs.jar.path) + ) + ijar_cmd = "" + if buildijar: + ijar_cmd = "\ncp {out} {ijar_out}".format( + out=ctx.outputs.jar.path, + ijar_out=ctx.outputs.ijar.path) + cmd = """ + rm -rf {out}_tmp + set -e + mkdir -p {out}_tmp + # copy any resources + {cp_resources} + {java} -jar {jar} -m {manifest} {out} + """ + ijar_cmd + cmd = cmd.format( + cp_resources=cp_resources, + out=ctx.outputs.jar.path, + manifest=ctx.outputs.manifest.path, + java=ctx.file._java.path, + jar=_get_jar_path(ctx.files._jar)) + outs = [ctx.outputs.jar] + if buildijar: + outs.extend([ctx.outputs.ijar]) + + inputs = ctx.files.resources + ctx.files._jdk + ctx.files._jar + [ + ctx.outputs.manifest, ctx.file._java + ] + + ctx.action( + inputs=inputs, + outputs=outs, + command=cmd, + progress_message="scala %s" % ctx.label, + arguments=[]) + def _collect_plugin_paths(plugins): - paths = set() - for p in plugins: - if hasattr(p, "path"): - paths += [p.path] - elif hasattr(p, "scala"): - paths += [p.scala.outputs.jar.path] - elif hasattr(p, "java"): - paths += [j.class_jar.path for j in p.java.outputs.jars] - # support http_file pointed at a jar. http_jar uses ijar, which breaks scala macros - elif hasattr(p, "files"): - paths += [f.path for f in p.files] - return paths + paths = set() + for p in plugins: + if hasattr(p, "path"): + paths += [p.path] + elif hasattr(p, "scala"): + paths += [p.scala.outputs.jar.path] + elif hasattr(p, "java"): + paths += [j.class_jar.path for j in p.java.outputs.jars] + # support http_file pointed at a jar. 
http_jar uses ijar, + # which breaks scala macros + elif hasattr(p, "files"): + paths += [f.path for f in p.files] + return paths + def _compile(ctx, _jars, dep_srcjars, buildijar): - jars = _jars - cp_resources = _add_resources_cmd(ctx, "{out}_tmp".format(out=ctx.outputs.jar.path)) - ijar_cmd = "" - if buildijar: - ijar_cmd = "\n{ijar} {out} {ijar_out}".format( - ijar=ctx.file._ijar.path, - out=ctx.outputs.jar.path, - ijar_out=ctx.outputs.ijar.path) - - java_srcs = _java_filetype.filter(ctx.files.srcs) - sources = _scala_filetype.filter(ctx.files.srcs) + java_srcs - srcjars = _srcjar_filetype.filter(ctx.files.srcs) - all_srcjars = set(srcjars + list(dep_srcjars)) - # look for any plugins: - plugins = _collect_plugin_paths(ctx.attr.plugins) - plugin_arg = "" - if (len(plugins) > 0): - plugin_arg = " ".join(["-Xplugin:%s" % p for p in plugins]) - - # Set up the args to pass to scalac because they can be too long for bash - scalac_args_file = ctx.new_file(ctx.outputs.jar, ctx.label.name + "_scalac_args") - scalac_args = """{scala_opts} {plugin_arg} -classpath "{jars}" -d {out}_tmp {files}""".format( - scala_opts=" ".join(ctx.attr.scalacopts), - plugin_arg = plugin_arg, - jars=":".join([j.path for j in jars]), - files=" ".join([f.path for f in sources]), - out=ctx.outputs.jar.path - ) - ctx.file_action(output = scalac_args_file, content = scalac_args) - javac_sources_cmd = "" - compile_java_srcs = len(java_srcs) != 0 - if (compile_java_srcs): - # Set up the args to pass to javac because they can be too long for bash - javac_args_file = ctx.new_file(ctx.outputs.jar, ctx.label.name + "_javac_args") - javac_args = """{javac_opts} -classpath "{jars}:{out}_tmp" -d {out}_tmp {files}""".format( - javac_opts=" ".join(ctx.attr.javacopts), - jars=":".join([j.path for j in jars]), - files=" ".join([f.path for f in java_srcs]), - out=ctx.outputs.jar.path - ) - ctx.file_action(output = javac_args_file, content = javac_args) - javac_sources_cmd = """ - cat {javac_args} 
{{out}}_args/files_from_jar > {{out}}_args/java_args - {javac} {{jvm_flags}} @{{out}}_args/java_args""".format(javac_args = javac_args_file.path,javac=ctx.file._javac.path) - - srcjar_cmd = "" - if len(all_srcjars) > 0: - srcjar_cmd = "\nmkdir -p {out}_tmp_expand_srcjars\n" - for srcjar in all_srcjars: - # Note: this is double escaped because we need to do one format call - # per each srcjar, but then we are going to include this in the bigger format - # call that is done to generate the full command - - #TODO would like to be able to switch >/dev/null, -v, etc based on the user's settings - srcjar_cmd += """ -unzip -o {srcjar} -d {{out}}_tmp_expand_srcjars >/dev/null -""".format(srcjar = srcjar.path) - srcjar_cmd += """find {out}_tmp_expand_srcjars -type f -name "*.scala" > {out}_args/files_from_jar\n""" - - cmd = """ -rm -rf {out}_args -rm -rf {out}_tmp -rm -rf {out}_tmp_expand_srcjars -set -e -mkdir -p {out}_args -touch {out}_args/files_from_jar -mkdir -p {out}_tmp""" + srcjar_cmd + """ -cat {scalac_args} {out}_args/files_from_jar > {out}_args/scala_args -env JAVACMD={java} {scalac} {jvm_flags} @{out}_args/scala_args""" + javac_sources_cmd + """ -# add any resources -{cp_resources} -{java} -jar {jar} -m {manifest} {out} {out}_tmp -rm -rf {out}_args -rm -rf {out}_tmp -rm -rf {out}_tmp_expand_srcjars -""" + ijar_cmd - cmd = cmd.format( - cp_resources=cp_resources, - java=ctx.file._java.path, - jvm_flags=" ".join(["-J" + flag for flag in ctx.attr.jvm_flags]), - scalac=ctx.file._scalac.path, - scalac_args=scalac_args_file.path, - out=ctx.outputs.jar.path, - manifest=ctx.outputs.manifest.path, - jar=_get_jar_path(ctx.files._jar), - ijar=ctx.file._ijar.path, + jars = _jars + ijar_output_path = "" + ijar_cmd_path = "" + if buildijar: + ijar_output_path = ctx.outputs.ijar.path + ijar_cmd_path = ctx.file._ijar.path + + java_srcs = _java_filetype.filter(ctx.files.srcs) + sources = _scala_filetype.filter(ctx.files.srcs) + java_srcs + srcjars = 
_srcjar_filetype.filter(ctx.files.srcs) + all_srcjars = set(srcjars + list(dep_srcjars)) + # look for any plugins: + plugins = _collect_plugin_paths(ctx.attr.plugins) + plugin_arg = ",".join(list(plugins)) + + compiler_classpath = '{scalalib}:{scalacompiler}:{scalareflect}:{jars}'.format( # noqa + scalalib=ctx.file._scalalib.path, + scalacompiler=ctx.file._scalacompiler.path, + scalareflect=ctx.file._scalareflect.path, + jars=":".join([j.path for j in jars]), ) - outs = [ctx.outputs.jar] - if buildijar: - outs.extend([ctx.outputs.ijar]) - ins = (list(jars) + - list(dep_srcjars) + - list(srcjars) + - list(sources) + - ctx.files.srcs + - ctx.files.plugins + - ctx.files.resources + - ctx.files._jdk + - ctx.files._jar + - ctx.files._scalasdk + - [ctx.outputs.manifest, - ctx.file._ijar, - ctx.file._scalac, - ctx.file._java, - scalac_args_file]) - if compile_java_srcs: - ins.extend([javac_args_file]) - ctx.action( - inputs=ins, - outputs=outs, - command=cmd, - mnemonic="Scalac", - progress_message="scala %s" % ctx.label, - arguments=[]) -def _compile_or_empty(ctx, jars, srcjars, buildijar): - # We assume that if a srcjar is present, it is not empty - if len(ctx.files.srcs) + len(srcjars) == 0: - _build_nosrc_jar(ctx, buildijar) - # no need to build ijar when empty - return struct(ijar=ctx.outputs.jar, class_jar=ctx.outputs.jar) - else: - _compile(ctx, jars, srcjars, buildijar) - ijar = None + scalac_args = """ +Classpath: {cp} +EnableIjar: {enableijar} +Files: {files} +IjarCmdPath: {ijar_cmd_path} +IjarOutput: {ijar_out} +JarOutput: {out} +JavacOpts: {javac_opts} +JavacPath: {javac_path} +JavaFiles: {java_files} +JvmFlags: {jvm_flags} +Manifest: {manifest} +Plugins: {plugin_arg} +ResourceDests: {resource_dest} +ResourceSrcs: {resource_src} +ScalacOpts: {scala_opts} +SourceJars: {srcjars} +""".format( + out=ctx.outputs.jar.path, + manifest=ctx.outputs.manifest.path, + scala_opts=",".join(ctx.attr.scalacopts), + plugin_arg=plugin_arg, + cp=compiler_classpath, + 
files=",".join([f.path for f in sources]), + enableijar=buildijar, + ijar_out=ijar_output_path, + ijar_cmd_path=ijar_cmd_path, + srcjars=",".join([f.path for f in all_srcjars]), + javac_opts=" ".join(ctx.attr.javacopts), + javac_path=ctx.file._javac.path, + java_files=",".join([f.path for f in java_srcs]), + jvm_flags=" ".join(["-J" + flag for flag in ctx.attr.jvm_flags]), + resource_src=",".join([f.path for f in ctx.files.resources]), + resource_dest=",".join( + [_adjust_resources_path(f.path)[1] for f in ctx.files.resources] + ), + ) + argfile = ctx.new_file( + ctx.outputs.jar, + "%s_worker_input" % ctx.label.name + ) + ctx.file_action(output=argfile, content=scalac_args) + + outs = [ctx.outputs.jar] if buildijar: - ijar = ctx.outputs.ijar + outs.extend([ctx.outputs.ijar]) + ins = (list(jars) + + list(dep_srcjars) + + list(srcjars) + + list(sources) + + ctx.files.srcs + + ctx.files.plugins + + ctx.files.resources + + ctx.files._jdk + + ctx.files._scalasdk + + [ctx.outputs.manifest, + ctx.file._ijar, + ctx.file._java, + argfile]) + ctx.action( + inputs=ins, + outputs=outs, + executable=ctx.executable._scalac, + mnemonic="Scalac", + progress_message="scala %s" % ctx.label, + execution_requirements={"supports-workers": "1"}, + # when we run with a worker, the `@argfile.path` is removed and passed + # line by line as arguments in the protobuf. In that case, + # the rest of the arguments are passed to the process that + # starts up and stays resident. + + # In either case (worker or not), they will be jvm flags which will + # be correctly handled since the executable is a jvm app that will + # consume the flags on startup. 
+ + arguments=list(ctx.attr.jvm_flags) + ["@" + argfile.path], + ) + + +def _compile_or_empty(ctx, jars, srcjars, buildijar): + # We assume that if a srcjar is present, it is not empty + if len(ctx.files.srcs) + len(srcjars) == 0: + _build_nosrc_jar(ctx, buildijar) + # no need to build ijar when empty + return struct(ijar=ctx.outputs.jar, class_jar=ctx.outputs.jar) else: - # macro code needs to be available at compile-time, so set ijar == jar - ijar = ctx.outputs.jar - return struct(ijar=ijar, class_jar=ctx.outputs.jar) + _compile(ctx, jars, srcjars, buildijar) + ijar = None + if buildijar: + ijar = ctx.outputs.ijar + else: + # macro code needs to be available at compile-time, + # so set ijar == jar + ijar = ctx.outputs.jar + return struct(ijar=ijar, class_jar=ctx.outputs.jar) def _build_deployable(ctx, jars): - # the _jar_bin program we call below expects one optional argument: - # -m is the argument to pass a manifest to our jar creation code - # the next argument is the path manifest itself - # the manifest is set up by methods that call this function (see usages - # of _build_deployable and note that they always first call write_manifest). 
- # that is what creates the manifest content - # - # following the manifest argument and the manifest, the next argument is - # the output path for the target jar - # - # finally all the rest of the arguments are jars to be flattened into one - # fat jar - args = ["-m", ctx.outputs.manifest.path, ctx.outputs.deploy_jar.path] - args.extend([j.path for j in jars]) - ctx.action( - inputs=list(jars) + [ctx.outputs.manifest], - outputs=[ctx.outputs.deploy_jar], - executable=ctx.executable._jar_bin, - mnemonic="ScalaDeployJar", - progress_message="scala deployable %s" % ctx.label, - arguments=args) + # the _jar_bin program we call below expects one optional argument: + # -m is the argument to pass a manifest to our jar creation code + # the next argument is the path manifest itself + # the manifest is set up by methods that call this function (see usages + # of _build_deployable and note that they always first call write_manifest) + # that is what creates the manifest content + # + # following the manifest argument and the manifest, the next argument is + # the output path for the target jar + # + # finally all the rest of the arguments are jars to be flattened into one + # fat jar + args = ["-m", ctx.outputs.manifest.path, ctx.outputs.deploy_jar.path] + args.extend([j.path for j in jars]) + ctx.action( + inputs=list(jars) + [ctx.outputs.manifest], + outputs=[ctx.outputs.deploy_jar], + executable=ctx.executable._jar_bin, + mnemonic="ScalaDeployJar", + progress_message="scala deployable %s" % ctx.label, + arguments=args) def write_manifest(ctx): - # TODO(bazel-team): I don't think this classpath is what you want - manifest = "Class-Path: %s\n" % ctx.file._scalalib.path - if getattr(ctx.attr, "main_class", ""): - manifest += "Main-Class: %s\n" % ctx.attr.main_class + # TODO(bazel-team): I don't think this classpath is what you want + manifest = "Class-Path: %s\n" % ctx.file._scalalib.path + if getattr(ctx.attr, "main_class", ""): + manifest += "Main-Class: %s\n" % 
ctx.attr.main_class + + ctx.file_action( + output=ctx.outputs.manifest, + content=manifest) - ctx.file_action( - output = ctx.outputs.manifest, - content = manifest) def _write_launcher(ctx, jars): - classpath = ':'.join(["$0.runfiles/%s/%s" % (ctx.workspace_name, f.short_path) for f in jars]) - content = """#!/bin/bash -export CLASSPATH={classpath} -$0.runfiles/{repo}/{java} {name} "$@" -""".format( - repo=ctx.workspace_name, - java=ctx.file._java.short_path, - name=ctx.attr.main_class, - deploy_jar=ctx.outputs.jar.path, - classpath=classpath, - ) - ctx.file_action( - output=ctx.outputs.executable, - content=content) + classpath = ':'.join( + ["$0.runfiles/%s/%s" % (ctx.workspace_name, f.short_path) for f in jars] + ) + + content = """#!/bin/bash + export CLASSPATH={classpath} + $0.runfiles/{repo}/{java} {name} "$@" + """.format( + repo=ctx.workspace_name, + java=ctx.file._java.short_path, + name=ctx.attr.main_class, + deploy_jar=ctx.outputs.jar.path, + classpath=classpath, + ) + ctx.file_action( + output=ctx.outputs.executable, + content=content) + def _write_test_launcher(ctx, jars): - if len(ctx.attr.suites) != 0: - print("suites attribute is deprecated. All scalatest test suites are run") + if len(ctx.attr.suites) != 0: + print( + "suites attribute is deprecated. 
All scalatest test suites are run" + ) - content = """#!/bin/bash + content = """#!/bin/bash {java} -cp {cp} {name} {args} -C io.bazel.rules.scala.JUnitXmlReporter "$@" """ - content = content.format( + content = content.format( java=ctx.file._java.short_path, cp=":".join([j.short_path for j in jars]), name=ctx.attr.main_class, args="-R \"{path}\" -oWDF".format(path=ctx.outputs.jar.short_path)) - ctx.file_action( + ctx.file_action( output=ctx.outputs.executable, content=content) -def collect_srcjars(targets): - srcjars = set() - for target in targets: - if hasattr(target, "srcjars"): - srcjars += [target.srcjars.srcjar] - return srcjars -def _collect_jars(targets): - """Compute the runtime and compile-time dependencies from the given targets""" - compile_jars = set() - runtime_jars = set() - ijars = set() - for target in targets: - found = False - if hasattr(target, "scala"): - if hasattr(target.scala.outputs, "ijar"): - compile_jars += [target.scala.outputs.ijar] - compile_jars += target.scala.transitive_compile_exports - runtime_jars += target.scala.transitive_runtime_deps - runtime_jars += target.scala.transitive_runtime_exports - found = True - if hasattr(target, "java"): - # see JavaSkylarkApiProvider.java, this is just the compile-time deps - # this should be improved in bazel 0.1.5 to get outputs.ijar - # compile_jars += [target.java.outputs.ijar] - compile_jars += target.java.transitive_deps - runtime_jars += target.java.transitive_runtime_deps - found = True - if not found: - # support http_file pointed at a jar. 
http_jar uses ijar, which breaks scala macros - runtime_jars += target.files - compile_jars += target.files - return struct(compiletime = compile_jars, runtime = runtime_jars) +def collect_srcjars(targets): + srcjars = set() + for target in targets: + if hasattr(target, "srcjars"): + srcjars += [target.srcjars.srcjar] + return srcjars -def _lib(ctx, non_macro_lib): - # This will be used to pick up srcjars from non-scala library - # targets (like thrift code generation) - srcjars = collect_srcjars(ctx.attr.deps) - jars = _collect_jars(ctx.attr.deps) - (cjars, rjars) = (jars.compiletime, jars.runtime) - write_manifest(ctx) - outputs = _compile_or_empty(ctx, cjars, srcjars, non_macro_lib) - rjars += [ctx.outputs.jar] - rjars += _collect_jars(ctx.attr.runtime_deps).runtime +def _collect_jars(targets): + """Compute the runtime and compile-time dependencies from the given targets""" # noqa + compile_jars = set() + runtime_jars = set() + for target in targets: + found = False + if hasattr(target, "scala"): + if hasattr(target.scala.outputs, "ijar"): + compile_jars += [target.scala.outputs.ijar] + compile_jars += target.scala.transitive_compile_exports + runtime_jars += target.scala.transitive_runtime_deps + runtime_jars += target.scala.transitive_runtime_exports + found = True + if hasattr(target, "java"): + # see JavaSkylarkApiProvider.java, + # this is just the compile-time deps + # this should be improved in bazel 0.1.5 to get outputs.ijar + # compile_jars += [target.java.outputs.ijar] + compile_jars += target.java.transitive_deps + runtime_jars += target.java.transitive_runtime_deps + found = True + if not found: + # support http_file pointed at a jar. 
http_jar uses ijar, + # which breaks scala macros + runtime_jars += target.files + compile_jars += target.files + + return struct(compiletime = compile_jars, runtime = runtime_jars) - rjars += [ctx.file._scalalib, ctx.file._scalareflect] - if not non_macro_lib: - # macros need the scala reflect jar - rjars += [ctx.file._scalareflect] - _build_deployable(ctx, rjars) - rule_outputs = struct(ijar=outputs.ijar, class_jar=outputs.class_jar, deploy_jar=ctx.outputs.deploy_jar) +def _lib(ctx, non_macro_lib): + # This will be used to pick up srcjars from non-scala library + # targets (like thrift code generation) + srcjars = collect_srcjars(ctx.attr.deps) + jars = _collect_jars(ctx.attr.deps) + (cjars, rjars) = (jars.compiletime, jars.runtime) + write_manifest(ctx) + outputs = _compile_or_empty(ctx, cjars, srcjars, non_macro_lib) + + rjars += [ctx.outputs.jar] + rjars += _collect_jars(ctx.attr.runtime_deps).runtime + + rjars += [ctx.file._scalalib, ctx.file._scalareflect] + if not non_macro_lib: + # macros need the scala reflect jar + rjars += [ctx.file._scalareflect] + + _build_deployable(ctx, rjars) + rule_outputs = struct(ijar=outputs.ijar, class_jar=outputs.class_jar, deploy_jar=ctx.outputs.deploy_jar) + + texp = _collect_jars(ctx.attr.exports) + scalaattr = struct(outputs=rule_outputs, + transitive_runtime_deps=rjars, + transitive_compile_exports=texp.compiletime, + transitive_runtime_exports=texp.runtime + ) + runfiles = ctx.runfiles( + files=list(rjars), + collect_data=True) + + return struct( + files=set([ctx.outputs.jar]), # Here is the default output + scala=scalaattr, + runfiles=runfiles, + # This is a free monoid given to the graph for the purpose of + # extensibility. This is necessary when one wants to create + # new targets which want to leverage a scala_library. For example, + # new_target1 -> scala_library -> new_target2. 
There might be + # information that new_target2 needs to get from new_target1, + # but we do not want to ohave to change scala_library to pass + # this information through. extra_information allows passing + # this information through, and it is up to the new_targets + # to filter and make sense of this information. + extra_information=_collect_extra_information(ctx.attr.deps), + ) - texp = _collect_jars(ctx.attr.exports) - scalaattr = struct(outputs = rule_outputs, - transitive_runtime_deps = rjars, - transitive_compile_exports = texp.compiletime, - transitive_runtime_exports = texp.runtime - ) - runfiles = ctx.runfiles( - files = list(rjars), - collect_data = True) - return struct( - files = set([ctx.outputs.jar]), # Here is the default output - scala = scalaattr, - runfiles=runfiles, - # This is a free monoid given to the graph for the purpose of - # extensibility. This is necessary when one wants to create - # new targets which want to leverage a scala_library. For example, - # new_target1 -> scala_library -> new_target2. There might be - # information that new_target2 needs to get from new_target1, - # but we do not want to ohave to change scala_library to pass - # this information through. extra_information allows passing - # this information through, and it is up to the new_targets - # to filter and make sense of this information. 
- extra_information=_collect_extra_information(ctx.attr.deps), - ) def _collect_extra_information(targets): r = [] @@ -452,28 +471,36 @@ env JAVACMD=$0.runfiles/{repo}/{java} $0.runfiles/{repo}/{scala} {jvm_flags} -cl runfiles=runfiles) def _scala_test_impl(ctx): - deps = ctx.attr.deps - deps += [ctx.attr._scalatest_reporter] - jars = _collect_jars(deps) - (cjars, rjars) = (jars.compiletime, jars.runtime) - cjars += [ctx.file._scalareflect, ctx.file._scalatest, ctx.file._scalaxml] - rjars += [ctx.outputs.jar, ctx.file._scalalib, ctx.file._scalareflect, ctx.file._scalatest, ctx.file._scalaxml] - rjars += _collect_jars(ctx.attr.runtime_deps).runtime - _write_test_launcher(ctx, rjars) - return _scala_binary_common(ctx, cjars, rjars) + deps = ctx.attr.deps + deps += [ctx.attr._scalatest_reporter] + jars = _collect_jars(deps) + (cjars, rjars) = (jars.compiletime, jars.runtime) + cjars += [ctx.file._scalareflect, ctx.file._scalatest, ctx.file._scalaxml] + rjars += [ + ctx.outputs.jar, + ctx.file._scalalib, + ctx.file._scalareflect, + ctx.file._scalatest, + ctx.file._scalaxml + ] + rjars += _collect_jars(ctx.attr.runtime_deps).runtime + _write_test_launcher(ctx, rjars) + return _scala_binary_common(ctx, cjars, rjars) _implicit_deps = { "_ijar": attr.label(executable=True, default=Label("@bazel_tools//tools/jdk:ijar"), single_file=True, allow_files=True), "_scala": attr.label(executable=True, default=Label("@scala//:bin/scala"), single_file=True, allow_files=True), - "_scalac": attr.label(executable=True, default=Label("@scala//:bin/scalac"), single_file=True, allow_files=True), + "_scalac": attr.label(executable=True, default=Label("//src/java/io/bazel/rulesscala/scalac"), allow_files=True), "_scalalib": attr.label(default=Label("@scala//:lib/scala-library.jar"), single_file=True, allow_files=True), + "_scalareflect": attr.label(default=Label("@scala//:lib/scala-reflect.jar"), single_file=True, allow_files=True), + "_scalacompiler": 
attr.label(default=Label("@scala//:lib/scala-compiler.jar"), single_file=True, allow_files=True), "_scalaxml": attr.label(default=Label("@scala//:lib/scala-xml_2.11-1.0.4.jar"), single_file=True, allow_files=True), "_scalasdk": attr.label(default=Label("@scala//:sdk"), allow_files=True), "_scalareflect": attr.label(default=Label("@scala//:lib/scala-reflect.jar"), single_file=True, allow_files=True), "_java": attr.label(executable=True, default=Label("@bazel_tools//tools/jdk:java"), single_file=True, allow_files=True), "_javac": attr.label(executable=True, default=Label("@bazel_tools//tools/jdk:javac"), single_file=True, allow_files=True), - "_jar": attr.label(executable=True, default=Label("//src/java/io/bazel/rulesscala/jar:jar_deploy.jar"), allow_files=True), - "_jar_bin": attr.label(executable=True, default=Label("//src/java/io/bazel/rulesscala/jar")), + "_jar": attr.label(executable=True, default=Label("//src/java/io/bazel/rulesscala/jar:binary_deploy.jar"), allow_files=True), + "_jar_bin": attr.label(executable=True, default=Label("//src/java/io/bazel/rulesscala/jar:binary")), "_jdk": attr.label(default=Label("//tools/defaults:jdk"), allow_files=True), } diff --git a/scala/support/BUILD b/scala/support/BUILD index 3f29921ee..72c1a5939 100644 --- a/scala/support/BUILD +++ b/scala/support/BUILD @@ -1,7 +1,13 @@ load("//scala:scala.bzl", "scala_library") + +java_import( + name = "scala_xml", + jars = ["@scala//:lib/scala-xml_2.11-1.0.4.jar"] +) + scala_library(name = "test_reporter", srcs = ["JUnitXmlReporter.scala"], - deps = ["@scalatest//file"], + deps = ["@scalatest//file", ":scala_xml"], visibility = ["//visibility:public"], ) diff --git a/src/java/io/bazel/rulesscala/jar/BUILD b/src/java/io/bazel/rulesscala/jar/BUILD index 009207740..f8e6c90ed 100644 --- a/src/java/io/bazel/rulesscala/jar/BUILD +++ b/src/java/io/bazel/rulesscala/jar/BUILD @@ -1,5 +1,13 @@ -java_binary(name = "jar", +java_library(name = "jar", srcs = ["JarCreator.java", "JarHelper.java"], + 
visibility = ["//visibility:public"], +) + + +java_binary(name = "binary", + runtime_deps = [ + ":jar", + ], main_class = "io.bazel.rulesscala.jar.JarCreator", visibility = ["//visibility:public"], ) diff --git a/src/java/io/bazel/rulesscala/jar/JarCreator.java b/src/java/io/bazel/rulesscala/jar/JarCreator.java index fd2281dce..c98c3f6ab 100644 --- a/src/java/io/bazel/rulesscala/jar/JarCreator.java +++ b/src/java/io/bazel/rulesscala/jar/JarCreator.java @@ -173,10 +173,7 @@ public void execute() throws IOException { } } - /** - * A simple way to create Jar file using the JarCreator class. - */ - public static void main(String[] args) { + public static void buildJar(String[] args) throws IOException { if (args.length < 1) { System.err.println("usage: CreateJar [-m manifest] output [root directories]"); System.exit(1); @@ -203,15 +200,18 @@ public static void main(String[] args) { } createJar.setCompression(true); createJar.setNormalize(true); - long start = System.currentTimeMillis(); + createJar.execute(); + } + + /** + * A simple way to create Jar file using the JarCreator class. 
+ */ + public static void main(String[] args) { try { - createJar.execute(); + buildJar(args); } catch (Throwable e) { e.printStackTrace(); System.exit(1); } - long stop = System.currentTimeMillis(); - //System.err.println((stop - start) + "ms."); - //System.err.println(output); } } diff --git a/src/java/io/bazel/rulesscala/scalac/BUILD b/src/java/io/bazel/rulesscala/scalac/BUILD new file mode 100644 index 000000000..04b15ffbc --- /dev/null +++ b/src/java/io/bazel/rulesscala/scalac/BUILD @@ -0,0 +1,15 @@ +java_binary(name = "scalac", + main_class = "io.bazel.rulesscala.scalac.ScalaCInvoker", + srcs = ["ScalaCInvoker.java", "CompileOptions.java"], + + deps = [ + "@scala//:lib/scala-library.jar", + "@scala//:lib/scala-reflect.jar", + "@scala//:lib/scala-compiler.jar", + "@scala//:lib/scala-xml_2.11-1.0.4.jar", + "//src/java/io/bazel/rulesscala/jar", + "@io_bazel//src/main/protobuf:worker_protocol_java_proto", + "@io_bazel//third_party:guava", + ], + visibility = ["//visibility:public"], +) diff --git a/src/java/io/bazel/rulesscala/scalac/CompileOptions.java b/src/java/io/bazel/rulesscala/scalac/CompileOptions.java new file mode 100644 index 000000000..7fa1ea0b5 --- /dev/null +++ b/src/java/io/bazel/rulesscala/scalac/CompileOptions.java @@ -0,0 +1,139 @@ +package io.bazel.rulesscala.scalac; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class CompileOptions { + final public String outputName; + final public String manifestPath; + final public String[] scalaOpts; + final public String[] pluginArgs; + final public String classpath; + final public String[] files; + final public String[] sourceJars; + final public boolean iJarEnabled; + final public String ijarOutput; + final public String ijarCmdPath; + final public String[] javaFiles; + final public String javacPath; + final public String javacOpts; + final public String jvmFlags; + final public Map resourceFiles; + + public CompileOptions(List args) { 
+ Map argMap = buildArgMap(args); + + outputName = getOrError(argMap, "JarOutput", "Missing required arg JarOutput"); + manifestPath = getOrError(argMap, "Manifest", "Missing required arg Manifest"); + + scalaOpts = getCommaList(argMap, "ScalacOpts"); + pluginArgs = buildPluginArgs(getOrEmpty(argMap, "Plugins")); + classpath = getOrError(argMap, "Classpath", "Must supply the classpath arg"); + files = getCommaList(argMap, "Files"); + + javaFiles = getCommaList(argMap, "JavaFiles"); + javacPath = getOrEmpty(argMap, "JavacPath"); + javacOpts = getOrEmpty(argMap, "JavacOpts"); + jvmFlags = getOrEmpty(argMap, "JvmFlags"); + + sourceJars = getCommaList(argMap, "SourceJars"); + iJarEnabled = booleanGetOrFalse(argMap, "EnableIjar"); + if(iJarEnabled) { + ijarOutput = getOrError(argMap, "IjarOutput", "Missing required arg ijarOutput when ijar enabled"); + ijarCmdPath = getOrError(argMap, "IjarCmdPath", "Missing required arg ijarCmdPath when ijar enabled"); + } + else { + ijarOutput = null; + ijarCmdPath = null; + } + resourceFiles = getResources(argMap); + } + + private static Map getResources(Map args) { + String[] keys = getCommaList(args, "ResourceSrcs"); + String[] vals = getCommaList(args, "ResourceDests"); + if (keys.length != vals.length) + throw new RuntimeException(String.format("mismatch in resources: keys: %s vals: %s", + getOrEmpty(args, "ResourceSrcs"), getOrEmpty(args, "ResourceDests"))); + HashMap res = new HashMap(); + for(int idx = 0; idx < keys.length; idx++) { + res.put(keys[idx], vals[idx]); + } + return res; + } + + private static HashMap buildArgMap(List lines) { + HashMap hm = new HashMap(); + for(String line: lines) { + String[] lSplit = line.split(": "); + if(lSplit.length > 2) { + throw new RuntimeException("Bad arg, should have at most 1 space/2 spans. 
arg: " + line); + } + if(lSplit.length > 1) { + hm.put(lSplit[0], lSplit[1]); + } + } + return hm; + } + + private static String[] getCommaList(Map m, String k) { + if(m.containsKey(k)) { + String v = m.get(k); + if (v == "") { + return new String[]{}; + } + else { + return v.split(","); + } + } else { + return new String[]{}; + } + } + + private static String getOrEmpty(Map m, String k) { + if(m.containsKey(k)) { + return m.get(k); + } else { + return ""; + } + } + + private static String getOrError(Map m, String k, String errorMessage) { + if(m.containsKey(k)) { + return m.get(k); + } else { + throw new RuntimeException(errorMessage); + } + } + + private static boolean booleanGetOrFalse(Map m, String k) { + if(m.containsKey(k)) { + String v = m.get(k); + if(v.trim().equals("True") || v.trim().equals("true")) { + return true; + } + } + return false; + } + public static String[] buildPluginArgs(String packedPlugins) { + String[] pluginElements = packedPlugins.split(","); + int numPlugins = 0; + for(int i =0; i< pluginElements.length; i++){ + if(pluginElements[i].length() > 0) { + numPlugins += 1; + } + } + + String[] result = new String[numPlugins]; + int idx = 0; + for(int i =0; i< pluginElements.length; i++){ + if(pluginElements[i].length() > 0) { + result[idx] = "-Xplugin:" + pluginElements[i]; + idx += 1; + } + } + return result; + } +} diff --git a/src/java/io/bazel/rulesscala/scalac/ScalaCInvoker.java b/src/java/io/bazel/rulesscala/scalac/ScalaCInvoker.java new file mode 100644 index 000000000..0b9caeac9 --- /dev/null +++ b/src/java/io/bazel/rulesscala/scalac/ScalaCInvoker.java @@ -0,0 +1,329 @@ +// Copyright 2014 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package io.bazel.rulesscala.scalac; + +import com.google.common.collect.ImmutableSet; +import com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest; +import com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse; +import io.bazel.rulesscala.jar.JarCreator; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.InputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.lang.reflect.Field; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.Files; +import java.nio.file.FileSystems; +import java.nio.file.FileVisitResult; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; +import java.util.jar.JarOutputStream; +import java.util.List; +import java.util.Map.Entry; +import java.util.Map; +import java.util.TreeMap; +import scala.Console$; +import scala.tools.nsc.*; +import scala.tools.nsc.reporters.ConsoleReporter; +import static java.nio.charset.StandardCharsets.UTF_8; + +/** + * This is our entry point to producing a scala target + * this can act as one of Bazel's persistent workers. 
+ */ +public class ScalaCInvoker { + // Mostly lifted from bazel + private static void runPersistentWorker() throws IOException { + PrintStream originalStdOut = System.out; + PrintStream originalStdErr = System.err; + + while (true) { + try { + WorkRequest request = WorkRequest.parseDelimitedFrom(System.in); + if (request == null) { + break; + } + + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int exitCode = 0; + + try (PrintStream ps = new PrintStream(baos)) { + System.setOut(ps); + System.setErr(ps); + Console$.MODULE$.setErrDirect(ps); + Console$.MODULE$.setOutDirect(ps); + try { + processRequest(request.getArgumentsList()); + } catch (Exception e) { + e.printStackTrace(); + exitCode = 1; + } + } finally { + System.setOut(originalStdOut); + System.setErr(originalStdErr); + } + + WorkResponse.newBuilder() + .setOutput(baos.toString()) + .setExitCode(exitCode) + .build() + .writeDelimitedTo(System.out); + System.out.flush(); + } finally { + System.gc(); + } + } + } + + static private String[] extractSourceJars(CompileOptions opts, Path tmpParent) throws IOException { + List sourceFiles = new ArrayList(); + + for(String jarPath : opts.sourceJars) { + if (jarPath.length() > 0){ + Path tmpPath = Files.createTempDirectory(tmpParent, "tmp"); + sourceFiles.addAll(extractJar(jarPath, tmpPath.toString())); + } + } + String[] files = appendToString(opts.files, sourceFiles); + if(files.length == 0) { + throw new RuntimeException("Must have input files from either source jars or local files."); + } + return files; + } + + private static List extractJar(String jarPath, + String outputFolder) throws IOException, FileNotFoundException { + + List outputPaths = new ArrayList(); + JarFile jar = new JarFile(jarPath); + Enumeration e = jar.entries(); + while (e.hasMoreElements()) { + JarEntry file = (JarEntry) e.nextElement(); + File f = new File(outputFolder + File.separator + file.getName()); + + if (file.isDirectory()) { // if its a directory, create it + 
f.mkdirs(); + continue; + } + + File parent = f.getParentFile(); + parent.mkdirs(); + outputPaths.add(f); + + InputStream is = jar.getInputStream(file); // get the input stream + FileOutputStream fos = new FileOutputStream(f); + while (is.available() > 0) { // write contents of 'is' to 'fos' + fos.write(is.read()); + } + fos.close(); + is.close(); + } + return outputPaths; + } + + static String[] appendToString(String[] init, List rest) { + String[] tmp = new String[init.length + rest.size()]; + System.arraycopy(init, 0, tmp, 0, init.length); + int baseIdx = init.length; + for(T t : rest) { + tmp[baseIdx] = t.toString(); + baseIdx += 1; + } + return tmp; + } + public static String[] merge(String[]... arrays) { + int totalLength = 0; + for(String[] arr:arrays){ + totalLength += arr.length; + } + + String[] result = new String[totalLength]; + int offset = 0; + for(String[] arr:arrays){ + System.arraycopy(arr, 0, result, offset, arr.length); + offset += arr.length; + } + return result; + } + + /** + * This is the reporter field for scalac, which we want to access + */ + private static Field reporterField; + static { + try { + reporterField = Driver.class.getDeclaredField("reporter"); //NoSuchFieldException + reporterField.setAccessible(true); + } + catch (Exception ex) { + throw new RuntimeException("nope", ex); + } + } + + private static void processRequest(List args) throws Exception { + Path tmpPath = null; + try { + if (args.size() == 1 && args.get(0).startsWith("@")) { + args = Files.readAllLines(Paths.get(args.get(0).substring(1)), UTF_8); + } + CompileOptions ops = new CompileOptions(args); + + Path outputPath = FileSystems.getDefault().getPath(ops.outputName); + tmpPath = Files.createTempDirectory(outputPath.getParent(), "tmp"); + String[] constParams = { + "-classpath", + ops.classpath, + "-d", + tmpPath.toString() + }; + + String[] compilerArgs = merge( + ops.scalaOpts, + ops.pluginArgs, + constParams, + extractSourceJars(ops, outputPath.getParent())); + + 
MainClass comp = new MainClass(); + long start = System.currentTimeMillis(); + comp.process(compilerArgs); + long stop = System.currentTimeMillis(); + System.err.println("Compiler runtime: " + (stop - start) + "ms."); + + ConsoleReporter reporter = (ConsoleReporter) reporterField.get(comp); + + if (reporter.hasErrors()) { + reporter.printSummary(); + reporter.flush(); + throw new RuntimeException("Build failed"); + } else { + /** + * See if there are java sources to compile + */ + if (ops.javaFiles.length > 0) { + compileJavaSources(ops, tmpPath); + } + /** + * Copy the resources + */ + copyResources(ops.resourceFiles, tmpPath); + /** + * Now build the output jar + */ + String[] jarCreatorArgs = { + "-m", + ops.manifestPath, + outputPath.toString(), + tmpPath.toString() + }; + JarCreator.buildJar(jarCreatorArgs); + + /** + * Now build the output ijar + */ + if(ops.iJarEnabled) { + Process iostat = new ProcessBuilder() + .command(ops.ijarCmdPath, ops.outputName, ops.ijarOutput) + .inheritIO() + .start(); + int exitCode = iostat.waitFor(); + if(exitCode != 0) { + throw new RuntimeException("ijar process failed!"); + } + } + } + } + finally { + removeTmp(tmpPath); + } + } + + private static void compileJavaSources(CompileOptions ops, Path tmpPath) throws IOException, InterruptedException { + StringBuilder cmd = new StringBuilder(); + cmd.append(ops.javacPath); + if (ops.jvmFlags != "") cmd.append(ops.jvmFlags); + if (ops.javacOpts != "") cmd.append(ops.javacOpts); + + StringBuilder files = new StringBuilder(); + int cnt = 0; + for(String javaFile : ops.javaFiles) { + if (cnt > 0) files.append(" "); + files.append(javaFile); + cnt += 1; + } + Process iostat = new ProcessBuilder() + .command(cmd.toString(), + "-classpath", ops.classpath + ":" + tmpPath.toString(), + "-d", tmpPath.toString(), + files.toString()) + .inheritIO() + .start(); + int exitCode = iostat.waitFor(); + if(exitCode != 0) { + throw new RuntimeException("javac process failed!"); + } + } + private 
static void removeTmp(Path tmp) throws IOException { + if (tmp != null) { + Files.walkFileTree(tmp, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + }); + } + } + private static void copyResources(Map resources, Path dest) throws IOException { + for(Entry e : resources.entrySet()) { + Path source = Paths.get(e.getKey()); + String dstr = e.getValue(); + if (dstr.charAt(0) == '/') dstr = dstr.substring(1); + Path target = dest.resolve(dstr); + File tfile = target.getParent().toFile(); + tfile.mkdirs(); + Files.copy(source, target); + } + } + + public static void main(String[] args) { + try { + if (ImmutableSet.copyOf(args).contains("--persistent_worker")) { + runPersistentWorker(); + } + else { + processRequest(Arrays.asList(args)); + } + } + catch (Exception ex) { + throw new RuntimeException("nope", ex); + } + } +} diff --git a/src/scala/scripts/BUILD b/src/scala/scripts/BUILD index d064db9fd..66a47555f 100644 --- a/src/scala/scripts/BUILD +++ b/src/scala/scripts/BUILD @@ -1,9 +1,8 @@ -load("//scala:scala.bzl", "scala_binary") +load("//scala:scala.bzl", "scala_binary", "scala_library") -scala_binary( - name = "generator", +scala_library( + name = "generator_lib", srcs = ["TwitterScroogeGenerator.scala"], - main_class = "scripts.ScroogeGenerator", deps = [ "@scrooge_generator//jar", "@util_core//jar", @@ -13,7 +12,16 @@ scala_binary( visibility = ["//visibility:public"], ) +scala_binary( + name = "generator", + main_class = "scripts.ScroogeGenerator", + deps = [ + ":generator_lib", + ], + visibility = ["//visibility:public"], +) + java_import( name = "scala_parsers", jars = ["@scala//:lib/scala-parser-combinators_2.11-1.0.4.jar"], -) \ No newline at end of 
file +) diff --git a/src/scala/scripts/TwitterScroogeGenerator.scala b/src/scala/scripts/TwitterScroogeGenerator.scala index 060847550..68c83fdb5 100644 --- a/src/scala/scripts/TwitterScroogeGenerator.scala +++ b/src/scala/scripts/TwitterScroogeGenerator.scala @@ -168,4 +168,4 @@ object ScroogeGenerator { // Clean it out to be idempotent dirsToDelete.foreach { deleteDir(_) } } -} \ No newline at end of file +} diff --git a/test/BUILD b/test/BUILD index 2a1f33e06..0ef294289 100644 --- a/test/BUILD +++ b/test/BUILD @@ -150,7 +150,7 @@ scala_library( #Mix java scala scala_library( name = "MixJavaScalaLib", - srcs = glob(["src/main/scala/scala/test/mix_java_scala/*.scala"]) + + srcs = glob(["src/main/scala/scala/test/mix_java_scala/*.scala"]) + glob(["src/main/scala/scala/test/mix_java_scala/*.java"]), ) #needed to test java sources are compiled