diff --git a/.gitignore b/.gitignore index 3a209dda9..b0868fed8 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ hash2 .DS_store .bazel_cache .ijwb +.metals +unformatted-*.backup.scala diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 000000000..dfb81c64b --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,15 @@ +align.openParenCallSite = false +align.openParenDefnSite = false +continuationIndent.defnSite = 2 +danglingParentheses = true +docstrings = JavaDoc +importSelectors = singleLine +maxColumn = 120 +verticalMultiline.newlineBeforeImplicitKW = true +rewrite.redundantBraces.stringInterpolation = true +rewrite.rules = [ + RedundantParens, + PreferCurlyFors, + SortImports +] +unindentTopLevelOperators = false diff --git a/.travis.yml b/.travis.yml index 79d47e914..c164352dc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -48,20 +48,12 @@ jobs: <<: *linux env: TEST_SCRIPT=test_lint # Test - - <<: *linux - env: TEST_SCRIPT=test_rules_scala BAZEL_VERSION=1.1.0 - <<: *linux env: TEST_SCRIPT=test_rules_scala BAZEL_VERSION=2.0.0 - - <<: *linux - env: TEST_SCRIPT=test_reproducibility BAZEL_VERSION=1.1.0 - <<: *linux env: TEST_SCRIPT=test_reproducibility BAZEL_VERSION=2.0.0 - - <<: *osx - env: TEST_SCRIPT=test_rules_scala BAZEL_VERSION=1.1.0 - <<: *osx env: TEST_SCRIPT=test_rules_scala BAZEL_VERSION=2.0.0 - - <<: *osx - env: TEST_SCRIPT=test_reproducibility BAZEL_VERSION=1.1.0 - <<: *osx env: TEST_SCRIPT=test_reproducibility BAZEL_VERSION=2.0.0 diff --git a/BUILD b/BUILD new file mode 100644 index 000000000..e69de29bb diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b82797489..35a68e900 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,3 +46,10 @@ you can use the `lint.sh` script: Note that Skylint failures are ignored and that the fix command will modify your files in place. + +### Additional Tests to Run +Some changes require running additional tests which are not currently +part of the CI pipeline. 
+ +When editing code in `./third_party`, please run `./dangerous_test_thirdparty_version.sh` +but read the comments at the beginning of the file first. diff --git a/README.md b/README.md index 8e0a0f0d4..65fd3b22d 100644 --- a/README.md +++ b/README.md @@ -40,13 +40,14 @@ http_archive( sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e", ) -rules_scala_version="69d3c5b5d9b51537231746e93b4383384c9ebcf4" # update this as needed +rules_scala_version="a2f5852902f5b9f0302c727eead52ca2c7b6c3e2" # update this as needed http_archive( name = "io_bazel_rules_scala", strip_prefix = "rules_scala-%s" % rules_scala_version, type = "zip", url = "https://github.com/bazelbuild/rules_scala/archive/%s.zip" % rules_scala_version, + sha256 = "8c48283aeb70e7165af48191b0e39b7434b0368718709d1bced5c3781787d8e7", ) load("@io_bazel_rules_scala//scala:toolchains.bzl", "scala_register_toolchains") @@ -55,12 +56,12 @@ scala_register_toolchains() load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories") scala_repositories() -protobuf_version="09745575a923640154bcf307fba8aedff47f240a" -protobuf_version_sha256="416212e14481cff8fd4849b1c1c1200a7f34808a54377e22d7447efdf54ad758" +protobuf_version="3.11.3" +protobuf_version_sha256="cf754718b0aa945b00550ed7962ddc167167bd922b842199eeb6505e6f344852" http_archive( name = "com_google_protobuf", - url = "https://github.com/protocolbuffers/protobuf/archive/%s.tar.gz" % protobuf_version, + url = "https://github.com/protocolbuffers/protobuf/archive/v%s.tar.gz" % protobuf_version, strip_prefix = "protobuf-%s" % protobuf_version, sha256 = protobuf_version_sha256, ) @@ -83,10 +84,49 @@ build --worker_sandboxing ``` to your command line, or to enable by default for building/testing add it to your .bazelrc. +## Coverage support + +rules_scala supports coverage, but it's disabled by default. 
You need to enable it with an extra toolchain: + +``` +bazel coverage --extra_toolchains="@io_bazel_rules_scala//scala:code_coverage_toolchain" //... +``` + +It will produce several .dat files with results for your targets. + +You can also add more options to receive a combined coverage report: + +``` +bazel coverage \ + --extra_toolchains="@io_bazel_rules_scala//scala:code_coverage_toolchain" \ + --combined_report=lcov \ + --coverage_report_generator="@bazel_tools//tools/test/CoverageOutputGenerator/java/com/google/devtools/coverageoutputgenerator:Main" \ + //... +``` + +This should produce a single `bazel-out/_coverage/_coverage_report.dat` from all coverage files that are generated. + +You can extract information from your coverage reports with `lcov`: + +``` +# For a summary: +lcov --summary your-coverage-report.dat +# For details: +lcov --list your-coverage-report.dat +``` + +If you prefer an HTML report, then you can use `genhtml` provided also by the `lcov` package. + +Coverage support has been only tested with [ScalaTest](http://www.scalatest.org/). + +Please check [coverage.md](docs/coverage.md) for more details on coverage support. + ## Selecting Scala version -Rules scala supports all minor versions of Scala 2.11/2.12. By default `Scala 2.11.12` is used and to use another -version you need to +Rules scala supports the last two released minor versions for each of Scala 2.11 and 2.12. +Previous minor versions may work but are supported only on a best effort basis. + +By default `Scala 2.11.12` is used and to use another version you need to specify it when calling `scala_repositories`. `scala_repositories` takes a tuple `(scala_version, scala_version_jar_shas)` as a parameter where `scala_version` is the scala version and `scala_version_jar_shas` is a `dict` with `sha256` hashes for the maven artifacts `scala_compiler`, `scala_library`, and `scala_reflect`: @@ -112,7 +152,7 @@ for an example workspace using another scala version. 
| bazel | rules_scala gitsha | |--------|--------------------| | 2.0.0 | HEAD | -| 1.1.0 | HEAD | +| 1.1.0 | d681a952da74fc61a49fc3167b03548f42fc5dde | | 0.28.1 | bd0c388125e12f4f173648fc4474f73160a5c628 | | 0.23.x | ca655e5a330cbf1d66ce1d9baa63522752ec6011 | | 0.22.x | f3113fb6e9e35cb8f441d2305542026d98afc0a2 | @@ -121,6 +161,12 @@ for an example workspace using another scala version. | 0.14.x | 3b9ab9be31ac217d3337c709cb6bfeb89c8dcbb1 | | 0.13.x | 3c987b6ae8a453886759b132f1572c0efca2eca2 | +## Breaking changes + +If you're upgrading to a version containing one of these commits, you may encounter a breaking change where there was previously undefined behavior. + +- [929b318](https://github.com/bazelbuild/rules_scala/commit/929b3180cc099ba76859f5e88710d2ac087fbfa3) on 2020-01-30: Fixed a bug in the JMH benchmark build that was allowing build failures to creep through. Previously you were able to build a benchmark suite with JMH build errors. Running the benchmark suite would only run the successfully-built benchmarks. + ## Usage with [bazel-deps](https://github.com/johnynek/bazel-deps) Bazel-deps allows you to generate bazel dependencies transitively for maven artifacts. Generally we don't want bazel-deps to fetch @@ -150,31 +196,75 @@ replacements: "@io_bazel_rules_scala_scala_xml//:io_bazel_rules_scala_scala_xml" ``` -## [Experimental] Using strict-deps -Bazel pushes towards explicit and minimal dependencies to keep BUILD file hygiene and allow for targets to refactor their dependencies without fear of downstream breaking. -Currently rules_scala does this at the cost of having cryptic `scalac` errors when one mistakenly depends on a transitive dependency or, as more often the case for some, a transitive dependency is needed to [please scalac](https://github.com/scalacenter/advisoryboard/blob/master/proposals/009-improve-direct-dependency-experience.md) itself. 
-To learn more about the motivation of strict-deps itself you can visit this Bazel blog [post](https://blog.bazel.build/2017/06/28/sjd-unused_deps.html) on the subject. +## [Experimental] Dependency options + +There are a number of dependency options which can be set in the scala toolchain. These include `dependency_mode`, `strict_deps_mode`, `unused_dependency_checker_mode`, and `dependency_tracking_method`. + +### [Experimental] Recommended options + +We recommend one of the following sets of options -To use it just add `--strict_java_deps=WARN|ERROR` to your `bazel` invocation. -In both cases of `WARN` or `ERROR` you will get the following text in the event of a violation: +**Option A** +Accept the defaults, which might work well enough for you. The defaults are ``` -... -Target '//some_package:transitive_dependency' is used but isn't explicitly declared, please add it to the deps. -You can use the following buildozer command: -buildozer 'add deps //some_package:transitive_dependency' //some_other_package:transitive_dependency_user + dependency_mode = "direct", + strict_deps_mode = "off", + unused_dependency_checker_mode = "off", + dependency_tracking_method = "high-level", +``` +but you do not need to include this in the toolchain as they are the defaults. + +**Option B** +``` + dependency_mode = "plus-one", + strict_deps_mode = "error", + unused_dependency_checker_mode = "error", + dependency_tracking_method = "ast", ``` -Note that if you have `buildozer` installed you can just run the last line and have it automatically apply the fix for you. -**Caveats:** +Should the first option result in too much effort in handling build files and the like due to confusing dependencies and you becoming confused as to why some specific dependency is needed when the code being compiled never references it, consider this set of options. 
It will include both dependencies and dependencies of dependencies, which in practice is enough to stop almost all strange missing dependency errors at the cost of somewhat more incremental compile cost in certain cases. + +With these settings, we also will error on dependencies which are unneeded, and dependencies which should be included in `deps` due to being directly referenced in the code, but are not. + +The dependency tracking method `ast` is experimental but so far proves to be better than the default for computing the direct dependencies for `plus-one` mode code. In the future we hope to make this the default for `plus-one` mode and remove the option altogether. + +### [Experimental] Dependency mode + +There are three dependency modes. The reason for the multiple modes is that often `scalac` depends on jars which seem unnecessary at first glance. Hence, in order to reduce the need to please `scalac`, we provide the following options. +- `dependency_mode = "direct"` - only include direct dependencies during compilation; that is, those in the `deps` attribute +- `dependency_mode = "plus-one"` - only include `deps` and `deps` of `deps` during compilation. +- `dependency_mode = "transitive"` - all transitive dependencies are included during compilation. That is, `deps`, `deps` of `deps`, `deps` of `deps` of `deps`, and so on. + +Note when a dependency is included, that means its jars are included on the classpath, along with the jars of any targets that it exports. + +When using `direct` mode, there can be cryptic `scalac` errors when one mistakenly depends on a transitive dependency or, as more often the case for some, a transitive dependency is needed to [please scalac](https://github.com/scalacenter/advisoryboard/blob/master/proposals/009-improve-direct-dependency-experience.md) itself. 
+ +As one goes down the list, more dependencies are included which helps reduce confusing requirements to add `deps`, at the cost of increased incremental builds due to a greater number of dependencies. In practice, using `plus-one` deps results in almost no confusing `deps` entries required while still being relatively small in terms of the number of total dependencies included. + +**Caveats for `plus_one` and `transitive`:** -Note: Currently strict-deps is protected by a feature toggle but we're strongly considering making it the default behavior as `java_*` rules do. +Note: the last two issues are bugs which will be addressed by [https://github.com/bazelbuild/rules_scala/issues/839]. + +### [Experimental] Strict deps mode +We have a strict dependency checker which requires that any type referenced in the sources of a scala target should be included in that rule's deps. To learn about the motivation for this you can visit this Bazel blog [post](https://blog.bazel.build/2017/06/28/sjd-unused_deps.html) on the subject. -## [Experimental] Unused dependency checking +The option `strict_deps_mode` can be set to `off`, `warn`, or `error`. We highly recommend setting it to `error`. + +In both cases of `warn` or `error` you will get the following text in the event of a violation: +``` +... +Target '//some_package:transitive_dependency' is used but isn't explicitly declared, please add it to the deps. +You can use the following buildozer command: +buildozer 'add deps //some_package:transitive_dependency' //some_other_package:transitive_dependency_user +``` +Note that if you have `buildozer` installed you can just run the last line and have it automatically apply the fix for you. + +### [Experimental] Unused dependency checking To allow for better caching and faster builds we want to minimize the direct dependencies of our targets. Unused dependency checking makes sure that all targets specified as direct dependencies are actually used. 
If `unused_dependency_checker_mode` is set to either `error` or `warn` you will get the following message for any dependencies that are not used: @@ -184,12 +274,38 @@ You can use the following buildozer command: buildozer 'remove deps //some_package:unused_dep' //target:target ``` -Currently unused dependency checking and strict-deps can't be used simultaneously, if both are set only strict-deps will run. - Unused dependency checking can either be enabled globally for all targets using a scala toolchain or for individual targets using the -`unused_dependency_checker_mode` attribute. The feature is still experimental and there can thus be cases where it works incorrectly, -in these cases you can enable unused dependency checking globally through a toolchain and override individual misbehaving targets -using the attribute. +`unused_dependency_checker_mode` attribute. + +The feature is still experimental and there can thus be cases where it works incorrectly, in these cases you can enable unused dependency checking globally through a toolchain and disable reports of individual misbehaving targets with `unused_dependency_checker_ignored_targets` which is a list of labels. + +### [Experimental] Dependency tracking method + +The strict dependency tracker and unused dependency tracker need to track the used dependencies of a scala compilation unit. This toggle allows one to pick which method of tracking to use. + +- `dependency_tracking_method = "high-level"` - This is the existing tracking method which has false positives and negatives but generally works reasonably well for `direct` dependency mode. +- `dependency_tracking_method = "ast"` - This is a new tracking method which is being developed for `plus-one` and `transitive` dependency modes. It is still being developed and may have issues which need fixing. If you discover an issue, please submit a small repro of the problem. 
+ +Note we intend to eventually remove this flag and use `high-level` as the method for `direct` dependency mode, and `ast` as the method for `plus-one` and `transitive` dependency modes. + +In the meantime, if you are using `plus-one` or `transitive` dependency modes, you can use `ast` dependency tracking mode and see how well it works for you. + +### [Experimental] Turning on strict_deps_mode/unused_dependency_checker_mode + +It can be daunting to turn on strict deps checking or unused dependency mode checking on a large codebase. However, it need not be so bad if this is done in phases + +1. Have a default scala toolchain `A` with the option of interest set to `off` (the starting state) +2. Create a second scala toolchain `B` with the option of interest set to `warn` or `error`. Those who are working on enabling the flag can run with this toolchain as a command line argument to help identify issues and fix them. +3. Once all issues are fixed, change `A` to have the option of interest set to `error` and delete `B`. + +We recommend turning on strict_deps_mode first, as rule `A` might have an entry `B` in its `deps`, and `B` in turn depends on `C`. Meanwhile, the code of `A` only uses `C` but not `B`. Hence, the unused dependency checker, if on, will request that `B` be removed from `A`'s deps. But this will lead to a compile error as `A` can no longer depend on `C`. However, if strict dependency checking was on, then `A`'s deps is guaranteed to have `C` in it. + +### [Experimental] Migrating from deprecated configurations + +There are a few deprecated configuration methods which we will be removing in the near future. + +- `plus_one_deps_mode = "on"` on the scala toolchain. Instead, set `dependency_mode = "plus-one"` on the scala toolchain. `plus_one_deps_mode` will be removed in the future. +- The command line argument `--strict_java_deps=WARN/ERROR`. 
Instead, set `dependency_mode = "transitive"` on the scala toolchain, and if only a warning is desired set `strict_deps_mode = "warn"` on the toolchain. In the future, `strict_java_deps` will no longer affect how scala files are compiled. Note that `strict_java_deps` will still control java compilation. ## Advanced configurable rules To make the ruleset more flexible and configurable, we introduce a phase architecture. By using a phase architecture, where rule implementations are defined as a list of phases that are executed sequentially, functionality can easily be added (or modified) by adding (or swapping) phases. @@ -201,6 +317,9 @@ Phases provide 3 major benefits: See [Customizable Phase](docs/customizable_phase.md) for more info. +### Phase extensions + - [Scala Format](docs/phase_scalafmt.md) + ## Building from source Test & Build: ``` diff --git a/WORKSPACE b/WORKSPACE index 2dd6084e8..15fd16df2 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -17,8 +17,9 @@ buildifier_dependencies() load("//scala:scala.bzl", "scala_repositories") -scala_repositories() +scala_repositories(fetch_sources = True) +load("//scala:scala_cross_version.bzl", "default_maven_server_urls") load("//scala:scala_maven_import_external.bzl", "scala_maven_import_external") load("//twitter_scrooge:twitter_scrooge.bzl", "scrooge_scala_library", "twitter_scrooge") @@ -40,12 +41,20 @@ load("//specs2:specs2_junit.bzl", "specs2_junit_repositories") specs2_junit_repositories() +load("//scala/scalafmt:scalafmt_repositories.bzl", "scalafmt_default_config", "scalafmt_repositories") + +scalafmt_default_config() + +scalafmt_repositories() + load("//scala:scala_cross_version.bzl", "default_scala_major_version", "scala_mvn_artifact") -MAVEN_SERVER_URLS = [ - "https://jcenter.bintray.com", - "https://repo1.maven.org/maven2", -] +MAVEN_SERVER_URLS = default_maven_server_urls() + +# needed for the cross repo proto test +load("//test/proto_cross_repo_boundary:repo.bzl", "proto_cross_repo_boundary_repository") + 
+proto_cross_repo_boundary_repository() # test adding a scala jar: jvm_maven_import_external( @@ -100,9 +109,7 @@ scala_maven_import_external( artifact_sha256 = "4eb582bc99d96c8df92fc6f0f608fd123d278223982555ba16219bf8be9f75a9", fetch_sources = True, licenses = ["notice"], - server_urls = [ - "https://repo.maven.apache.org/maven2/", - ], + server_urls = MAVEN_SERVER_URLS, srcjar_sha256 = "5e586357a289f5fe896f7b48759e1c16d9fa419333156b496696887e613d7a19", ) @@ -126,6 +133,11 @@ filegroup( path = "third_party/test/new_local_repo", ) +local_repository( + name = "strip_resource_external_workspace", + path = "third_party/test/strip_resource_external_workspace", +) + load("@io_bazel_rules_scala//scala:toolchains.bzl", "scala_register_unused_deps_toolchains") scala_register_unused_deps_toolchains() @@ -140,10 +152,7 @@ scala_maven_import_external( artifact_sha256 = "972139718abc8a4893fa78cba8cf7b2c903f35c97aaf44fa3031b0669948b480", fetch_sources = True, licenses = ["notice"], # Apache 2.0 - server_urls = [ - "https://repo1.maven.org/maven2/", - "https://mirror.bazel.build/repo1.maven.org/maven2", - ], + server_urls = MAVEN_SERVER_URLS, srcjar_sha256 = "b186965c9af0a714632fe49b33378c9670f8f074797ab466f49a67e918e116ea", ) @@ -213,10 +222,7 @@ scala_maven_import_external( artifact = "org.springframework:spring-core:5.1.5.RELEASE", artifact_sha256 = "f771b605019eb9d2cf8f60c25c050233e39487ff54d74c93d687ea8de8b7285a", licenses = ["notice"], # Apache 2.0 - server_urls = [ - "https://repo1.maven.org/maven2/", - "https://mirror.bazel.build/repo1.maven.org/maven2", - ], + server_urls = MAVEN_SERVER_URLS, ) scala_maven_import_external( @@ -224,10 +230,7 @@ scala_maven_import_external( artifact = "org.springframework:spring-tx:5.1.5.RELEASE", artifact_sha256 = "666f72b73c7e6b34e5bb92a0d77a14cdeef491c00fcb07a1e89eb62b08500135", licenses = ["notice"], # Apache 2.0 - server_urls = [ - "https://repo1.maven.org/maven2/", - "https://mirror.bazel.build/repo1.maven.org/maven2", - ], + 
server_urls = MAVEN_SERVER_URLS, deps = [ "@org_springframework_spring_core", ], @@ -243,7 +246,5 @@ scala_maven_import_external( artifact_sha256 = "897460d4488b7dd6ac9198937d6417b36cc6ec8ab3693fdf2c532652f26c4373", fetch_sources = False, licenses = ["notice"], - server_urls = [ - "https://repo.maven.apache.org/maven2/", - ], + server_urls = MAVEN_SERVER_URLS, ) diff --git a/dangerous_test_thirdparty_version.sh b/dangerous_test_thirdparty_version.sh new file mode 100755 index 000000000..8309f5682 --- /dev/null +++ b/dangerous_test_thirdparty_version.sh @@ -0,0 +1,148 @@ +#!/usr/bin/env bash + +# This test is dangerous in that it modifies the root rules_scala +# WORKSPACE file. It attempts to restore the existing WORKSPACE file +# but there are risks that it may not be successful. + +# Hence when running this test one should be sure that they do not +# have changes in the WORKSPACE file which they cannot recover +# from if the file gets lost. + +# Note that due to performance constraints this is purposely not +# part of CI but when modifying the dependency_analyzer plugin, +# this should be run to ensure no regressions. + +set -e + +replace_workspace() { + sed -i '' \ + -e "s|scala_repositories(.*)|$1|" \ + $dir/WORKSPACE +} + +test_scala_version() { + SCALA_VERSION=$1 + + SCALA_VERSION_SHAS='' + SCALA_VERSION_SHAS+='"scala_compiler": "'$2'",' + SCALA_VERSION_SHAS+='"scala_library": "'$3'",' + SCALA_VERSION_SHAS+='"scala_reflect": "'$4'"' + + cp $dir/WORKSPACE $dir/WORKSPACE.bak + replace_workspace "scala_repositories((\"$SCALA_VERSION\", { $SCALA_VERSION_SHAS }))" + + bazel test //third_party/... + RESPONSE_CODE=$? + # Restore old behavior + rm $dir/WORKSPACE + mv $dir/WORKSPACE.bak $dir/WORKSPACE + exit $RESPONSE_CODE + +} + +dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +test_dir=$dir/test/shell +# shellcheck source=./test_runner.sh +. 
"${test_dir}"/test_runner.sh +runner=$(get_test_runner "${1:-local}") + + +# Latest versions of each major version + +$runner test_scala_version "2.12.10" \ + "cedc3b9c39d215a9a3ffc0cc75a1d784b51e9edc7f13051a1b4ad5ae22cfbc0c" \ + "0a57044d10895f8d3dd66ad4286891f607169d948845ac51e17b4c1cf0ab569d" \ + "56b609e1bab9144fb51525bfa01ccd72028154fc40a58685a1e9adcbe7835730" + + +$runner test_scala_version "2.11.12" \ + "3e892546b72ab547cb77de4d840bcfd05c853e73390fed7370a8f19acb0735a0" \ + "0b3d6fd42958ee98715ba2ec5fe221f4ca1e694d7c981b0ae0cd68e97baf6dce" \ + "6ba385b450a6311a15c918cf8688b9af9327c6104f0ecbd35933cfcd3095fe04" + + +# Earliest functioning versions of each major version + +$runner test_scala_version "2.12.0" \ + "c767f79f9c529cedba2805db910164d5846f1f6d02424c6d7aadfc42ae5dadf1" \ + "0e72ec4ea955d0bad7f1a494e8df95163f1631df0ce8ec4f9f278fe4d5fd1824" \ + "f56553934378e6d3e8bf1d759a51f8b2fc4c99370774f0aaedaab8619517ccbe" + + +# Note: 2.11.0-2.11.8 do not work due to an error unrelated to the plugin +# Error is that argument -Ypartial-unification is invalid +# Hence we start with 2.11.9. 
+$runner test_scala_version "2.11.9" \ + "fa01b414674cb38adc90ccf7a2042e82198dbb19dc41faccf0b5941ec08b1998" \ + "e435d5ef31cc12dbf66719b7d5ab677ad739c63c3e451757b9688dcbeda0a984" \ + "d932f809012d2cf832226b52a8bd82ed35b0257b1471c98968c0cd9ddf5327ab" + +# Intermediate versions of 2.12.x + +$runner test_scala_version "2.12.1" \ + "fdd7679ce8a3fb4e30fbb9eaf9451f42c042f5ac3b5497f0fd01c379a3df9b3f" \ + "9dab78f3f205a038f48183b2391f8a593235f794d8129a479e06af3e6bc50ef8" \ + "d8a2b9d6d78c7457a40e394dc0c4fa6d6244acf0d156bbbcb311a9d497b85eec" + + +$runner test_scala_version "2.12.2" \ + "b3d41a2887c114821878d45c1cb43cf7576c6854c7a303ef3d7be866dc44de34" \ + "dd668b609002b3578f2db83a1a684d706155bba2fc801cd411359fdd48218d00" \ + "98f9876d14b39fc7ec863c6b1b73c41a7653f886901b3ead0c4ca9215a688408" + + +$runner test_scala_version "2.12.3" \ + "99d28c90ef1b8569da76a7e04415184cc54b57221ee259ffc55b2fcd64fb2107" \ + "a8dd181a996dcc53a8c0bbb554bef7a1a9017ca09a377603167cf15444a85404" \ + "93db412846912a1c212dd83c36dd51aa0adb9f39bfa6c4c3d65682afc94366c4" + + +$runner test_scala_version "2.12.4" \ + "8b681302aac584f7234547eed04d2beeeb4a4f00032220e29d40943be6906a01" \ + "17824fcee4d3f46cfaa4da84ebad4f58496426c2b9bc9e341f812ab23a667d5d" \ + "ea70fe0e550e24d23fc52a18963b2be9c3b24283f4cb18b98327eb72746567cc" + + +$runner test_scala_version "2.12.5" \ + "a113394b6f857e69ef5d95b77114ec3f12cb0e14d9ede32de0bbc9c36d677455" \ + "c2636320d6479c82f2da6b8d76a820de9345a61327e648d4298a0048154fb87c" \ + "27036d7574afff72294f0e63d54aa13acd8b16b525d51475691118b835e626e7" + + +$runner test_scala_version "2.12.6" \ + "3023b07cc02f2b0217b2c04f8e636b396130b3a8544a8dfad498a19c3e57a863" \ + "f81d7144f0ce1b8123335b72ba39003c4be2870767aca15dd0888ba3dab65e98" \ + "ffa70d522fc9f9deec14358aa674e6dd75c9dfa39d4668ef15bb52f002ce99fa" + + +$runner test_scala_version "2.12.7" \ + "6e80ef4493127214d31631287a6789170bf6c9a771d6094acd8dc785e8970270" \ + "8f3dc6091db688464ad8b1ee6c7343d7aa5940d474ee8b90406c71e45dd74fc0" \ + 
"7427d7ee5771e8c36c1db5a09368fa3078f6eceb77d7c797a322a088c5dddb76" + + +$runner test_scala_version "2.12.8" \ + "f34e9119f45abd41e85b9e121ba19dd9288b3b4af7f7047e86dc70236708d170" \ + "321fb55685635c931eba4bc0d7668349da3f2c09aee2de93a70566066ff25c28" \ + "4d6405395c4599ce04cea08ba082339e3e42135de9aae2923c9f5367e957315a" + + +$runner test_scala_version "2.12.9" \ + "5fd556459fd189b820db7d7c0a644ea5f7e8e032c421f2ad47038e72247fbf65" \ + "364ee6ffd45f4fb8f9de40d1473d266ed5c199a44c1d4e2bdc895b1fbe35c75f" \ + "4285ba64044d1a62b19304fe3ddd0088da240649c9fe2a6571c989feda1d0829" + + +# Intermediate versions of 2.11.x + + +$runner test_scala_version "2.11.10" \ + "b70b748857213efe6f3a47d66acfa014c1bf51af3178b3a946eaae09f709fecc" \ + "14a520328ea4ca7f423b30154a54d3df0a531a9c51f5e98eda272c9821bc5331" \ + "fd896db4806875f538843ea24411e483ee4d0734710a108d0308ef108e83cf80" + + +$runner test_scala_version "2.11.11" \ + "5f929ed57c515ef9545497374eec88ffd129b8f04079dedb7e32107104325cdd" \ + "f2ba1550a39304e5d06caaddfa226cdf0a4cbccee189828fa8c1ddf1110c4872" \ + "73aef1a6ccabd3a3c15cc153ec846e12d0f045587a2a1d88cc1b49293f47cb20" \ No newline at end of file diff --git a/docs/coverage.md b/docs/coverage.md new file mode 100644 index 000000000..16ea4ef15 --- /dev/null +++ b/docs/coverage.md @@ -0,0 +1,61 @@ +## Coverage support + +### Running tests with coverage + +rules_scala supports coverage, but it's disabled by default. You need to enable it with an extra toolchain: + +``` +bazel coverage --extra_toolchains="@io_bazel_rules_scala//scala:code_coverage_toolchain" //... +``` + +It will produce several .dat files with results for your targets. + +You can also add more options to receive a combined coverage report: + +``` +bazel coverage \ + --extra_toolchains="@io_bazel_rules_scala//scala:code_coverage_toolchain" \ + --combined_report=lcov \ + --coverage_report_generator="@bazel_tools//tools/test/CoverageOutputGenerator/java/com/google/devtools/coverageoutputgenerator:Main" \ + //... 
+``` + +This should produce a single `bazel-out/_coverage/_coverage_report.dat` from all coverage files that are generated. + +### Processing coverage reports + +You can install `lcov` package (that supports the format Bazel uses for coverage reports) to have access to additional tools: + +``` +# Use your system package manager. E.g. on Ubuntu: +sudo apt install lcov +``` + +Having `lcov` package installed you can extract information from your coverage reports: + +``` +# For a summary: +lcov --summary your-coverage-report.dat +# For details: +lcov --list your-coverage-report.dat +``` + +If you prefer an HTML report, then you can use `genhtml` provided also by the `lcov` package. + +An example with a bit of ceremony: + +```bash +# Output html reports to a new directory. +destdir="my-coverage-reports" +mkdir -p ${destdir} + +# Generate HTML report from the results. +genhtml -o ${destdir} --ignore-errors source bazel-out/_coverage/_coverage_report.dat + +echo "coverage report at file://${destdir}/index.html" + +``` + +### Support for testing frameworks + +Coverage support has been only tested with [ScalaTest](http://www.scalatest.org/). \ No newline at end of file diff --git a/docs/customizable_phase.md b/docs/customizable_phase.md index 5d53ac359..6e625dd97 100644 --- a/docs/customizable_phase.md +++ b/docs/customizable_phase.md @@ -128,22 +128,25 @@ Currently phase architecture is used by 7 rules: - scala_junit_test - scala_repl -In each of the rule implementation, it calls `run_phases` and returns the information from `phase_final`, which groups the final returns of the rule. To prevent consumers from accidently removing `phase_final` from the list, we make it a non-customizable phase. +If you need to expose providers to downstream targets you need to return a dict of providers (provider-name to provider instance) from your phase under the `external_providers` attribute. 
+ +If you need to override a provider returned by a previous phase you can adjust your phase to be after it and return the same key from your phase and it will override it. +Note you probably have a good reason to override since you're meddling with the public return value of a different phase. + +In each of the rule implementations, it calls `run_phases` and returns the accumulated values of the `external_providers` dict declared by the phases. To make a new phase, you have to define a new `phase_.bzl` in `scala/private/phases/`. Function definition should have 2 arguments, `ctx` and `p`. You may expose the information for later phases by returning a `struct`. In some phases, there are multiple phase functions since different rules may take slightly different input arguemnts. You may want to re-expose the phase definition in `scala/private/phases/phases.bzl`, so it's more convenient to access in rule files. In the rule implementations, put your new phase in `builtin_customizable_phases` list. The phases are executed sequentially, the order matters if the new phase depends on previous phases. -If you are making new return fields of the rule, remember to modify `phase_final`. - ### Phase naming convention Files in `scala/private/phases/` - `phase_.bzl`: phase definition file Function names in `phase_.bzl` - - `phase__`: function with custom inputs of specific rule - - `phase_common_`: function without custom inputs - - `_phase_default_`: private function that takes `_args` for custom inputs + - `phase__`: function with custom inputs of specific rule + - `phase__common`: function without custom inputs + - `_phase__default`: private function that takes `_args` for custom inputs - `_phase_`: private function with the actual logic See `phase_compile.bzl` for example. 
diff --git a/docs/phase_scalafmt.md b/docs/phase_scalafmt.md new file mode 100644 index 000000000..28fdb92f2 --- /dev/null +++ b/docs/phase_scalafmt.md @@ -0,0 +1,43 @@ +# Phase Scalafmt + +## Contents +* [Overview](#overview) +* [How to set up](#how-to-set-up) + +## Overview +A phase extension `phase_scalafmt` can format Scala source code via [Scalafmt](https://scalameta.org/scalafmt/). + +## How to set up +Add this snippet to `WORKSPACE` +``` +load("//scala/scalafmt:scalafmt_repositories.bzl", "scalafmt_default_config", "scalafmt_repositories") +scalafmt_default_config() +scalafmt_repositories() +``` + +To add this phase to a rule, you have to pass the extension to a rule macro. Take `scala_binary` for example, +``` +load("//scala:advanced_usage/scala.bzl", "make_scala_binary") +load("//scala/scalafmt:phase_scalafmt_ext.bzl", "ext_scalafmt") + +scalafmt_scala_binary = make_scala_binary(ext_scalafmt) +``` +Then use `scalafmt_scala_binary` as normal. + +The extension adds 2 additional attributes to the rule + - `format`: enable formatting + - `config`: the Scalafmt configuration file + +When `format` is set to `true`, you can do +``` +bazel run .format +``` +to format the source code, and do +``` +bazel run .format-test +``` +to check the format (without modifying source code). 
+ +The extension provides default configuration, but there are 2 ways to use custom configuration + - Put `.scalafmt.conf` at root of your workspace + - Pass `.scalafmt.conf` in via `config` attribute diff --git a/jmh/jmh.bzl b/jmh/jmh.bzl index d4274de0d..de45c9ade 100644 --- a/jmh/jmh.bzl +++ b/jmh/jmh.bzl @@ -1,10 +1,14 @@ load("//scala:scala.bzl", "scala_binary", "scala_library") +load( + "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", +) load( "@io_bazel_rules_scala//scala:scala_maven_import_external.bzl", _scala_maven_import_external = "scala_maven_import_external", ) -def jmh_repositories(maven_servers = ["https://repo.maven.apache.org/maven2"]): +def jmh_repositories(maven_servers = _default_maven_server_urls()): _scala_maven_import_external( name = "io_bazel_rules_scala_org_openjdk_jmh_jmh_core", artifact = "org.openjdk.jmh:jmh-core:1.20", diff --git a/junit/junit.bzl b/junit/junit.bzl index 4279483ea..7ea79f617 100644 --- a/junit/junit.bzl +++ b/junit/junit.bzl @@ -1,9 +1,13 @@ +load( + "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", +) load( "@io_bazel_rules_scala//scala:scala_maven_import_external.bzl", _scala_maven_import_external = "scala_maven_import_external", ) -def junit_repositories(maven_servers = ["https://repo.maven.apache.org/maven2"]): +def junit_repositories(maven_servers = _default_maven_server_urls()): _scala_maven_import_external( name = "io_bazel_rules_scala_junit_junit", artifact = "junit:junit:4.12", diff --git a/manual_test/scalac_jvm_opts/BUILD b/manual_test/scalac_jvm_opts/BUILD index 65d39b83a..0c20353c1 100644 --- a/manual_test/scalac_jvm_opts/BUILD +++ b/manual_test/scalac_jvm_opts/BUILD @@ -1,9 +1,7 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") load("//scala:scala_toolchain.bzl", "scala_toolchain") load("//scala:scala.bzl", "scala_library") -load( - "//scala_proto:scala_proto.bzl", - "scala_proto_library", -) 
+load("//scala_proto:scala_proto.bzl", "scala_proto_library") scala_toolchain( name = "failing_toolchain_impl", diff --git a/private/BUILD b/private/BUILD index e0235a43d..469006a8d 100644 --- a/private/BUILD +++ b/private/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_binary") + java_binary( name = "java_format", main_class = "com.google.googlejavaformat.java.Main", diff --git a/scala/BUILD b/scala/BUILD index a5c8f2416..ebf35506a 100644 --- a/scala/BUILD +++ b/scala/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_import", "java_library") load( "@io_bazel_rules_scala//scala:providers.bzl", _declare_scalac_provider = "declare_scalac_provider", @@ -35,6 +36,19 @@ toolchain( visibility = ["//visibility:public"], ) +scala_toolchain( + name = "code_coverage_toolchain_impl", + enable_code_coverage_aspect = "on", + visibility = ["//visibility:public"], +) + +toolchain( + name = "code_coverage_toolchain", + toolchain = "code_coverage_toolchain_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) + java_import( name = "bazel_test_runner_deploy", jars = ["@bazel_tools//tools/jdk:TestRunner_deploy.jar"], @@ -44,17 +58,17 @@ java_import( _declare_scalac_provider( name = "scalac_default", default_classpath = [ - "@io_bazel_rules_scala_scala_library", - "@io_bazel_rules_scala_scala_reflect", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", ], default_macro_classpath = [ - "@io_bazel_rules_scala_scala_library", - "@io_bazel_rules_scala_scala_reflect", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", ], default_repl_classpath = [ - "@io_bazel_rules_scala_scala_library", - "@io_bazel_rules_scala_scala_reflect", - "@io_bazel_rules_scala_scala_compiler", + 
"//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", ], visibility = ["//visibility:public"], ) diff --git a/scala/plusone.bzl b/scala/plusone.bzl index 1efd17093..f2f71a969 100644 --- a/scala/plusone.bzl +++ b/scala/plusone.bzl @@ -10,7 +10,7 @@ PlusOneDeps = provider( ) def _collect_plus_one_deps_aspect_impl(target, ctx): - if (ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].plus_one_deps_mode == "off"): + if (ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].dependency_mode != "plus-one"): return [] export_plus_one_deps = [] for exported_dep in getattr(ctx.rule.attr, "exports", []): diff --git a/scala/private/common.bzl b/scala/private/common.bzl index 456e2b6c1..2a3de8d14 100644 --- a/scala/private/common.bzl +++ b/scala/private/common.bzl @@ -11,44 +11,15 @@ def write_manifest_file(actions, output_file, main_class): def collect_jars( dep_targets, - dependency_analyzer_is_off = True, - unused_dependency_checker_is_off = True, - plus_one_deps_is_off = True): + dependency_mode, + need_direct_info, + need_indirect_info): """Compute the runtime and compile-time dependencies from the given targets""" # noqa - if dependency_analyzer_is_off: - return _collect_jars_when_dependency_analyzer_is_off( - dep_targets, - unused_dependency_checker_is_off, - plus_one_deps_is_off, - ) - else: - return _collect_jars_when_dependency_analyzer_is_on(dep_targets) - -def collect_plugin_paths(plugins): - """Get the actual jar paths of plugins as a depset.""" - paths = [] - for p in plugins: - if hasattr(p, "path"): - paths.append(p) - elif JavaInfo in p: - paths.extend([j.class_jar for j in p[JavaInfo].outputs.jars]) - # support http_file pointed at a jar. 
http_jar uses ijar, - # which breaks scala macros - - elif hasattr(p, "files"): - paths.extend([f for f in p.files.to_list() if not_sources_jar(f.basename)]) - return depset(paths) - -def _collect_jars_when_dependency_analyzer_is_off( - dep_targets, - unused_dependency_checker_is_off, - plus_one_deps_is_off): + transitive_compile_jars = [] + jars2labels = {} compile_jars = [] - plus_one_deps_compile_jars = [] runtime_jars = [] - jars2labels = {} - deps_providers = [] for dep_target in dep_targets: @@ -59,60 +30,67 @@ def _collect_jars_when_dependency_analyzer_is_off( compile_jars.append(java_provider.compile_jars) runtime_jars.append(java_provider.transitive_runtime_jars) - if not unused_dependency_checker_is_off: + additional_transitive_compile_jars = _additional_transitive_compile_jars( + java_provider = java_provider, + dep_target = dep_target, + dependency_mode = dependency_mode, + ) + transitive_compile_jars.append(additional_transitive_compile_jars) + + if need_direct_info or need_indirect_info: + if need_indirect_info: + all_jars = additional_transitive_compile_jars.to_list() + else: + all_jars = [] add_labels_of_jars_to( jars2labels, dep_target, - [], + all_jars, java_provider.compile_jars.to_list(), ) - if (not plus_one_deps_is_off) and (PlusOneDeps in dep_target): - plus_one_deps_compile_jars.append( - depset(transitive = [dep[JavaInfo].compile_jars for dep in dep_target[PlusOneDeps].direct_deps if JavaInfo in dep]), - ) - return struct( compile_jars = depset(transitive = compile_jars), transitive_runtime_jars = depset(transitive = runtime_jars), jars2labels = JarsToLabelsInfo(jars_to_labels = jars2labels), - transitive_compile_jars = depset(transitive = compile_jars + plus_one_deps_compile_jars), + transitive_compile_jars = depset(transitive = transitive_compile_jars), deps_providers = deps_providers, ) -def _collect_jars_when_dependency_analyzer_is_on(dep_targets): - transitive_compile_jars = [] - jars2labels = {} - compile_jars = [] - runtime_jars = [] 
- deps_providers = [] - - for dep_target in dep_targets: - # we require a JavaInfo for dependencies - # must use java_import or scala_import if you have raw files - java_provider = dep_target[JavaInfo] - deps_providers.append(java_provider) - current_dep_compile_jars = java_provider.compile_jars - current_dep_transitive_compile_jars = java_provider.transitive_compile_time_jars - runtime_jars.append(java_provider.transitive_runtime_jars) - - compile_jars.append(current_dep_compile_jars) - transitive_compile_jars.append(current_dep_transitive_compile_jars) +def collect_plugin_paths(plugins): + """Get the actual jar paths of plugins as a depset.""" + paths = [] + for p in plugins: + if hasattr(p, "path"): + paths.append(p) + elif JavaInfo in p: + paths.extend([j.class_jar for j in p[JavaInfo].outputs.jars]) + # support http_file pointed at a jar. http_jar uses ijar, + # which breaks scala macros - add_labels_of_jars_to( - jars2labels, - dep_target, - current_dep_transitive_compile_jars.to_list(), - current_dep_compile_jars.to_list(), - ) + elif hasattr(p, "files"): + paths.extend([f for f in p.files.to_list() if not_sources_jar(f.basename)]) + return depset(paths) - return struct( - compile_jars = depset(transitive = compile_jars), - transitive_runtime_jars = depset(transitive = runtime_jars), - jars2labels = JarsToLabelsInfo(jars_to_labels = jars2labels), - transitive_compile_jars = depset(transitive = transitive_compile_jars), - deps_providers = deps_providers, - ) +def _additional_transitive_compile_jars( + java_provider, + dep_target, + dependency_mode): + if dependency_mode == "transitive": + return java_provider.transitive_compile_time_jars + elif dependency_mode == "plus-one": + # dep_target will not always have a PlusOneDeps provider, such as + # with scala_maven_import_external, hence the need for the fallback. 
+ if PlusOneDeps in dep_target: + plus_one_jars = [dep[JavaInfo].compile_jars for dep in dep_target[PlusOneDeps].direct_deps if JavaInfo in dep] + + # plus_one_jars only contains the deps of deps, not the deps themselves. + # Hence the need to include the dep's compile jars anyways + return depset(transitive = plus_one_jars + [java_provider.compile_jars]) + else: + return java_provider.compile_jars + else: # direct + return java_provider.compile_jars # When import mavan_jar's for scala macros we have to use the jar:file requirement # since bazel 0.6.0 this brings in the source jar too @@ -125,11 +103,6 @@ def _collect_jars_when_dependency_analyzer_is_on(dep_targets): def not_sources_jar(name): return "-sources.jar" not in name -def filter_not_sources(deps): - return depset( - [dep for dep in deps.to_list() if not_sources_jar(dep.basename)], - ) - def add_labels_of_jars_to(jars2labels, dependency, all_jars, direct_jars): for jar in direct_jars: jars2labels[jar.path] = dependency.label diff --git a/scala/private/common_attributes.bzl b/scala/private/common_attributes.bzl index 2982536fe..bd6c3fc3c 100644 --- a/scala/private/common_attributes.bzl +++ b/scala/private/common_attributes.bzl @@ -66,13 +66,6 @@ common_attrs.update({ ], mandatory = False, ), - "_unused_dependency_checker_plugin": attr.label( - default = Label( - "@io_bazel_rules_scala//third_party/unused_dependency_checker/src/main:unused_dependency_checker", - ), - allow_files = [".jar"], - mandatory = False, - ), "unused_dependency_checker_ignored_targets": attr.label_list(default = []), "_code_coverage_instrumentation_worker": attr.label( default = "@io_bazel_rules_scala//src/java/io/bazel/rulesscala/coverage/instrumenter", diff --git a/scala/private/dependency.bzl b/scala/private/dependency.bzl new file mode 100644 index 000000000..7c9a8032d --- /dev/null +++ b/scala/private/dependency.bzl @@ -0,0 +1,51 @@ +# This file contains all computations for what the dependency mode is +# (i.e. 
transitive, plus-one, direct, etc) +# and what/how dependency analysis is performed (unused deps, strict deps, etc). + +def new_dependency_info( + dependency_mode, + unused_deps_mode, + strict_deps_mode, + dependency_tracking_method): + is_strict_deps_on = strict_deps_mode != "off" + is_unused_deps_on = unused_deps_mode != "off" + + need_direct_jars = is_strict_deps_on or is_unused_deps_on + need_direct_targets = is_unused_deps_on + + return struct( + dependency_mode = dependency_mode, + need_indirect_info = is_strict_deps_on, + need_direct_jars = need_direct_jars, + need_direct_targets = need_direct_targets, + need_direct_info = need_direct_jars or need_direct_targets, + dependency_tracking_method = dependency_tracking_method, + unused_deps_mode = unused_deps_mode, + strict_deps_mode = strict_deps_mode, + use_analyzer = is_strict_deps_on or is_unused_deps_on, + ) + +# TODO(https://github.com/bazelbuild/rules_scala/issues/987): Clarify the situation +def legacy_unclear_dependency_info_for_protobuf_scrooge(ctx): + return new_dependency_info( + dependency_mode = _legacy_unclear_dependency_mode_for_protobuf_scrooge(ctx), + unused_deps_mode = "off", + strict_deps_mode = get_strict_deps_mode(ctx), + dependency_tracking_method = "high-level", + ) + +# TODO(https://github.com/bazelbuild/rules_scala/issues/987): Clarify the situation +def _legacy_unclear_dependency_mode_for_protobuf_scrooge(ctx): + if _is_strict_deps_on(ctx): + return "transitive" + else: + return "direct" + +def get_strict_deps_mode(ctx): + if not hasattr(ctx.attr, "_dependency_analyzer_plugin"): + return "off" + + return ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].strict_deps_mode + +def _is_strict_deps_on(ctx): + return get_strict_deps_mode(ctx) != "off" diff --git a/scala/private/macros/scala_repositories.bzl b/scala/private/macros/scala_repositories.bzl index bee2fc8e2..ad964498e 100644 --- a/scala/private/macros/scala_repositories.bzl +++ b/scala/private/macros/scala_repositories.bzl 
@@ -1,6 +1,7 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load( "@io_bazel_rules_scala//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = "default_scala_version", _default_scala_version_jar_shas = "default_scala_version_jar_shas", _extract_major_version = "extract_major_version", @@ -56,7 +57,7 @@ def scala_repositories( _default_scala_version(), _default_scala_version_jar_shas(), ), - maven_servers = ["https://repo.maven.apache.org/maven2"], + maven_servers = _default_maven_server_urls(), scala_extra_jars = _default_scala_extra_jars(), fetch_sources = False): (scala_version, scala_version_jar_shas) = scala_version_shas @@ -66,7 +67,7 @@ def scala_repositories( maven_servers = maven_servers, scala_version = scala_version, scala_version_jar_shas = scala_version_jar_shas, - fetch_sources = fetch_sources + fetch_sources = fetch_sources, ) scala_version_extra_jars = scala_extra_jars[major_version] @@ -141,19 +142,52 @@ def scala_repositories( if not native.existing_rule("com_google_protobuf"): http_archive( name = "com_google_protobuf", - sha256 = "d82eb0141ad18e98de47ed7ed415daabead6d5d1bef1b8cccb6aa4d108a9008f", - strip_prefix = "protobuf-b4f193788c9f0f05d7e0879ea96cd738630e5d51", - # Commit from 2019-05-15, update to protobuf 3.8 when available. 
+ sha256 = "cf754718b0aa945b00550ed7962ddc167167bd922b842199eeb6505e6f344852", + strip_prefix = "protobuf-3.11.3", urls = [ - "https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/b4f193788c9f0f05d7e0879ea96cd738630e5d51.tar.gz", - "https://github.com/protocolbuffers/protobuf/archive/b4f193788c9f0f05d7e0879ea96cd738630e5d51.tar.gz", + "https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/v3.11.3.tar.gz", + "https://github.com/protocolbuffers/protobuf/archive/v3.11.3.tar.gz", ], ) + if not native.existing_rule("rules_cc"): + http_archive( + name = "rules_cc", + sha256 = "29daf0159f0cf552fcff60b49d8bcd4f08f08506d2da6e41b07058ec50cfeaec", + strip_prefix = "rules_cc-b7fe9697c0c76ab2fd431a891dbb9a6a32ed7c3e", + urls = ["https://github.com/bazelbuild/rules_cc/archive/b7fe9697c0c76ab2fd431a891dbb9a6a32ed7c3e.tar.gz"], + ) + + if not native.existing_rule("rules_java"): + http_archive( + name = "rules_java", + sha256 = "220b87d8cfabd22d1c6d8e3cdb4249abd4c93dcc152e0667db061fb1b957ee68", + urls = ["https://github.com/bazelbuild/rules_java/releases/download/0.1.1/rules_java-0.1.1.tar.gz"], + ) + + if not native.existing_rule("rules_proto"): + http_archive( + name = "rules_proto", + sha256 = "4d421d51f9ecfe9bf96ab23b55c6f2b809cbaf0eea24952683e397decfbd0dd0", + strip_prefix = "rules_proto-f6b8d89b90a7956f6782a4a3609b2f0eee3ce965", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/f6b8d89b90a7956f6782a4a3609b2f0eee3ce965.tar.gz", + "https://github.com/bazelbuild/rules_proto/archive/f6b8d89b90a7956f6782a4a3609b2f0eee3ce965.tar.gz", + ], + ) + + if not native.existing_rule("rules_python"): + http_archive( + name = "rules_python", + sha256 = "e5470e92a18aa51830db99a4d9c492cc613761d5bdb7131c04bd92b9834380f6", + strip_prefix = "rules_python-4b84ad270387a7c439ebdccfd530e2339601ef27", + urls = ["https://github.com/bazelbuild/rules_python/archive/4b84ad270387a7c439ebdccfd530e2339601ef27.tar.gz"], + ) + if not 
native.existing_rule("zlib"): # needed by com_google_protobuf http_archive( name = "zlib", - build_file = "@com_google_protobuf//:third_party/zlib.BUILD", + build_file = "@com_google_protobuf//third_party:zlib.BUILD", sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1", strip_prefix = "zlib-1.2.11", urls = [ @@ -206,3 +240,13 @@ def scala_repositories( name = "io_bazel_rules_scala/dependency/scala/guava", actual = "@io_bazel_rules_scala_guava", ) + + native.bind( + name = "io_bazel_rules_scala/dependency/scala/scalatest/scalatest", + actual = "@io_bazel_rules_scala_scalatest", + ) + + native.bind( + name = "io_bazel_rules_scala/dependency/scala/scalactic/scalactic", + actual = "@io_bazel_rules_scala_scalactic", + ) diff --git a/scala/private/paths.bzl b/scala/private/paths.bzl new file mode 100644 index 000000000..09b916d4f --- /dev/null +++ b/scala/private/paths.bzl @@ -0,0 +1,12 @@ +java_extension = ".java" + +scala_extension = ".scala" + +srcjar_extension = ".srcjar" + +def get_files_with_extension(ctx, extension): + return [ + f + for f in ctx.files.srcs + if f.basename.endswith(extension) + ] diff --git a/scala/private/phases/api.bzl b/scala/private/phases/api.bzl index cc8a340af..d8da71cd5 100644 --- a/scala/private/phases/api.bzl +++ b/scala/private/phases/api.bzl @@ -6,6 +6,7 @@ load( "@io_bazel_rules_scala//scala:advanced_usage/providers.bzl", _ScalaRulePhase = "ScalaRulePhase", ) +load("@bazel_skylib//lib:dicts.bzl", "dicts") # A method to modify the built-in phase list # - Insert new phases to the first/last position @@ -22,22 +23,26 @@ def _adjust_phases(phases, adjustments): # phase_name: the name of the new phase, also used to access phase information # phase_function: the function of the new phase for (relation, peer_name, phase_name, phase_function) in adjustments: - for idx, (needle, _) in enumerate(phases): - if relation in ["^", "first"]: - phases.insert(0, (phase_name, phase_function)) - elif relation in ["$", "last"]: - 
phases.append((phase_name, phase_function)) - elif needle == peer_name: - if relation in ["-", "before"]: - phases.insert(idx, (phase_name, phase_function)) - elif relation in ["+", "after"]: - phases.insert(idx + 1, (phase_name, phase_function)) - elif relation in ["=", "replace"]: - phases[idx] = (phase_name, phase_function) + if relation in ["^", "first"]: + phases.insert(0, (phase_name, phase_function)) + elif relation in ["$", "last"]: + phases.append((phase_name, phase_function)) + else: + for idx, (needle, _) in enumerate(phases): + if needle == peer_name: + if relation in ["-", "before"]: + phases.insert(idx, (phase_name, phase_function)) + break + elif relation in ["+", "after"]: + phases.insert(idx + 1, (phase_name, phase_function)) + break + elif relation in ["=", "replace"]: + phases[idx] = (phase_name, phase_function) + break return phases # Execute phases -def run_phases(ctx, builtin_customizable_phases, fixed_phase): +def run_phases(ctx, builtin_customizable_phases): # Loading custom phases # Phases must be passed in by provider phase_providers = [ @@ -59,18 +64,24 @@ def run_phases(ctx, builtin_customizable_phases, fixed_phase): # A placeholder for data shared with later phases global_provider = {} current_provider = struct(**global_provider) - for (name, function) in adjusted_phases + [fixed_phase]: + acculmulated_external_providers = {} + for (name, function) in adjusted_phases: # Run a phase new_provider = function(ctx, current_provider) # If a phase returns data, append it to global_provider # for later phases to access if new_provider != None: + if (hasattr(new_provider, "external_providers")): + acculmulated_external_providers = dicts.add( + acculmulated_external_providers, + new_provider.external_providers, + ) global_provider[name] = new_provider current_provider = struct(**global_provider) # The final return of rules implementation - return current_provider + return acculmulated_external_providers.values() # A method to pass in phase 
provider def extras_phases(extras): diff --git a/scala/private/phases/phase_collect_exports_jars.bzl b/scala/private/phases/phase_collect_exports_jars.bzl index 35a83dbe3..fe88e9344 100644 --- a/scala/private/phases/phase_collect_exports_jars.bzl +++ b/scala/private/phases/phase_collect_exports_jars.bzl @@ -12,4 +12,9 @@ def phase_collect_exports_jars(ctx, p): # Add information from exports (is key that AFTER all build actions/runfiles analysis) # Since after, will not show up in deploy_jar or old jars runfiles # Notice that compile_jars is intentionally transitive for exports - return collect_jars(ctx.attr.exports) + return collect_jars( + ctx.attr.exports, + "direct", + False, + False, + ) diff --git a/scala/private/phases/phase_collect_jars.bzl b/scala/private/phases/phase_collect_jars.bzl index 086975ffd..a353e0ac7 100644 --- a/scala/private/phases/phase_collect_jars.bzl +++ b/scala/private/phases/phase_collect_jars.bzl @@ -3,17 +3,12 @@ # # DOCUMENT THIS # -load( - "@io_bazel_rules_scala//scala/private:rule_impls.bzl", - "is_dependency_analyzer_off", - "is_plus_one_deps_off", -) load( "@io_bazel_rules_scala//scala/private:common.bzl", "collect_jars", ) -def phase_scalatest_collect_jars(ctx, p): +def phase_collect_jars_scalatest(ctx, p): args = struct( base_classpath = p.scalac_provider.default_classpath + [ctx.attr._scalatest], extra_runtime_deps = [ @@ -21,21 +16,21 @@ def phase_scalatest_collect_jars(ctx, p): ctx.attr._scalatest_runner, ], ) - return _phase_default_collect_jars(ctx, p, args) + return _phase_collect_jars_default(ctx, p, args) -def phase_repl_collect_jars(ctx, p): +def phase_collect_jars_repl(ctx, p): args = struct( base_classpath = p.scalac_provider.default_repl_classpath, ) - return _phase_default_collect_jars(ctx, p, args) + return _phase_collect_jars_default(ctx, p, args) -def phase_macro_library_collect_jars(ctx, p): +def phase_collect_jars_macro_library(ctx, p): args = struct( base_classpath = p.scalac_provider.default_macro_classpath, ) 
- return _phase_default_collect_jars(ctx, p, args) + return _phase_collect_jars_default(ctx, p, args) -def phase_junit_test_collect_jars(ctx, p): +def phase_collect_jars_junit_test(ctx, p): args = struct( extra_deps = [ ctx.attr._junit, @@ -44,24 +39,18 @@ def phase_junit_test_collect_jars(ctx, p): ctx.attr._bazel_test_runner, ], ) - return _phase_default_collect_jars(ctx, p, args) - -def phase_library_for_plugin_bootstrapping_collect_jars(ctx, p): - args = struct( - unused_dependency_checker_mode = "off", - ) - return _phase_default_collect_jars(ctx, p, args) + return _phase_collect_jars_default(ctx, p, args) -def phase_common_collect_jars(ctx, p): - return _phase_default_collect_jars(ctx, p) +def phase_collect_jars_common(ctx, p): + return _phase_collect_jars_default(ctx, p) -def _phase_default_collect_jars(ctx, p, _args = struct()): +def _phase_collect_jars_default(ctx, p, _args = struct()): return _phase_collect_jars( ctx, + p, _args.base_classpath if hasattr(_args, "base_classpath") else p.scalac_provider.default_classpath, _args.extra_deps if hasattr(_args, "extra_deps") else [], _args.extra_runtime_deps if hasattr(_args, "extra_runtime_deps") else [], - _args.unused_dependency_checker_mode if hasattr(_args, "unused_dependency_checker_mode") else p.unused_deps_checker, ) # Extract very common code out from dependency analysis into single place @@ -69,18 +58,15 @@ def _phase_default_collect_jars(ctx, p, _args = struct()): # collects jars from deps, runtime jars from runtime_deps, and def _phase_collect_jars( ctx, + p, base_classpath, extra_deps, - extra_runtime_deps, - unused_dependency_checker_mode): - unused_dependency_checker_is_off = unused_dependency_checker_mode == "off" - dependency_analyzer_is_off = is_dependency_analyzer_off(ctx) - + extra_runtime_deps): deps_jars = collect_jars( ctx.attr.deps + extra_deps + base_classpath, - dependency_analyzer_is_off, - unused_dependency_checker_is_off, - is_plus_one_deps_off(ctx), + p.dependency.dependency_mode, + 
p.dependency.need_direct_info, + p.dependency.need_indirect_info, ) ( @@ -108,6 +94,7 @@ def _phase_collect_jars( transitive_compile_jars = transitive_compile_jars, transitive_runtime_jars = transitive_rjars, deps_providers = deps_providers, + external_providers = {"JarsToLabelsInfo": jars2labels}, ) def _collect_runtime_jars(dep_targets): diff --git a/scala/private/phases/phase_compile.bzl b/scala/private/phases/phase_compile.bzl index 0500e2f37..9e4a7a95a 100644 --- a/scala/private/phases/phase_compile.bzl +++ b/scala/private/phases/phase_compile.bzl @@ -5,29 +5,20 @@ # load("@bazel_tools//tools/jdk:toolchain_utils.bzl", "find_java_runtime_toolchain", "find_java_toolchain") load( - "@io_bazel_rules_scala//scala/private:coverage_replacements_provider.bzl", - _coverage_replacements_provider = "coverage_replacements_provider", + "@io_bazel_rules_scala//scala/private:paths.bzl", + _get_files_with_extension = "get_files_with_extension", + _java_extension = "java_extension", + _scala_extension = "scala_extension", + _srcjar_extension = "srcjar_extension", ) load( "@io_bazel_rules_scala//scala/private:rule_impls.bzl", - _adjust_resources_path_by_default_prefixes = "adjust_resources_path_by_default_prefixes", _compile_scala = "compile_scala", _expand_location = "expand_location", ) +load(":resources.bzl", _resource_paths = "paths") -_java_extension = ".java" - -_scala_extension = ".scala" - -_srcjar_extension = ".srcjar" - -_empty_coverage_struct = struct( - instrumented_files = None, - providers = [], - replacements = {}, -) - -def phase_binary_compile(ctx, p): +def phase_compile_binary(ctx, p): args = struct( buildijar = False, unused_dependency_checker_ignored_targets = [ @@ -36,9 +27,9 @@ def phase_binary_compile(ctx, p): ctx.attr.unused_dependency_checker_ignored_targets ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_library_compile(ctx, p): +def phase_compile_library(ctx, p): args = struct( srcjars = 
p.collect_srcjars, unused_dependency_checker_ignored_targets = [ @@ -47,19 +38,15 @@ def phase_library_compile(ctx, p): ctx.attr.unused_dependency_checker_ignored_targets ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_library_for_plugin_bootstrapping_compile(ctx, p): +def phase_compile_library_for_plugin_bootstrapping(ctx, p): args = struct( - unused_dependency_checker_ignored_targets = [ - target.label - for target in p.scalac_provider.default_classpath + ctx.attr.exports - ], - unused_dependency_checker_mode = "off", + buildijar = ctx.attr.build_ijar, ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_macro_library_compile(ctx, p): +def phase_compile_macro_library(ctx, p): args = struct( buildijar = False, unused_dependency_checker_ignored_targets = [ @@ -68,9 +55,9 @@ def phase_macro_library_compile(ctx, p): ctx.attr.unused_dependency_checker_ignored_targets ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_junit_test_compile(ctx, p): +def phase_compile_junit_test(ctx, p): args = struct( buildijar = False, implicit_junit_deps_needed_for_java_compilation = [ @@ -88,9 +75,9 @@ def phase_junit_test_compile(ctx, p): ctx.attr._bazel_test_runner.label, ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_repl_compile(ctx, p): +def phase_compile_repl(ctx, p): args = struct( buildijar = False, unused_dependency_checker_ignored_targets = [ @@ -99,31 +86,31 @@ def phase_repl_compile(ctx, p): ctx.attr.unused_dependency_checker_ignored_targets ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_scalatest_compile(ctx, p): +def phase_compile_scalatest(ctx, p): args = struct( buildijar = False, unused_dependency_checker_ignored_targets = [ target.label for target in 
p.scalac_provider.default_classpath + + [ctx.attr._scalatest] + ctx.attr.unused_dependency_checker_ignored_targets ], ) - return _phase_default_compile(ctx, p, args) + return _phase_compile_default(ctx, p, args) -def phase_common_compile(ctx, p): - return _phase_default_compile(ctx, p) +def phase_compile_common(ctx, p): + return _phase_compile_default(ctx, p) -def _phase_default_compile(ctx, p, _args = struct()): +def _phase_compile_default(ctx, p, _args = struct()): return _phase_compile( ctx, p, _args.srcjars if hasattr(_args, "srcjars") else depset(), _args.buildijar if hasattr(_args, "buildijar") else True, _args.implicit_junit_deps_needed_for_java_compilation if hasattr(_args, "implicit_junit_deps_needed_for_java_compilation") else [], - _args.unused_dependency_checker_ignored_targets if hasattr(_args, "unused_dependency_checker_ignored_targets") else [], - _args.unused_dependency_checker_mode if hasattr(_args, "unused_dependency_checker_mode") else p.unused_deps_checker, + unused_dependency_checker_ignored_targets = _args.unused_dependency_checker_ignored_targets if hasattr(_args, "unused_dependency_checker_ignored_targets") else [], ) def _phase_compile( @@ -133,8 +120,7 @@ def _phase_compile( buildijar, # TODO: generalize this hack implicit_junit_deps_needed_for_java_compilation, - unused_dependency_checker_ignored_targets, - unused_dependency_checker_mode): + unused_dependency_checker_ignored_targets): manifest = ctx.outputs.manifest jars = p.collect_jars.compile_jars rjars = p.collect_jars.transitive_runtime_jars @@ -152,23 +138,20 @@ def _phase_compile( transitive_compile_jars, jars2labels, implicit_junit_deps_needed_for_java_compilation, - unused_dependency_checker_mode, - unused_dependency_checker_ignored_targets, + p.dependency, deps_providers, default_classpath, + unused_dependency_checker_ignored_targets, ) # TODO: simplify the return values and use provider return struct( - class_jar = out.class_jar, - coverage = out.coverage, - full_jars = 
out.full_jars, - ijar = out.ijar, - ijars = out.ijars, + files = depset(out.full_jars), rjars = depset(out.full_jars, transitive = [rjars]), - java_jar = out.java_jar, - source_jars = _pack_source_jars(ctx) + out.source_jars, merged_provider = out.merged_provider, + external_providers = { + "JavaInfo": out.merged_provider, + }, ) def _compile_or_empty( @@ -180,10 +163,10 @@ def _compile_or_empty( transitive_compile_jars, jars2labels, implicit_junit_deps_needed_for_java_compilation, - unused_dependency_checker_mode, - unused_dependency_checker_ignored_targets, + dependency_info, deps_providers, - default_classpath): + default_classpath, + unused_dependency_checker_ignored_targets): # We assume that if a srcjar is present, it is not empty if len(ctx.files.srcs) + len(srcjars.to_list()) == 0: _build_nosrc_jar(ctx) @@ -192,39 +175,16 @@ def _compile_or_empty( # no need to build ijar when empty return struct( - class_jar = ctx.outputs.jar, - coverage = _empty_coverage_struct, full_jars = [ctx.outputs.jar], - ijar = ctx.outputs.jar, - ijars = [ctx.outputs.jar], - java_jar = False, - source_jars = [], merged_provider = scala_compilation_provider, ) else: - in_srcjars = [ - f - for f in ctx.files.srcs - if f.basename.endswith(_srcjar_extension) - ] + java_srcs = _get_files_with_extension(ctx, _java_extension) + scala_srcs = _get_files_with_extension(ctx, _scala_extension) + in_srcjars = _get_files_with_extension(ctx, _srcjar_extension) all_srcjars = depset(in_srcjars, transitive = [srcjars]) - java_srcs = [ - f - for f in ctx.files.srcs - if f.basename.endswith(_java_extension) - ] - - # We are not able to verify whether dependencies are used when compiling java sources - # Thus we disable unused dependency checking when java sources are found - if len(java_srcs) != 0: - unused_dependency_checker_mode = "off" - - sources = [ - f - for f in ctx.files.srcs - if f.basename.endswith(_scala_extension) - ] + java_srcs + sources = scala_srcs + java_srcs _compile_scala( ctx, 
ctx.label, @@ -245,9 +205,8 @@ def _compile_or_empty( ctx.attr.expect_java_output, ctx.attr.scalac_jvm_flags, ctx.attr._scalac, - unused_dependency_checker_ignored_targets = - unused_dependency_checker_ignored_targets, - unused_dependency_checker_mode = unused_dependency_checker_mode, + dependency_info, + unused_dependency_checker_ignored_targets, ) # build ijar if needed @@ -263,7 +222,7 @@ def _compile_or_empty( # so set ijar == jar ijar = ctx.outputs.jar - source_jar = _pack_source_jar(ctx) + source_jar = _pack_source_jar(ctx, scala_srcs, in_srcjars) scala_compilation_provider = _create_scala_compilation_provider(ctx, ijar, source_jar, deps_providers) # compile the java now @@ -277,14 +236,8 @@ def _compile_or_empty( ) full_jars = [ctx.outputs.jar] - ijars = [ijar] - source_jars = [] if java_jar: full_jars += [java_jar.jar] - ijars += [java_jar.ijar] - source_jars += java_jar.source_jars - - coverage = _jacoco_offline_instrument(ctx, ctx.outputs.jar) if java_jar: merged_provider = java_common.merge([scala_compilation_provider, java_jar.java_compilation_provider]) @@ -292,22 +245,10 @@ def _compile_or_empty( merged_provider = scala_compilation_provider return struct( - class_jar = ctx.outputs.jar, - coverage = coverage, full_jars = full_jars, - ijar = ijar, - ijars = ijars, - java_jar = java_jar, - source_jars = source_jars, merged_provider = merged_provider, ) -def _pack_source_jars(ctx): - source_jar = _pack_source_jar(ctx) - - #_pack_source_jar may return None if java_common.pack_sources returned None (and it can) - return [source_jar] if source_jar else [] - def _build_nosrc_jar(ctx): resources = _add_resources_cmd(ctx) ijar_cmd = "" @@ -359,74 +300,16 @@ def _create_scala_compilation_provider(ctx, ijar, source_jar, deps_providers): runtime_deps = runtime_deps, ) -def _pack_source_jar(ctx): - # collect .scala sources and pack a source jar for Scala - scala_sources = [ - f - for f in ctx.files.srcs - if f.basename.endswith(_scala_extension) - ] - - # collect 
.srcjar files and pack them with the scala sources - bundled_source_jars = [ - f - for f in ctx.files.srcs - if f.basename.endswith(_srcjar_extension) - ] - scala_source_jar = java_common.pack_sources( +def _pack_source_jar(ctx, scala_srcs, in_srcjars): + return java_common.pack_sources( ctx.actions, output_jar = ctx.outputs.jar, - sources = scala_sources, - source_jars = bundled_source_jars, + sources = scala_srcs, + source_jars = in_srcjars, java_toolchain = find_java_toolchain(ctx, ctx.attr._java_toolchain), host_javabase = find_java_runtime_toolchain(ctx, ctx.attr._host_javabase), ) - return scala_source_jar - -def _jacoco_offline_instrument(ctx, input_jar): - if not ctx.configuration.coverage_enabled or not hasattr(ctx.attr, "_code_coverage_instrumentation_worker"): - return _empty_coverage_struct - - output_jar = ctx.actions.declare_file( - "{}-offline.jar".format(input_jar.basename.split(".")[0]), - ) - in_out_pairs = [ - (input_jar, output_jar), - ] - - args = ctx.actions.args() - args.add_all(in_out_pairs, map_each = _jacoco_offline_instrument_format_each) - args.set_param_file_format("multiline") - args.use_param_file("@%s", use_always = True) - - ctx.actions.run( - mnemonic = "JacocoInstrumenter", - inputs = [in_out_pair[0] for in_out_pair in in_out_pairs], - outputs = [in_out_pair[1] for in_out_pair in in_out_pairs], - executable = ctx.attr._code_coverage_instrumentation_worker.files_to_run, - execution_requirements = {"supports-workers": "1"}, - arguments = [args], - ) - - replacements = {i: o for (i, o) in in_out_pairs} - provider = _coverage_replacements_provider.create( - replacements = replacements, - ) - instrumented_files_provider = coverage_common.instrumented_files_info( - ctx, - source_attributes = ["srcs"], - dependency_attributes = _coverage_replacements_provider.dependency_attributes, - extensions = ["scala", "java"], - ) - return struct( - providers = [provider, instrumented_files_provider], - replacements = replacements, - ) - -def 
_jacoco_offline_instrument_format_each(in_out_pair): - return (["%s=%s" % (in_out_pair[0].path, in_out_pair[1].path)]) - def _try_to_compile_java_jar( ctx, scala_output, @@ -480,36 +363,10 @@ def _try_to_compile_java_jar( java_compilation_provider = provider, ) -def _adjust_resources_path(path, resource_strip_prefix): - if resource_strip_prefix: - return _adjust_resources_path_by_strip_prefix(path, resource_strip_prefix) - else: - return _adjust_resources_path_by_default_prefixes(path) - def _add_resources_cmd(ctx): - res_cmd = [] - for f in ctx.files.resources: - c_dir, res_path = _adjust_resources_path( - f.short_path, - ctx.attr.resource_strip_prefix, - ) - target_path = res_path - if target_path[0] == "/": - target_path = target_path[1:] - line = "{target_path}={c_dir}{res_path}\n".format( - res_path = res_path, - target_path = target_path, - c_dir = c_dir, - ) - res_cmd.extend([line]) - return "".join(res_cmd) - -def _adjust_resources_path_by_strip_prefix(path, resource_strip_prefix): - if not path.startswith(resource_strip_prefix): - fail("Resource file %s is not under the specified prefix to strip" % path) - - clean_path = path[len(resource_strip_prefix):] - return resource_strip_prefix, clean_path + paths = _resource_paths(ctx.files.resources, ctx.attr.resource_strip_prefix) + lines = ["{target}={source}\n".format(target = p[0], source = p[1]) for p in paths] + return "".join(lines) def _collect_java_providers_of(deps): providers = [] diff --git a/scala/private/phases/phase_coverage.bzl b/scala/private/phases/phase_coverage.bzl new file mode 100644 index 000000000..f076b5ad4 --- /dev/null +++ b/scala/private/phases/phase_coverage.bzl @@ -0,0 +1,77 @@ +# +# PHASE: coverage +# +# DOCUMENT THIS +# + +load( + "@io_bazel_rules_scala//scala/private:coverage_replacements_provider.bzl", + _coverage_replacements_provider = "coverage_replacements_provider", +) + +def phase_coverage_library(ctx, p): + args = struct( + srcjars = p.collect_srcjars, + ) + return 
_phase_coverage_default(ctx, p, args) + +def phase_coverage_common(ctx, p): + return _phase_coverage_default(ctx, p) + +def _phase_coverage_default(ctx, p, _args = struct()): + return _phase_coverage( + ctx, + p, + _args.srcjars if hasattr(_args, "srcjars") else depset(), + ) + +def _phase_coverage(ctx, p, srcjars): + if len(ctx.files.srcs) + len(srcjars.to_list()) == 0 or not ctx.configuration.coverage_enabled: + return struct( + replacements = {}, + external_providers = {}, + ) + else: + input_jar = ctx.outputs.jar + output_jar = ctx.actions.declare_file( + "{}-offline.jar".format(input_jar.basename.split(".")[0]), + ) + srcs_paths = [src.path for src in ctx.files.srcs] + records = [ + (input_jar, output_jar, srcs_paths), + ] + + args = ctx.actions.args() + args.add_all(records, map_each = _jacoco_offline_instrument_format_each) + args.set_param_file_format("multiline") + args.use_param_file("@%s", use_always = True) + + ctx.actions.run( + mnemonic = "JacocoInstrumenter", + inputs = [record[0] for record in records], + outputs = [record[1] for record in records], + executable = ctx.attr._code_coverage_instrumentation_worker.files_to_run, + execution_requirements = {"supports-workers": "1"}, + arguments = [args], + ) + + replacements = {i: o for (i, o, _) in records} + provider = _coverage_replacements_provider.create( + replacements = replacements, + ) + instrumented_files_provider = coverage_common.instrumented_files_info( + ctx, + source_attributes = ["srcs"], + dependency_attributes = _coverage_replacements_provider.dependency_attributes, + extensions = ["scala", "java"], + ) + return struct( + replacements = replacements, + external_providers = { + "_CoverageReplacements": provider, + "InstrumentedFilesInfo": instrumented_files_provider, + }, + ) + +def _jacoco_offline_instrument_format_each(records): + return (["%s=%s=%s" % (records[0].path, records[1].path, ",".join(records[2]))]) diff --git a/scala/private/phases/phase_coverage_runfiles.bzl 
b/scala/private/phases/phase_coverage_runfiles.bzl index 00e50cf44..3031cd438 100644 --- a/scala/private/phases/phase_coverage_runfiles.bzl +++ b/scala/private/phases/phase_coverage_runfiles.bzl @@ -14,7 +14,7 @@ def phase_coverage_runfiles(ctx, p): if ctx.configuration.coverage_enabled and _coverage_replacements_provider.is_enabled(ctx): coverage_replacements = _coverage_replacements_provider.from_ctx( ctx, - base = p.compile.coverage.replacements, + base = p.coverage.replacements, ).replacements rjars = depset([ @@ -24,5 +24,6 @@ def phase_coverage_runfiles(ctx, p): coverage_runfiles = ctx.files._jacocorunner + ctx.files._lcov_merger + coverage_replacements.values() return struct( coverage_runfiles = coverage_runfiles, + runfiles = depset(coverage_runfiles), rjars = rjars, ) diff --git a/scala/private/phases/phase_declare_executable.bzl b/scala/private/phases/phase_declare_executable.bzl index c7bce9a20..749a20f2c 100644 --- a/scala/private/phases/phase_declare_executable.bzl +++ b/scala/private/phases/phase_declare_executable.bzl @@ -10,6 +10,10 @@ load( def phase_declare_executable(ctx, p): if (is_windows(ctx)): - return ctx.actions.declare_file("%s.exe" % ctx.label.name) + return struct( + executable = ctx.actions.declare_file("%s.exe" % ctx.label.name), + ) else: - return ctx.actions.declare_file(ctx.label.name) + return struct( + executable = ctx.actions.declare_file(ctx.label.name), + ) diff --git a/scala/private/phases/phase_default_info.bzl b/scala/private/phases/phase_default_info.bzl new file mode 100644 index 000000000..53f056883 --- /dev/null +++ b/scala/private/phases/phase_default_info.bzl @@ -0,0 +1,41 @@ +# +# PHASE: default_info +# +# DOCUMENT THIS +# + +def phase_default_info(ctx, p): + executable = None + files = [] + runfiles = [] + + phase_names = dir(p) + phase_names.remove("to_json") + phase_names.remove("to_proto") + for phase_name in phase_names: + phase = getattr(p, phase_name) + + if hasattr(phase, "executable"): + if executable == 
None: + executable = phase.executable + else: + fail("only one executable may be provided") + + if hasattr(phase, "files"): + files.append(phase.files) + + if hasattr(phase, "runfiles"): + runfiles.append(phase.runfiles) + + return struct( + external_providers = { + "DefaultInfo": DefaultInfo( + executable = executable, + files = depset(transitive = files), + # TODO: + # Per Bazel documentation, we should avoid using collect_data. The core phases need to be updated + # before we can make the adjustment. + runfiles = ctx.runfiles(transitive_files = depset(transitive = runfiles), collect_data = True), + ), + }, + ) diff --git a/scala/private/phases/phase_dependency.bzl b/scala/private/phases/phase_dependency.bzl new file mode 100644 index 000000000..2189d2e97 --- /dev/null +++ b/scala/private/phases/phase_dependency.bzl @@ -0,0 +1,66 @@ +# Gathers information about dependency mode and analysis + +load( + "@io_bazel_rules_scala//scala/private:dependency.bzl", + "get_strict_deps_mode", + "new_dependency_info", +) +load( + "@io_bazel_rules_scala//scala/private:paths.bzl", + _get_files_with_extension = "get_files_with_extension", + _java_extension = "java_extension", +) + +def phase_dependency_common(ctx, p): + return _phase_dependency_default(ctx, p) + +def phase_dependency_library_for_plugin_bootstrapping(ctx, p): + args = struct( + unused_deps_always_off = True, + strict_deps_always_off = True, + ) + return _phase_dependency_default(ctx, p, args) + +def _phase_dependency_default(ctx, p, args = struct()): + return _phase_dependency( + ctx, + p, + args.unused_deps_always_off if hasattr(args, "unused_deps_always_off") else False, + args.strict_deps_always_off if hasattr(args, "strict_deps_always_off") else False, + ) + +def _phase_dependency( + ctx, + p, + unused_deps_always_off, + strict_deps_always_off): + toolchain = ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"] + + if strict_deps_always_off: + strict_deps_mode = "off" + else: + strict_deps_mode = 
get_strict_deps_mode(ctx) + + if unused_deps_always_off: + unused_deps_mode = "off" + else: + unused_deps_mode = _get_unused_deps_mode(ctx) + + # We are not able to verify whether dependencies are used when compiling java sources + # Thus we disable unused dependency checking when java sources are found + java_srcs = _get_files_with_extension(ctx, _java_extension) + if len(java_srcs) != 0: + unused_deps_mode = "off" + + return new_dependency_info( + toolchain.dependency_mode, + unused_deps_mode, + strict_deps_mode, + toolchain.dependency_tracking_method, + ) + +def _get_unused_deps_mode(ctx): + if ctx.attr.unused_dependency_checker_mode: + return ctx.attr.unused_dependency_checker_mode + else: + return ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].unused_dependency_checker_mode diff --git a/scala/private/phases/phase_final.bzl b/scala/private/phases/phase_final.bzl deleted file mode 100644 index 1fdb9514a..000000000 --- a/scala/private/phases/phase_final.bzl +++ /dev/null @@ -1,27 +0,0 @@ -# -# PHASE: final -# -# DOCUMENT THIS -# -def phase_binary_final(ctx, p): - defaultInfo = DefaultInfo( - executable = p.declare_executable, - files = depset([p.declare_executable, ctx.outputs.jar]), - runfiles = p.runfiles.runfiles, - ) - return [defaultInfo, p.compile.merged_provider, p.collect_jars.jars2labels] + p.compile.coverage.providers - -def phase_library_final(ctx, p): - defaultInfo = DefaultInfo( - files = depset([ctx.outputs.jar] + p.compile.full_jars), # Here is the default output - runfiles = p.runfiles.runfiles, - ) - return [defaultInfo, p.compile.merged_provider, p.collect_jars.jars2labels] + p.compile.coverage.providers - -def phase_scalatest_final(ctx, p): - defaultInfo = DefaultInfo( - executable = p.declare_executable, - files = depset([p.declare_executable, ctx.outputs.jar]), - runfiles = ctx.runfiles(p.coverage_runfiles.coverage_runfiles, transitive_files = p.runfiles.runfiles.files), - ) - return [defaultInfo, p.compile.merged_provider, 
p.collect_jars.jars2labels] + p.compile.coverage.providers diff --git a/scala/private/phases/phase_java_wrapper.bzl b/scala/private/phases/phase_java_wrapper.bzl index 1db2cfc8b..a3fab5d76 100644 --- a/scala/private/phases/phase_java_wrapper.bzl +++ b/scala/private/phases/phase_java_wrapper.bzl @@ -8,7 +8,7 @@ load( _java_bin = "java_bin", ) -def phase_repl_java_wrapper(ctx, p): +def phase_java_wrapper_repl(ctx, p): args = struct( args = " ".join(ctx.attr.scalacopts), wrapper_preamble = """ @@ -26,12 +26,12 @@ function finish() { trap finish EXIT """, ) - return _phase_default_java_wrapper(ctx, p, args) + return _phase_java_wrapper_default(ctx, p, args) -def phase_common_java_wrapper(ctx, p): - return _phase_default_java_wrapper(ctx, p) +def phase_java_wrapper_common(ctx, p): + return _phase_java_wrapper_default(ctx, p) -def _phase_default_java_wrapper(ctx, p, _args = struct()): +def _phase_java_wrapper_default(ctx, p, _args = struct()): return _phase_java_wrapper( ctx, _args.args if hasattr(_args, "args") else "", diff --git a/scala/private/phases/phase_merge_jars.bzl b/scala/private/phases/phase_merge_jars.bzl index 880c5bd4e..a8507a5f1 100644 --- a/scala/private/phases/phase_merge_jars.bzl +++ b/scala/private/phases/phase_merge_jars.bzl @@ -12,19 +12,27 @@ def phase_merge_jars(ctx, p): Use --compression to reduce size of deploy jars. 
""" deploy_jar = ctx.outputs.deploy_jar - jars_list = p.compile.rjars.to_list() + runtime_jars = p.compile.rjars main_class = getattr(ctx.attr, "main_class", "") progress_message = "Merging Scala jar: %s" % ctx.label - args = ["--compression", "--normalize", "--sources"] - args.extend([j.path for j in jars_list]) + args = ctx.actions.args() + args.add_all(["--compression", "--normalize", "--sources"]) + args.add_all(runtime_jars, map_each = _fileToPath) + if main_class: - args.extend(["--main_class", main_class]) - args.extend(["--output", deploy_jar.path]) + args.add_all(["--main_class", main_class]) + args.add_all(["--output", deploy_jar.path]) + + args.set_param_file_format("multiline") + args.use_param_file("@%s") ctx.actions.run( - inputs = jars_list, + inputs = runtime_jars, outputs = [deploy_jar], executable = ctx.executable._singlejar, mnemonic = "ScalaDeployJar", progress_message = progress_message, - arguments = args, + arguments = [args], ) + +def _fileToPath(file): + return file.path diff --git a/scala/private/phases/phase_runfiles.bzl b/scala/private/phases/phase_runfiles.bzl index db5e6e8a2..03ab539a8 100644 --- a/scala/private/phases/phase_runfiles.bzl +++ b/scala/private/phases/phase_runfiles.bzl @@ -3,14 +3,14 @@ # # DOCUMENT THIS # -def phase_library_runfiles(ctx, p): +def phase_runfiles_library(ctx, p): args = struct( # Using transitive_files since transitive_rjars a depset and avoiding linearization transitive_files = p.compile.rjars, ) - return _phase_default_runfiles(ctx, p, args) + return _phase_runfiles_default(ctx, p, args) -def phase_scalatest_runfiles(ctx, p): +def phase_runfiles_scalatest(ctx, p): args = "\n".join([ "-R", ctx.outputs.jar.short_path, @@ -24,21 +24,21 @@ def phase_scalatest_runfiles(ctx, p): args = struct( transitive_files = depset( - [p.declare_executable, p.java_wrapper] + ctx.files._java_runtime + runfiles_ext, + [p.java_wrapper] + ctx.files._java_runtime + runfiles_ext, transitive = [p.compile.rjars], ), args_file = 
args_file, ) - return _phase_default_runfiles(ctx, p, args) + return _phase_runfiles_default(ctx, p, args) -def phase_common_runfiles(ctx, p): - return _phase_default_runfiles(ctx, p) +def phase_runfiles_common(ctx, p): + return _phase_runfiles_default(ctx, p) -def _phase_default_runfiles(ctx, p, _args = struct()): +def _phase_runfiles_default(ctx, p, _args = struct()): return _phase_runfiles( ctx, _args.transitive_files if hasattr(_args, "transitive_files") else depset( - [p.declare_executable, p.java_wrapper] + ctx.files._java_runtime, + [p.java_wrapper] + ctx.files._java_runtime, transitive = [p.compile.rjars], ), _args.args_file if hasattr(_args, "args_file") else None, @@ -49,10 +49,7 @@ def _phase_runfiles( transitive_files, args_file): return struct( - runfiles = ctx.runfiles( - transitive_files = transitive_files, - collect_data = True, - ), + runfiles = transitive_files, args_file = args_file, ) diff --git a/scala/private/phases/phase_scalafmt.bzl b/scala/private/phases/phase_scalafmt.bzl new file mode 100644 index 000000000..a42410980 --- /dev/null +++ b/scala/private/phases/phase_scalafmt.bzl @@ -0,0 +1,73 @@ +# +# PHASE: phase scalafmt +# +# Outputs to format the scala files when it is explicitly specified +# +load( + "@io_bazel_rules_scala//scala/private:paths.bzl", + _scala_extension = "scala_extension", +) + +def phase_scalafmt(ctx, p): + if ctx.attr.format: + manifest, files = _build_format(ctx) + _formatter(ctx, manifest, files, ctx.file._runner, ctx.outputs.scalafmt_runner) + _formatter(ctx, manifest, files, ctx.file._testrunner, ctx.outputs.scalafmt_testrunner) + else: + _write_empty_content(ctx, ctx.outputs.scalafmt_runner) + _write_empty_content(ctx, ctx.outputs.scalafmt_testrunner) + +def _build_format(ctx): + files = [] + manifest_content = [] + for src in ctx.files.srcs: + # only format scala source files, not generated files + if src.path.endswith(_scala_extension) and src.is_source: + file = 
ctx.actions.declare_file("{}.fmt.output".format(src.short_path)) + files.append(file) + ctx.actions.run( + arguments = ["--jvm_flag=-Dfile.encoding=UTF-8", _format_args(ctx, src, file)], + executable = ctx.executable._fmt, + outputs = [file], + inputs = [ctx.file.config, src], + execution_requirements = {"supports-workers": "1"}, + mnemonic = "ScalaFmt", + ) + manifest_content.append("{} {}".format(src.short_path, file.short_path)) + + # record the source path and the formatted file path + # so that we know where to copy the formatted file to replace the source file + manifest = ctx.actions.declare_file("format/{}/manifest.txt".format(ctx.label.name)) + ctx.actions.write(manifest, "\n".join(manifest_content) + "\n") + + return manifest, files + +def _formatter(ctx, manifest, files, template, output_runner): + ctx.actions.run_shell( + inputs = [template, manifest] + files, + outputs = [output_runner], + # replace %workspace% and %manifest% in template and rewrite it to output_runner + command = "cat $1 | sed -e s#%workspace%#$2# -e s#%manifest%#$3# > $4", + arguments = [ + template.path, + ctx.workspace_name, + manifest.short_path, + output_runner.path, + ], + execution_requirements = {}, + ) + +def _write_empty_content(ctx, output_runner): + ctx.actions.write( + output = output_runner, + content = "", + ) + +def _format_args(ctx, src, file): + args = ctx.actions.args() + args.add(ctx.file.config.path) + args.add(src.path) + args.add(file.path) + args.set_param_file_format("multiline") + args.use_param_file("@%s", use_always = True) + return args diff --git a/scala/private/phases/phase_unused_deps_checker.bzl b/scala/private/phases/phase_unused_deps_checker.bzl deleted file mode 100644 index 21f0daebb..000000000 --- a/scala/private/phases/phase_unused_deps_checker.bzl +++ /dev/null @@ -1,11 +0,0 @@ -# -# PHASE: unused deps checker -# -# DOCUMENT THIS -# - -def phase_unused_deps_checker(ctx, p): - if ctx.attr.unused_dependency_checker_mode: - return 
ctx.attr.unused_dependency_checker_mode - else: - return ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].unused_dependency_checker_mode diff --git a/scala/private/phases/phase_write_executable.bzl b/scala/private/phases/phase_write_executable.bzl index 92931196b..3c8c11fbc 100644 --- a/scala/private/phases/phase_write_executable.bzl +++ b/scala/private/phases/phase_write_executable.bzl @@ -16,7 +16,7 @@ load( _coverage_replacements_provider = "coverage_replacements_provider", ) -def phase_scalatest_write_executable(ctx, p): +def phase_write_executable_scalatest(ctx, p): # jvm_flags passed in on the target override scala_test_jvm_flags passed in on the # toolchain final_jvm_flags = first_non_empty( @@ -31,26 +31,26 @@ def phase_scalatest_write_executable(ctx, p): ] + expand_location(ctx, final_jvm_flags), use_jacoco = ctx.configuration.coverage_enabled, ) - return _phase_deafult_write_executable(ctx, p, args) + return _phase_write_executable_default(ctx, p, args) -def phase_repl_write_executable(ctx, p): +def phase_write_executable_repl(ctx, p): args = struct( jvm_flags = ["-Dscala.usejavacp=true"] + ctx.attr.jvm_flags, main_class = "scala.tools.nsc.MainGenericRunner", ) - return _phase_deafult_write_executable(ctx, p, args) + return _phase_write_executable_default(ctx, p, args) -def phase_junit_test_write_executable(ctx, p): +def phase_write_executable_junit_test(ctx, p): args = struct( jvm_flags = p.jvm_flags + ctx.attr.jvm_flags, main_class = "com.google.testing.junit.runner.BazelTestRunner", ) - return _phase_deafult_write_executable(ctx, p, args) + return _phase_write_executable_default(ctx, p, args) -def phase_common_write_executable(ctx, p): - return _phase_deafult_write_executable(ctx, p) +def phase_write_executable_common(ctx, p): + return _phase_write_executable_default(ctx, p) -def _phase_deafult_write_executable(ctx, p, _args = struct()): +def _phase_write_executable_default(ctx, p, _args = struct()): return _phase_write_executable( ctx, p, 
@@ -67,7 +67,7 @@ def _phase_write_executable( jvm_flags, use_jacoco, main_class): - executable = p.declare_executable + executable = p.declare_executable.executable wrapper = p.java_wrapper if (is_windows(ctx)): diff --git a/scala/private/phases/phases.bzl b/scala/private/phases/phases.bzl index 95e68b575..3f7ff7f06 100644 --- a/scala/private/phases/phases.bzl +++ b/scala/private/phases/phases.bzl @@ -9,57 +9,61 @@ load( ) load( "@io_bazel_rules_scala//scala/private:phases/phase_write_executable.bzl", - _phase_common_write_executable = "phase_common_write_executable", - _phase_junit_test_write_executable = "phase_junit_test_write_executable", - _phase_repl_write_executable = "phase_repl_write_executable", - _phase_scalatest_write_executable = "phase_scalatest_write_executable", + _phase_write_executable_common = "phase_write_executable_common", + _phase_write_executable_junit_test = "phase_write_executable_junit_test", + _phase_write_executable_repl = "phase_write_executable_repl", + _phase_write_executable_scalatest = "phase_write_executable_scalatest", ) load( "@io_bazel_rules_scala//scala/private:phases/phase_java_wrapper.bzl", - _phase_common_java_wrapper = "phase_common_java_wrapper", - _phase_repl_java_wrapper = "phase_repl_java_wrapper", + _phase_java_wrapper_common = "phase_java_wrapper_common", + _phase_java_wrapper_repl = "phase_java_wrapper_repl", ) load( "@io_bazel_rules_scala//scala/private:phases/phase_collect_jars.bzl", - _phase_common_collect_jars = "phase_common_collect_jars", - _phase_junit_test_collect_jars = "phase_junit_test_collect_jars", - _phase_library_for_plugin_bootstrapping_collect_jars = "phase_library_for_plugin_bootstrapping_collect_jars", - _phase_macro_library_collect_jars = "phase_macro_library_collect_jars", - _phase_repl_collect_jars = "phase_repl_collect_jars", - _phase_scalatest_collect_jars = "phase_scalatest_collect_jars", + _phase_collect_jars_common = "phase_collect_jars_common", + _phase_collect_jars_junit_test = 
"phase_collect_jars_junit_test", + _phase_collect_jars_macro_library = "phase_collect_jars_macro_library", + _phase_collect_jars_repl = "phase_collect_jars_repl", + _phase_collect_jars_scalatest = "phase_collect_jars_scalatest", ) load( "@io_bazel_rules_scala//scala/private:phases/phase_compile.bzl", - _phase_binary_compile = "phase_binary_compile", - _phase_common_compile = "phase_common_compile", - _phase_junit_test_compile = "phase_junit_test_compile", - _phase_library_compile = "phase_library_compile", - _phase_library_for_plugin_bootstrapping_compile = "phase_library_for_plugin_bootstrapping_compile", - _phase_macro_library_compile = "phase_macro_library_compile", - _phase_repl_compile = "phase_repl_compile", - _phase_scalatest_compile = "phase_scalatest_compile", + _phase_compile_binary = "phase_compile_binary", + _phase_compile_common = "phase_compile_common", + _phase_compile_junit_test = "phase_compile_junit_test", + _phase_compile_library = "phase_compile_library", + _phase_compile_library_for_plugin_bootstrapping = "phase_compile_library_for_plugin_bootstrapping", + _phase_compile_macro_library = "phase_compile_macro_library", + _phase_compile_repl = "phase_compile_repl", + _phase_compile_scalatest = "phase_compile_scalatest", ) load( "@io_bazel_rules_scala//scala/private:phases/phase_runfiles.bzl", - _phase_common_runfiles = "phase_common_runfiles", - _phase_library_runfiles = "phase_library_runfiles", - _phase_scalatest_runfiles = "phase_scalatest_runfiles", + _phase_runfiles_common = "phase_runfiles_common", + _phase_runfiles_library = "phase_runfiles_library", + _phase_runfiles_scalatest = "phase_runfiles_scalatest", ) load( - "@io_bazel_rules_scala//scala/private:phases/phase_final.bzl", - _phase_binary_final = "phase_binary_final", - _phase_library_final = "phase_library_final", - _phase_scalatest_final = "phase_scalatest_final", + "@io_bazel_rules_scala//scala/private:phases/phase_coverage.bzl", + _phase_coverage_common = "phase_coverage_common", 
+ _phase_coverage_library = "phase_coverage_library", ) +load("@io_bazel_rules_scala//scala/private:phases/phase_default_info.bzl", _phase_default_info = "phase_default_info") load("@io_bazel_rules_scala//scala/private:phases/phase_scalac_provider.bzl", _phase_scalac_provider = "phase_scalac_provider") load("@io_bazel_rules_scala//scala/private:phases/phase_write_manifest.bzl", _phase_write_manifest = "phase_write_manifest") load("@io_bazel_rules_scala//scala/private:phases/phase_collect_srcjars.bzl", _phase_collect_srcjars = "phase_collect_srcjars") load("@io_bazel_rules_scala//scala/private:phases/phase_collect_exports_jars.bzl", _phase_collect_exports_jars = "phase_collect_exports_jars") -load("@io_bazel_rules_scala//scala/private:phases/phase_unused_deps_checker.bzl", _phase_unused_deps_checker = "phase_unused_deps_checker") +load( + "@io_bazel_rules_scala//scala/private:phases/phase_dependency.bzl", + _phase_dependency_common = "phase_dependency_common", + _phase_dependency_library_for_plugin_bootstrapping = "phase_dependency_library_for_plugin_bootstrapping", +) load("@io_bazel_rules_scala//scala/private:phases/phase_declare_executable.bzl", _phase_declare_executable = "phase_declare_executable") load("@io_bazel_rules_scala//scala/private:phases/phase_merge_jars.bzl", _phase_merge_jars = "phase_merge_jars") load("@io_bazel_rules_scala//scala/private:phases/phase_jvm_flags.bzl", _phase_jvm_flags = "phase_jvm_flags") load("@io_bazel_rules_scala//scala/private:phases/phase_coverage_runfiles.bzl", _phase_coverage_runfiles = "phase_coverage_runfiles") +load("@io_bazel_rules_scala//scala/private:phases/phase_scalafmt.bzl", _phase_scalafmt = "phase_scalafmt") # API run_phases = _run_phases @@ -77,8 +81,9 @@ phase_collect_exports_jars = _phase_collect_exports_jars # write_manifest phase_write_manifest = _phase_write_manifest -# unused_deps_checker -phase_unused_deps_checker = _phase_unused_deps_checker +# dependency +phase_dependency_common = _phase_dependency_common 
+phase_dependency_library_for_plugin_bootstrapping = _phase_dependency_library_for_plugin_bootstrapping # declare_executable phase_declare_executable = _phase_declare_executable @@ -89,43 +94,47 @@ phase_merge_jars = _phase_merge_jars # jvm_flags phase_jvm_flags = _phase_jvm_flags +# coverage +phase_coverage_library = _phase_coverage_library +phase_coverage_common = _phase_coverage_common + # coverage_runfiles phase_coverage_runfiles = _phase_coverage_runfiles # write_executable -phase_scalatest_write_executable = _phase_scalatest_write_executable -phase_repl_write_executable = _phase_repl_write_executable -phase_junit_test_write_executable = _phase_junit_test_write_executable -phase_common_write_executable = _phase_common_write_executable +phase_write_executable_scalatest = _phase_write_executable_scalatest +phase_write_executable_repl = _phase_write_executable_repl +phase_write_executable_junit_test = _phase_write_executable_junit_test +phase_write_executable_common = _phase_write_executable_common # java_wrapper -phase_repl_java_wrapper = _phase_repl_java_wrapper -phase_common_java_wrapper = _phase_common_java_wrapper +phase_java_wrapper_repl = _phase_java_wrapper_repl +phase_java_wrapper_common = _phase_java_wrapper_common # collect_jars -phase_scalatest_collect_jars = _phase_scalatest_collect_jars -phase_repl_collect_jars = _phase_repl_collect_jars -phase_macro_library_collect_jars = _phase_macro_library_collect_jars -phase_junit_test_collect_jars = _phase_junit_test_collect_jars -phase_library_for_plugin_bootstrapping_collect_jars = _phase_library_for_plugin_bootstrapping_collect_jars -phase_common_collect_jars = _phase_common_collect_jars +phase_collect_jars_scalatest = _phase_collect_jars_scalatest +phase_collect_jars_repl = _phase_collect_jars_repl +phase_collect_jars_macro_library = _phase_collect_jars_macro_library +phase_collect_jars_junit_test = _phase_collect_jars_junit_test +phase_collect_jars_common = _phase_collect_jars_common # compile 
-phase_binary_compile = _phase_binary_compile -phase_library_compile = _phase_library_compile -phase_library_for_plugin_bootstrapping_compile = _phase_library_for_plugin_bootstrapping_compile -phase_macro_library_compile = _phase_macro_library_compile -phase_junit_test_compile = _phase_junit_test_compile -phase_repl_compile = _phase_repl_compile -phase_scalatest_compile = _phase_scalatest_compile -phase_common_compile = _phase_common_compile +phase_compile_binary = _phase_compile_binary +phase_compile_library = _phase_compile_library +phase_compile_library_for_plugin_bootstrapping = _phase_compile_library_for_plugin_bootstrapping +phase_compile_macro_library = _phase_compile_macro_library +phase_compile_junit_test = _phase_compile_junit_test +phase_compile_repl = _phase_compile_repl +phase_compile_scalatest = _phase_compile_scalatest +phase_compile_common = _phase_compile_common # runfiles -phase_library_runfiles = _phase_library_runfiles -phase_scalatest_runfiles = _phase_scalatest_runfiles -phase_common_runfiles = _phase_common_runfiles - -# final -phase_binary_final = _phase_binary_final -phase_library_final = _phase_library_final -phase_scalatest_final = _phase_scalatest_final +phase_runfiles_library = _phase_runfiles_library +phase_runfiles_scalatest = _phase_runfiles_scalatest +phase_runfiles_common = _phase_runfiles_common + +# default_info +phase_default_info = _phase_default_info + +# scalafmt +phase_scalafmt = _phase_scalafmt diff --git a/scala/private/resources.bzl b/scala/private/resources.bzl new file mode 100644 index 000000000..cb457fa3d --- /dev/null +++ b/scala/private/resources.bzl @@ -0,0 +1,56 @@ +load("@bazel_skylib//lib:paths.bzl", _paths = "paths") + +def paths(resources, resource_strip_prefix): + """Return a list of path tuples (target, source) where: + target - is a path in the archive (with given prefix stripped off) + source - is an absolute path of the resource file + + Tuple ordering is aligned with zipper format ie zip_path=file + + 
Args: + resources: list of file objects + resource_strip_prefix: string to strip from resource path + """ + return [(_target_path(resource, resource_strip_prefix), resource.path) for resource in resources] + +def _target_path(resource, resource_strip_prefix): + path = _target_path_by_strip_prefix(resource, resource_strip_prefix) if resource_strip_prefix else _target_path_by_default_prefixes(resource) + return _strip_prefix(path, "/") + +def _target_path_by_strip_prefix(resource, resource_strip_prefix): + # Start from absolute resource path and then strip roots so we get to correct short path + # resource.short_path sometimes give weird results ie '../' prefix + path = resource.path + if resource_strip_prefix != resource.owner.workspace_root: + path = _strip_prefix(path, resource.owner.workspace_root + "/") + path = _strip_prefix(path, resource.root.path + "/") + + # proto_library translates strip_import_prefix to proto_source_root which includes root so we have to strip it + prefix = _strip_prefix(resource_strip_prefix, resource.root.path + "/") + if not path.startswith(prefix): + fail("Resource file %s is not under the specified prefix %s to strip" % (path, prefix)) + return path[len(prefix):] + +def _target_path_by_default_prefixes(resource): + path = resource.path + + # Here we are looking to find out the offset of this resource inside + # any resources folder. We want to return the root to the resources folder + # and then the sub path inside it + dir_1, dir_2, rel_path = path.partition("resources") + if rel_path: + return rel_path + + # The same as the above but just looking for java + (dir_1, dir_2, rel_path) = path.partition("java") + if rel_path: + return rel_path + + # Both short_path and path have quirks we wish to avoid, in short_path there are times where + # it is prefixed by `../` instead of `external/`. And in .path it will instead return the entire + # bazel-out/... path, which is also wanting to be avoided. 
So instead, we return the short-path if + # path starts with bazel-out and the entire path if it does not. + return resource.short_path if path.startswith("bazel-out") else path + +def _strip_prefix(path, prefix): + return path[len(prefix):] if path.startswith(prefix) else path diff --git a/scala/private/rule_impls.bzl b/scala/private/rule_impls.bzl index 15625830c..afe2717cf 100644 --- a/scala/private/rule_impls.bzl +++ b/scala/private/rule_impls.bzl @@ -13,29 +13,12 @@ # limitations under the License. """Rules for supporting the Scala language.""" -load( - "@io_bazel_rules_scala//scala/private:coverage_replacements_provider.bzl", - _coverage_replacements_provider = "coverage_replacements_provider", -) +load("@bazel_skylib//lib:paths.bzl", "paths") load( ":common.bzl", _collect_plugin_paths = "collect_plugin_paths", ) - -def adjust_resources_path_by_default_prefixes(path): - # Here we are looking to find out the offset of this resource inside - # any resources folder. We want to return the root to the resources folder - # and then the sub path inside it - dir_1, dir_2, rel_path = path.partition("resources") - if rel_path: - return dir_1 + dir_2, rel_path - - # The same as the above but just looking for java - (dir_1, dir_2, rel_path) = path.partition("java") - if rel_path: - return dir_1 + dir_2, rel_path - - return "", path +load(":resources.bzl", _resource_paths = "paths") def expand_location(ctx, flags): if hasattr(ctx.attr, "data"): @@ -74,73 +57,56 @@ def compile_scala( expect_java_output, scalac_jvm_flags, scalac, - unused_dependency_checker_mode = "off", - unused_dependency_checker_ignored_targets = []): + dependency_info, + unused_dependency_checker_ignored_targets): # look for any plugins: input_plugins = plugins plugins = _collect_plugin_paths(plugins) internal_plugin_jars = [] - dependency_analyzer_mode = "off" compiler_classpath_jars = cjars + if dependency_info.dependency_mode != "direct": + compiler_classpath_jars = transitive_compile_jars 
optional_scalac_args = "" classpath_resources = [] if (hasattr(ctx.files, "classpath_resources")): classpath_resources = ctx.files.classpath_resources - if is_dependency_analyzer_on(ctx): - # "off" mode is used as a feature toggle, that preserves original behaviour - dependency_analyzer_mode = ctx.fragments.java.strict_java_deps + optional_scalac_args_map = {} + + if dependency_info.use_analyzer: dep_plugin = ctx.attr._dependency_analyzer_plugin plugins = depset(transitive = [plugins, dep_plugin.files]) internal_plugin_jars = ctx.files._dependency_analyzer_plugin - compiler_classpath_jars = transitive_compile_jars - direct_jars = _join_path(cjars.to_list()) + current_target = str(target_label) + optional_scalac_args_map["CurrentTarget"] = current_target + if dependency_info.need_indirect_info: transitive_cjars_list = transitive_compile_jars.to_list() indirect_jars = _join_path(transitive_cjars_list) indirect_targets = ",".join([str(labels[j.path]) for j in transitive_cjars_list]) - current_target = str(target_label) - - optional_scalac_args = """ -DirectJars: {direct_jars} -IndirectJars: {indirect_jars} -IndirectTargets: {indirect_targets} -CurrentTarget: {current_target} - """.format( - direct_jars = direct_jars, - indirect_jars = indirect_jars, - indirect_targets = indirect_targets, - current_target = current_target, - ) - - elif unused_dependency_checker_mode != "off": - unused_dependency_plugin = ctx.attr._unused_dependency_checker_plugin - plugins = depset(transitive = [plugins, unused_dependency_plugin.files]) - internal_plugin_jars = ctx.files._unused_dependency_checker_plugin - - cjars_list = cjars.to_list() - direct_jars = _join_path(cjars_list) - direct_targets = ",".join([str(labels[j.path]) for j in cjars_list]) + optional_scalac_args_map["IndirectJars"] = indirect_jars + optional_scalac_args_map["IndirectTargets"] = indirect_targets + if dependency_info.unused_deps_mode != "off": ignored_targets = ",".join([str(d) for d in 
unused_dependency_checker_ignored_targets]) + optional_scalac_args_map["UnusedDepsIgnoredTargets"] = ignored_targets - current_target = str(target_label) - - optional_scalac_args = """ -DirectJars: {direct_jars} -DirectTargets: {direct_targets} -IgnoredTargets: {ignored_targets} -CurrentTarget: {current_target} - """.format( - direct_jars = direct_jars, - direct_targets = direct_targets, - ignored_targets = ignored_targets, - current_target = current_target, - ) - if is_dependency_analyzer_off(ctx) and not is_plus_one_deps_off(ctx): - compiler_classpath_jars = transitive_compile_jars + if dependency_info.need_direct_info: + cjars_list = cjars.to_list() + if dependency_info.need_direct_jars: + direct_jars = _join_path(cjars_list) + optional_scalac_args_map["DirectJars"] = direct_jars + if dependency_info.need_direct_targets: + direct_targets = ",".join([str(labels[j.path]) for j in cjars_list]) + optional_scalac_args_map["DirectTargets"] = direct_targets + + optional_scalac_args = "\n".join([ + "{k}: {v}".format(k = k, v = v) + # We sort the arguments for input stability and reproducibility + for (k, v) in sorted(optional_scalac_args_map.items()) + ]) plugins_list = plugins.to_list() plugin_arg = _join_path(plugins_list) @@ -150,6 +116,7 @@ CurrentTarget: {current_target} toolchain = ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"] scalacopts = [ctx.expand_location(v, input_plugins) for v in toolchain.scalacopts + in_scalacopts] + resource_paths = _resource_paths(resources, resource_strip_prefix) scalac_args = """ Classpath: {cp} @@ -160,15 +127,14 @@ Manifest: {manifest} Plugins: {plugin_arg} PrintCompileTime: {print_compile_time} ExpectJavaOutput: {expect_java_output} -ResourceDests: {resource_dest} +ResourceTargets: {resource_targets} +ResourceSources: {resource_sources} ResourceJars: {resource_jars} -ResourceSrcs: {resource_src} -ResourceShortPaths: {resource_short_paths} -ResourceStripPrefix: {resource_strip_prefix} ScalacOpts: {scala_opts} 
SourceJars: {srcjars} -DependencyAnalyzerMode: {dependency_analyzer_mode} +StrictDepsMode: {strict_deps_mode} UnusedDependencyCheckerMode: {unused_dependency_checker_mode} +DependencyTrackingMethod: {dependency_tracking_method} StatsfileOutput: {statsfile_output} """.format( out = output.path, @@ -182,16 +148,12 @@ StatsfileOutput: {statsfile_output} files = _join_path(sources), srcjars = _join_path(all_srcjars.to_list()), # the resource paths need to be aligned in order - resource_src = ",".join([f.path for f in resources]), - resource_short_paths = ",".join([f.short_path for f in resources]), - resource_dest = ",".join([ - adjust_resources_path_by_default_prefixes(f.short_path)[1] - for f in resources - ]), - resource_strip_prefix = resource_strip_prefix, + resource_targets = ",".join([p[0] for p in resource_paths]), + resource_sources = ",".join([p[1] for p in resource_paths]), resource_jars = _join_path(resource_jars), - dependency_analyzer_mode = dependency_analyzer_mode, - unused_dependency_checker_mode = unused_dependency_checker_mode, + strict_deps_mode = dependency_info.strict_deps_mode, + unused_dependency_checker_mode = dependency_info.unused_deps_mode, + dependency_tracking_method = dependency_info.dependency_tracking_method, statsfile_output = statsfile.path, ) @@ -245,44 +207,17 @@ StatsfileOutput: {statsfile_output} ] + ["@" + argfile.path], ) -def _path_is_absolute(path): - # Returns true for absolute path in Linux/Mac (i.e., '/') or Windows (i.e., - # 'X:\' or 'X:/' where 'X' is a letter), false otherwise. 
- if len(path) >= 1 and path[0] == "/": - return True - if len(path) >= 3 and \ - path[0].isalpha() and \ - path[1] == ":" and \ - (path[2] == "/" or path[2] == "\\"): - return True - - return False - def runfiles_root(ctx): return "${TEST_SRCDIR}/%s" % ctx.workspace_name def java_bin(ctx): java_path = str(ctx.attr._java_runtime[java_common.JavaRuntimeInfo].java_executable_runfiles_path) - if _path_is_absolute(java_path): + if paths.is_absolute(java_path): javabin = java_path else: runfiles_root_var = runfiles_root(ctx) javabin = "%s/%s" % (runfiles_root_var, java_path) return javabin -def is_dependency_analyzer_on(ctx): - if (hasattr(ctx.attr, "_dependency_analyzer_plugin") and - # when the strict deps FT is removed the "default" check - # will be removed since "default" will mean it's turned on - ctx.fragments.java.strict_java_deps != "default" and - ctx.fragments.java.strict_java_deps != "off"): - return True - -def is_dependency_analyzer_off(ctx): - return not is_dependency_analyzer_on(ctx) - -def is_plus_one_deps_off(ctx): - return ctx.toolchains["@io_bazel_rules_scala//scala:toolchain_type"].plus_one_deps_mode == "off" - def is_windows(ctx): return ctx.configuration.host_path_separator == ";" diff --git a/scala/private/rules/scala_binary.bzl b/scala/private/rules/scala_binary.bzl index 971e24141..bfe6fa249 100644 --- a/scala/private/rules/scala_binary.bzl +++ b/scala/private/rules/scala_binary.bzl @@ -12,16 +12,17 @@ load("@io_bazel_rules_scala//scala/private:common_outputs.bzl", "common_outputs" load( "@io_bazel_rules_scala//scala/private:phases/phases.bzl", "extras_phases", - "phase_binary_compile", - "phase_binary_final", - "phase_common_collect_jars", - "phase_common_java_wrapper", - "phase_common_runfiles", - "phase_common_write_executable", + "phase_collect_jars_common", + "phase_compile_binary", + "phase_coverage_common", "phase_declare_executable", + "phase_default_info", + "phase_dependency_common", + "phase_java_wrapper_common", "phase_merge_jars", 
+ "phase_runfiles_common", "phase_scalac_provider", - "phase_unused_deps_checker", + "phase_write_executable_common", "phase_write_manifest", "run_phases", ) @@ -33,19 +34,19 @@ def _scala_binary_impl(ctx): [ ("scalac_provider", phase_scalac_provider), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), - ("collect_jars", phase_common_collect_jars), - ("java_wrapper", phase_common_java_wrapper), + ("dependency", phase_dependency_common), + ("collect_jars", phase_collect_jars_common), + ("java_wrapper", phase_java_wrapper_common), ("declare_executable", phase_declare_executable), # no need to build an ijar for an executable - ("compile", phase_binary_compile), + ("compile", phase_compile_binary), + ("coverage", phase_coverage_common), ("merge_jars", phase_merge_jars), - ("runfiles", phase_common_runfiles), - ("write_executable", phase_common_write_executable), + ("runfiles", phase_runfiles_common), + ("write_executable", phase_write_executable_common), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_binary_final), - ).final + ) _scala_binary_attrs = { "main_class": attr.string(mandatory = True), diff --git a/scala/private/rules/scala_junit_test.bzl b/scala/private/rules/scala_junit_test.bzl index f0a142c60..93792f989 100644 --- a/scala/private/rules/scala_junit_test.bzl +++ b/scala/private/rules/scala_junit_test.bzl @@ -11,17 +11,18 @@ load("@io_bazel_rules_scala//scala/private:common_outputs.bzl", "common_outputs" load( "@io_bazel_rules_scala//scala/private:phases/phases.bzl", "extras_phases", - "phase_binary_final", - "phase_common_java_wrapper", - "phase_common_runfiles", + "phase_collect_jars_junit_test", + "phase_compile_junit_test", + "phase_coverage_common", "phase_declare_executable", - "phase_junit_test_collect_jars", - "phase_junit_test_compile", - "phase_junit_test_write_executable", + "phase_default_info", + "phase_dependency_common", + "phase_java_wrapper_common", 
"phase_jvm_flags", "phase_merge_jars", + "phase_runfiles_common", "phase_scalac_provider", - "phase_unused_deps_checker", + "phase_write_executable_junit_test", "phase_write_manifest", "run_phases", ) @@ -37,20 +38,20 @@ def _scala_junit_test_impl(ctx): [ ("scalac_provider", phase_scalac_provider), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), - ("collect_jars", phase_junit_test_collect_jars), - ("java_wrapper", phase_common_java_wrapper), + ("dependency", phase_dependency_common), + ("collect_jars", phase_collect_jars_junit_test), + ("java_wrapper", phase_java_wrapper_common), ("declare_executable", phase_declare_executable), # no need to build an ijar for an executable - ("compile", phase_junit_test_compile), + ("compile", phase_compile_junit_test), + ("coverage", phase_coverage_common), ("merge_jars", phase_merge_jars), - ("runfiles", phase_common_runfiles), + ("runfiles", phase_runfiles_common), ("jvm_flags", phase_jvm_flags), - ("write_executable", phase_junit_test_write_executable), + ("write_executable", phase_write_executable_junit_test), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_binary_final), - ).final + ) _scala_junit_test_attrs = { "prefixes": attr.string_list(default = []), diff --git a/scala/private/rules/scala_library.bzl b/scala/private/rules/scala_library.bzl index 8173d2446..23805df49 100644 --- a/scala/private/rules/scala_library.bzl +++ b/scala/private/rules/scala_library.bzl @@ -19,18 +19,21 @@ load( "@io_bazel_rules_scala//scala/private:phases/phases.bzl", "extras_phases", "phase_collect_exports_jars", + "phase_collect_jars_common", + "phase_collect_jars_macro_library", "phase_collect_srcjars", - "phase_common_collect_jars", - "phase_library_compile", - "phase_library_final", - "phase_library_for_plugin_bootstrapping_collect_jars", - "phase_library_for_plugin_bootstrapping_compile", - "phase_library_runfiles", - "phase_macro_library_collect_jars", - 
"phase_macro_library_compile", + "phase_compile_common", + "phase_compile_library", + "phase_compile_library_for_plugin_bootstrapping", + "phase_compile_macro_library", + "phase_coverage_common", + "phase_coverage_library", + "phase_default_info", + "phase_dependency_common", + "phase_dependency_library_for_plugin_bootstrapping", "phase_merge_jars", + "phase_runfiles_library", "phase_scalac_provider", - "phase_unused_deps_checker", "phase_write_manifest", "run_phases", ) @@ -60,16 +63,16 @@ def _scala_library_impl(ctx): ("scalac_provider", phase_scalac_provider), ("collect_srcjars", phase_collect_srcjars), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), - ("collect_jars", phase_common_collect_jars), - ("compile", phase_library_compile), + ("dependency", phase_dependency_common), + ("collect_jars", phase_collect_jars_common), + ("compile", phase_compile_library), + ("coverage", phase_coverage_library), ("merge_jars", phase_merge_jars), - ("runfiles", phase_library_runfiles), + ("runfiles", phase_runfiles_library), ("collect_exports_jars", phase_collect_exports_jars), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_library_final), - ).final + ) _scala_library_attrs = {} @@ -138,20 +141,22 @@ def _scala_library_for_plugin_bootstrapping_impl(ctx): ("scalac_provider", phase_scalac_provider), ("collect_srcjars", phase_collect_srcjars), ("write_manifest", phase_write_manifest), - ("collect_jars", phase_library_for_plugin_bootstrapping_collect_jars), - ("compile", phase_library_for_plugin_bootstrapping_compile), + ("dependency", phase_dependency_library_for_plugin_bootstrapping), + ("collect_jars", phase_collect_jars_common), + ("compile", phase_compile_library_for_plugin_bootstrapping), ("merge_jars", phase_merge_jars), - ("runfiles", phase_library_runfiles), + ("runfiles", phase_runfiles_library), ("collect_exports_jars", phase_collect_exports_jars), + ("default_info", phase_default_info), ], - 
# fixed phase - ("final", phase_library_final), - ).final + ) # the scala compiler plugin used for dependency analysis is compiled using `scala_library`. # in order to avoid cyclic dependencies `scala_library_for_plugin_bootstrapping` was created for this purpose, # which does not contain plugin related attributes, and thus avoids the cyclic dependency issue -_scala_library_for_plugin_bootstrapping_attrs = {} +_scala_library_for_plugin_bootstrapping_attrs = { + "build_ijar": attr.bool(default = True), +} _scala_library_for_plugin_bootstrapping_attrs.update(implicit_deps) @@ -193,16 +198,16 @@ def _scala_macro_library_impl(ctx): ("scalac_provider", phase_scalac_provider), ("collect_srcjars", phase_collect_srcjars), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), - ("collect_jars", phase_macro_library_collect_jars), - ("compile", phase_macro_library_compile), + ("dependency", phase_dependency_common), + ("collect_jars", phase_collect_jars_macro_library), + ("compile", phase_compile_macro_library), + ("coverage", phase_coverage_common), ("merge_jars", phase_merge_jars), - ("runfiles", phase_library_runfiles), + ("runfiles", phase_runfiles_library), ("collect_exports_jars", phase_collect_exports_jars), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_library_final), - ).final + ) _scala_macro_library_attrs = { "main_class": attr.string(), diff --git a/scala/private/rules/scala_repl.bzl b/scala/private/rules/scala_repl.bzl index fe454f2ef..7ab862d32 100644 --- a/scala/private/rules/scala_repl.bzl +++ b/scala/private/rules/scala_repl.bzl @@ -12,16 +12,17 @@ load("@io_bazel_rules_scala//scala/private:common_outputs.bzl", "common_outputs" load( "@io_bazel_rules_scala//scala/private:phases/phases.bzl", "extras_phases", - "phase_binary_final", - "phase_common_runfiles", + "phase_collect_jars_repl", + "phase_compile_repl", + "phase_coverage_common", "phase_declare_executable", + "phase_default_info", 
+ "phase_dependency_common", + "phase_java_wrapper_repl", "phase_merge_jars", - "phase_repl_collect_jars", - "phase_repl_compile", - "phase_repl_java_wrapper", - "phase_repl_write_executable", + "phase_runfiles_common", "phase_scalac_provider", - "phase_unused_deps_checker", + "phase_write_executable_repl", "phase_write_manifest", "run_phases", ) @@ -33,20 +34,20 @@ def _scala_repl_impl(ctx): [ ("scalac_provider", phase_scalac_provider), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), + ("dependency", phase_dependency_common), # need scala-compiler for MainGenericRunner below - ("collect_jars", phase_repl_collect_jars), - ("java_wrapper", phase_repl_java_wrapper), + ("collect_jars", phase_collect_jars_repl), + ("java_wrapper", phase_java_wrapper_repl), ("declare_executable", phase_declare_executable), # no need to build an ijar for an executable - ("compile", phase_repl_compile), + ("compile", phase_compile_repl), + ("coverage", phase_coverage_common), ("merge_jars", phase_merge_jars), - ("runfiles", phase_common_runfiles), - ("write_executable", phase_repl_write_executable), + ("runfiles", phase_runfiles_common), + ("write_executable", phase_write_executable_repl), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_binary_final), - ).final + ) _scala_repl_attrs = { "jvm_flags": attr.string_list(), diff --git a/scala/private/rules/scala_test.bzl b/scala/private/rules/scala_test.bzl index 4751adce1..75e94ca52 100644 --- a/scala/private/rules/scala_test.bzl +++ b/scala/private/rules/scala_test.bzl @@ -12,17 +12,18 @@ load("@io_bazel_rules_scala//scala/private:common_outputs.bzl", "common_outputs" load( "@io_bazel_rules_scala//scala/private:phases/phases.bzl", "extras_phases", - "phase_common_java_wrapper", + "phase_collect_jars_scalatest", + "phase_compile_scalatest", + "phase_coverage_common", "phase_coverage_runfiles", "phase_declare_executable", + "phase_default_info", + 
"phase_dependency_common", + "phase_java_wrapper_common", "phase_merge_jars", + "phase_runfiles_scalatest", "phase_scalac_provider", - "phase_scalatest_collect_jars", - "phase_scalatest_compile", - "phase_scalatest_final", - "phase_scalatest_runfiles", - "phase_scalatest_write_executable", - "phase_unused_deps_checker", + "phase_write_executable_scalatest", "phase_write_manifest", "run_phases", ) @@ -34,20 +35,20 @@ def _scala_test_impl(ctx): [ ("scalac_provider", phase_scalac_provider), ("write_manifest", phase_write_manifest), - ("unused_deps_checker", phase_unused_deps_checker), - ("collect_jars", phase_scalatest_collect_jars), - ("java_wrapper", phase_common_java_wrapper), + ("dependency", phase_dependency_common), + ("collect_jars", phase_collect_jars_scalatest), + ("java_wrapper", phase_java_wrapper_common), ("declare_executable", phase_declare_executable), # no need to build an ijar for an executable - ("compile", phase_scalatest_compile), + ("compile", phase_compile_scalatest), + ("coverage", phase_coverage_common), ("merge_jars", phase_merge_jars), - ("runfiles", phase_scalatest_runfiles), + ("runfiles", phase_runfiles_scalatest), ("coverage_runfiles", phase_coverage_runfiles), - ("write_executable", phase_scalatest_write_executable), + ("write_executable", phase_write_executable_scalatest), + ("default_info", phase_default_info), ], - # fixed phase - ("final", phase_scalatest_final), - ).final + ) _scala_test_attrs = { "main_class": attr.string( diff --git a/scala/scala_cross_version.bzl b/scala/scala_cross_version.bzl index 925ff8150..d532fcfcb 100644 --- a/scala/scala_cross_version.bzl +++ b/scala/scala_cross_version.bzl @@ -31,6 +31,14 @@ def default_scala_version_jar_shas(): "scala_reflect": "6ba385b450a6311a15c918cf8688b9af9327c6104f0ecbd35933cfcd3095fe04", } +def default_maven_server_urls(): + return [ + "https://repo.maven.apache.org/maven2", + "https://maven-central.storage-download.googleapis.com/maven2", + 
"https://mirror.bazel.build/repo1.maven.org/maven2", + "https://jcenter.bintray.com", + ] + def extract_major_version(scala_version): """Return major Scala version given a full version, e.g. "2.11.11" -> "2.11" """ return scala_version[:scala_version.find(".", 2)] diff --git a/scala/scala_toolchain.bzl b/scala/scala_toolchain.bzl index f57e23302..055b98d06 100644 --- a/scala/scala_toolchain.bzl +++ b/scala/scala_toolchain.bzl @@ -3,12 +3,74 @@ load( _ScalacProvider = "ScalacProvider", ) +def _compute_dependency_mode(input_dependency_mode, input_plus_one_deps_mode): + if input_plus_one_deps_mode == "on": + return "plus-one" + + if input_dependency_mode == "": + return "direct" + + return input_dependency_mode + +def _compute_strict_deps_mode(input_strict_deps_mode, dependency_mode): + if dependency_mode == "direct": + return "off" + if input_strict_deps_mode == "default": + if dependency_mode == "transitive": + return "error" + else: + return "off" + return input_strict_deps_mode + +def _compute_dependency_tracking_method(input_dependency_tracking_method): + if input_dependency_tracking_method == "default": + return "high-level" + return input_dependency_tracking_method + def _scala_toolchain_impl(ctx): + if ctx.attr.plus_one_deps_mode != "": + print( + "Setting plus_one_deps_mode on toolchain is deprecated." 
+ + " Use 'dependency_mode = \"plus-one\"' instead", + ) + if ctx.attr.dependency_mode != "" and ctx.attr.plus_one_deps_mode != "": + fail("Cannot set both dependency_mode and plus_one_deps_mode on toolchain") + + if ctx.fragments.java.strict_java_deps != "default" and ctx.fragments.java.strict_java_deps != "off": + dependency_mode = "transitive" + strict_deps_mode = ctx.fragments.java.strict_java_deps + unused_dependency_checker_mode = "off" + dependency_tracking_method = "high-level" + else: + dependency_mode = _compute_dependency_mode( + ctx.attr.dependency_mode, + ctx.attr.plus_one_deps_mode, + ) + strict_deps_mode = _compute_strict_deps_mode( + ctx.attr.strict_deps_mode, + dependency_mode, + ) + + unused_dependency_checker_mode = ctx.attr.unused_dependency_checker_mode + dependency_tracking_method = _compute_dependency_tracking_method(ctx.attr.dependency_tracking_method) + + # Final quality checks to possibly detect buggy code above + if dependency_mode not in ("direct", "plus-one", "transitive"): + fail("Internal error: invalid dependency_mode " + dependency_mode) + + if strict_deps_mode not in ("off", "warn", "error"): + fail("Internal error: invalid strict_deps_mode " + strict_deps_mode) + + if dependency_tracking_method not in ("ast", "high-level"): + fail("Internal error: invalid dependency_tracking_method " + dependency_tracking_method) + toolchain = platform_common.ToolchainInfo( scalacopts = ctx.attr.scalacopts, scalac_provider_attr = ctx.attr.scalac_provider_attr, - unused_dependency_checker_mode = ctx.attr.unused_dependency_checker_mode, - plus_one_deps_mode = ctx.attr.plus_one_deps_mode, + dependency_mode = dependency_mode, + strict_deps_mode = strict_deps_mode, + unused_dependency_checker_mode = unused_dependency_checker_mode, + dependency_tracking_method = dependency_tracking_method, enable_code_coverage_aspect = ctx.attr.enable_code_coverage_aspect, scalac_jvm_flags = ctx.attr.scalac_jvm_flags, scala_test_jvm_flags = ctx.attr.scala_test_jvm_flags, 
@@ -23,13 +85,23 @@ scala_toolchain = rule( default = "@io_bazel_rules_scala//scala:scalac_default", providers = [_ScalacProvider], ), + "dependency_mode": attr.string( + values = ["direct", "plus-one", "transitive", ""], + ), + "strict_deps_mode": attr.string( + default = "default", + values = ["off", "warn", "error", "default"], + ), "unused_dependency_checker_mode": attr.string( default = "off", values = ["off", "warn", "error"], ), + "dependency_tracking_method": attr.string( + default = "default", + values = ["ast", "high-level", "default"], + ), "plus_one_deps_mode": attr.string( - default = "off", - values = ["off", "on"], + values = ["off", "on", ""], ), "enable_code_coverage_aspect": attr.string( default = "off", @@ -38,4 +110,5 @@ scala_toolchain = rule( "scalac_jvm_flags": attr.string_list(), "scala_test_jvm_flags": attr.string_list(), }, + fragments = ["java"], ) diff --git a/scala/scalafmt/BUILD b/scala/scalafmt/BUILD new file mode 100644 index 000000000..1a66f0fd3 --- /dev/null +++ b/scala/scalafmt/BUILD @@ -0,0 +1,36 @@ +load("//scala:scala.bzl", "scala_binary") + +filegroup( + name = "runner", + srcs = ["private/format.template.sh"], + visibility = ["//visibility:public"], +) + +filegroup( + name = "testrunner", + srcs = ["private/format-test.template.sh"], + visibility = ["//visibility:public"], +) + +scala_binary( + name = "scalafmt", + srcs = ["scalafmt/ScalafmtRunner.scala"], + main_class = "io.bazel.rules_scala.scalafmt.ScalafmtRunner", + visibility = ["//visibility:public"], + deps = [ + "//src/java/io/bazel/rulesscala/worker", + "@com_geirsson_metaconfig_core", + "@org_scalameta_parsers", + "@org_scalameta_scalafmt_core", + ], +) + +load( + "//scala/scalafmt:phase_scalafmt_ext.bzl", + "scalafmt_singleton", +) + +scalafmt_singleton( + name = "phase_scalafmt", + visibility = ["//visibility:public"], +) diff --git a/scala/scalafmt/phase_scalafmt_ext.bzl b/scala/scalafmt/phase_scalafmt_ext.bzl new file mode 100644 index 000000000..858ee0679 --- 
/dev/null +++ b/scala/scalafmt/phase_scalafmt_ext.bzl @@ -0,0 +1,55 @@ +load( + "//scala:advanced_usage/providers.bzl", + _ScalaRulePhase = "ScalaRulePhase", +) +load( + "//scala/private:phases/phases.bzl", + _phase_scalafmt = "phase_scalafmt", +) + +ext_scalafmt = { + "attrs": { + "config": attr.label( + allow_single_file = [".conf"], + default = "@scalafmt_default//:config", + doc = "The Scalafmt configuration file.", + ), + "format": attr.bool( + default = False, + doc = "Whether to enable formatting.", + ), + "_fmt": attr.label( + cfg = "host", + default = "//scala/scalafmt", + executable = True, + ), + "_runner": attr.label( + allow_single_file = True, + default = "//scala/scalafmt:runner", + ), + "_testrunner": attr.label( + allow_single_file = True, + default = "//scala/scalafmt:testrunner", + ), + }, + "outputs": { + "scalafmt_runner": "%{name}.format", + "scalafmt_testrunner": "%{name}.format-test", + }, + "phase_providers": [ + "//scala/scalafmt:phase_scalafmt", + ], +} + +def _scalafmt_singleton_implementation(ctx): + return [ + _ScalaRulePhase( + custom_phases = [ + ("$", "", "scalafmt", _phase_scalafmt), + ], + ), + ] + +scalafmt_singleton = rule( + implementation = _scalafmt_singleton_implementation, +) diff --git a/scala/scalafmt/private/format-test.template.sh b/scala/scalafmt/private/format-test.template.sh new file mode 100644 index 000000000..0ca9d99b9 --- /dev/null +++ b/scala/scalafmt/private/format-test.template.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e +WORKSPACE_ROOT="${1:-$BUILD_WORKSPACE_DIRECTORY}" +RUNPATH="${TEST_SRCDIR-$0.runfiles}"/%workspace% +RUNPATH=(${RUNPATH//bin/ }) +RUNPATH="${RUNPATH[0]}"bin + +EXIT=0 +while read original formatted; do + if [[ ! -z "$original" ]] && [[ ! -z "$formatted" ]]; then + if ! 
cmp -s "$WORKSPACE_ROOT/$original" "$RUNPATH/$formatted"; then + echo $original + diff "$WORKSPACE_ROOT/$original" "$RUNPATH/$formatted" || true + EXIT=1 + fi + fi +done < "$RUNPATH"/%manifest% + +exit $EXIT diff --git a/scala/scalafmt/private/format.template.sh b/scala/scalafmt/private/format.template.sh new file mode 100644 index 000000000..63e5adc0a --- /dev/null +++ b/scala/scalafmt/private/format.template.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e +WORKSPACE_ROOT="${1:-$BUILD_WORKSPACE_DIRECTORY}" +RUNPATH="${TEST_SRCDIR-$0.runfiles}"/%workspace% +RUNPATH=(${RUNPATH//bin/ }) +RUNPATH="${RUNPATH[0]}"bin + +while read original formatted; do + if [[ ! -z "$original" ]] && [[ ! -z "$formatted" ]]; then + if ! cmp -s "$WORKSPACE_ROOT/$original" "$RUNPATH/$formatted"; then + echo "Formatting $original" + cp "$RUNPATH/$formatted" "$WORKSPACE_ROOT/$original" + fi + fi +done < "$RUNPATH"/%manifest% diff --git a/scala/scalafmt/scalafmt/ScalafmtRunner.scala b/scala/scalafmt/scalafmt/ScalafmtRunner.scala new file mode 100644 index 000000000..5a4a870f1 --- /dev/null +++ b/scala/scalafmt/scalafmt/ScalafmtRunner.scala @@ -0,0 +1,51 @@ +package io.bazel.rules_scala.scalafmt + +import io.bazel.rulesscala.worker.{GenericWorker, Processor}; +import java.io.File +import java.nio.file.Files +import org.scalafmt.Scalafmt +import org.scalafmt.config.Config +import org.scalafmt.util.FileOps +import scala.annotation.tailrec +import scala.collection.JavaConverters._ +import scala.io.Codec + +object ScalafmtRunner extends GenericWorker(new ScalafmtProcessor) { + def main(args: Array[String]) { + try run(args) + catch { + case x: Exception => + x.printStackTrace() + System.exit(1) + } + } +} + +class ScalafmtProcessor extends Processor { + def processRequest(args: java.util.List[String]) { + val argName = List("config", "input", "output") + val argFile = args.asScala.map{x => new File(x)} + val namespace = argName.zip(argFile).toMap + + val source = FileOps.readFile(namespace.getOrElse("input", 
new File("")))(Codec.UTF8) + + val config = Config.fromHoconFile(namespace.getOrElse("config", new File(""))).get + @tailrec + def format(code: String): String = { + val formatted = Scalafmt.format(code, config).get + if (code == formatted) code else format(formatted) + } + + val output = try { + format(source) + } catch { + case e @ (_: org.scalafmt.Error | _: scala.meta.parsers.ParseException) => { + System.out.println("Unable to format file due to bug in scalafmt") + System.out.println(e.toString) + source + } + } + + Files.write(namespace.getOrElse("output", new File("")).toPath, output.getBytes) + } +} diff --git a/scala/scalafmt/scalafmt_repositories.bzl b/scala/scalafmt/scalafmt_repositories.bzl new file mode 100644 index 000000000..8e304afc8 --- /dev/null +++ b/scala/scalafmt/scalafmt_repositories.bzl @@ -0,0 +1,500 @@ +load( + "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", + _default_scala_version = "default_scala_version", + _default_scala_version_jar_shas = "default_scala_version_jar_shas", + _extract_major_version = "extract_major_version", +) +load( + "@io_bazel_rules_scala//scala:scala_maven_import_external.bzl", + _scala_maven_import_external = "scala_maven_import_external", +) + +def scalafmt_default_config(path = ".scalafmt.conf"): + build = [] + build.append("filegroup(") + build.append(" name = \"config\",") + build.append(" srcs = [\"{}\"],".format(path)) + build.append(" visibility = [\"//visibility:public\"],") + build.append(")") + native.new_local_repository(name = "scalafmt_default", build_file_content = "\n".join(build), path = "") + +def _default_scala_extra_jars(): + return { + "2.11": { + "org_scalameta_common": { + "version": "4.3.0", + "sha256": "6330798bcbd78d14d371202749f32efda0465c3be5fd057a6055a67e21335ba0", + }, + "org_scalameta_fastparse": { + "version": "1.0.1", + "sha256": "49ecc30a4b47efc0038099da0c97515cf8f754ea631ea9f9935b36ca7d41b733", + }, + "org_scalameta_fastparse_utils": 
{ + "version": "1.0.1", + "sha256": "93f58db540e53178a686621f7a9c401307a529b68e051e38804394a2a86cea94", + }, + "org_scala_lang_modules_scala_collection_compat": { + "version": "2.1.2", + "sha256": "e9667b8b7276aeb42599f536fe4d7caab06eabc55e9995572267ad60c7a11c8b", + }, + "org_scalameta_parsers": { + "version": "4.3.0", + "sha256": "724382abfac27b32dec6c21210562bc7e1b09b5268ccb704abe66dcc8844beeb", + }, + "org_scalameta_scalafmt_core": { + "version": "2.3.2", + "sha256": "6bf391e0e1d7369fda83ddaf7be4d267bf4cbccdf2cc31ff941999a78c30e67f", + }, + "org_scalameta_scalameta": { + "version": "4.3.0", + "sha256": "94fe739295447cd3ae877c279ccde1def06baea02d9c76a504dda23de1d90516", + }, + "org_scalameta_trees": { + "version": "4.3.0", + "sha256": "d24d5d63d8deafe646d455c822593a66adc6fdf17c8373754a3834a6e92a8a72", + }, + "org_typelevel_paiges_core": { + "version": "0.2.4", + "sha256": "aa66fbe0457ca5cb5b9e522d4cb873623bb376a2e1ff58c464b5194c1d87c241", + }, + "org_scala_lang_scalap": { + "version": "2.11.12", + "sha256": "a6dd7203ce4af9d6185023d5dba9993eb8e80584ff4b1f6dec574a2aba4cd2b7", + }, + "com_thesamet_scalapb_lenses": { + "version": "0.9.0", + "sha256": "f4809760edee6abc97a7fe9b7fd6ae5fe1006795b1dc3963ab4e317a72f1a385", + }, + "com_thesamet_scalapb_scalapb_runtime": { + "version": "0.9.0", + "sha256": "ab1e449a18a9ce411eb3fec31bdbca5dd5fae4475b1557bb5e235a7b54738757", + }, + "com_lihaoyi_fansi": { + "version": "0.2.5", + "sha256": "1ff0a8304f322c1442e6bcf28fab07abf3cf560dd24573dbe671249aee5fc488", + }, + "com_lihaoyi_fastparse": { + "version": "2.1.2", + "sha256": "5c5d81f90ada03ac5b21b161864a52558133951031ee5f6bf4d979e8baa03628", + }, + "com_lihaoyi_pprint": { + "version": "0.5.3", + "sha256": "fb5e4921e7dff734d049e752a482d3a031380d3eea5caa76c991312dee9e6991", + }, + "com_lihaoyi_sourcecode": { + "version": "0.1.7", + "sha256": "33516d7fd9411f74f05acfd5274e1b1889b7841d1993736118803fc727b2d5fc", + }, + "com_geirsson_metaconfig_core": { + "version": "0.9.4", + "sha256": 
"5d5704a1f1c4f74aed26248eeb9b577274d570b167cec0bf51d2908609c29118", + }, + "com_geirsson_metaconfig_typesafe_config": { + "version": "0.9.4", + "sha256": "52d2913640f4592402aeb2f0cec5004893d02acf26df4aa1cf8d4dcb0d2b21c7", + }, + }, + "2.12": { + "org_scalameta_common": { + "version": "4.3.0", + "sha256": "3bdb2ff71d3e86f94b4d31d2c40442f533655860749a92fd17e1f29b8deb8baa", + }, + "org_scalameta_fastparse": { + "version": "1.0.1", + "sha256": "387ced762e93915c5f87fed59d8453e404273f49f812d413405696ce20273aa5", + }, + "org_scalameta_fastparse_utils": { + "version": "1.0.1", + "sha256": "9d8ad97778ef9aedef5d4190879ed0ec54969e2fc951576fe18746ae6ce6cfcf", + }, + "org_scala_lang_modules_scala_collection_compat": { + "version": "2.1.2", + "sha256": "8aab3e1f9dd7bc392a2e27cf168af94fdc7cc2752131fc852192302fb21efdb4", + }, + "org_scalameta_parsers": { + "version": "4.3.0", + "sha256": "d9f87d03b6b5e942f263db6dab75937493bfcb0fe7cfe2cda6567bf30f23ff3a", + }, + "org_scalameta_scalafmt_core": { + "version": "2.3.2", + "sha256": "4788e2045e99f4624162d3182016a05032a7ab1324c4a28af433aa070f916773", + }, + "org_scalameta_scalameta": { + "version": "4.3.0", + "sha256": "4d9487b434cbe9d89033824a4fc902dc7c782eea94961e8575df91ae96b10d6a", + }, + "org_scalameta_trees": { + "version": "4.3.0", + "sha256": "020b53681dd8e148d74ffa282276994bcb0f06c3425fb9a4bb9f8d161e22187a", + }, + "org_typelevel_paiges_core": { + "version": "0.2.4", + "sha256": "594ca130526023e80549484e45400d09810fa39d9fd6b4663830a00be2a8556a", + }, + "org_scala_lang_scalap": { + "version": "2.12.10", + "sha256": "4641b0a55fe1ebec995b4daea9183c21651c03f77d2ed08b345507474eeabe72", + }, + "com_thesamet_scalapb_lenses": { + "version": "0.9.0", + "sha256": "0a2fff4de17d270cea561618090c21d50bc891d82c6f9dfccdc20568f18d0260", + }, + "com_thesamet_scalapb_scalapb_runtime": { + "version": "0.9.0", + "sha256": "b905fa66b3fd0fabf3114105cd73ae2bdddbb6e13188a6538a92ae695e7ad6ed", + }, + "com_lihaoyi_fansi": { + "version": "0.2.5", + 
"sha256": "7d752240ec724e7370903c25b69088922fa3fb6831365db845cd72498f826eca", + }, + "com_lihaoyi_fastparse": { + "version": "2.1.2", + "sha256": "92a98f89c4f9559715124599ee5ce8f0d36ee326f5c7ef88b51487de39a3602e", + }, + "com_lihaoyi_pprint": { + "version": "0.5.3", + "sha256": "2e18aa0884870537bf5c562255fc759d4ebe360882b5cb2141b30eda4034c71d", + }, + "com_lihaoyi_sourcecode": { + "version": "0.1.7", + "sha256": "f07d79f0751ac275cc09b92caf3618f0118d153da7868b8f0c9397ce93c5f926", + }, + "com_geirsson_metaconfig_core": { + "version": "0.9.4", + "sha256": "970b3d74fc9b2982d9fb31d93f460000b41fff21c0b9d9ef9476ed333a010b2a", + }, + "com_geirsson_metaconfig_typesafe_config": { + "version": "0.9.4", + "sha256": "3165f30a85d91de7f8ba714e685a6b822bd1cbb365946f5d708163725df3ef5d", + }, + }, + } + +def scalafmt_repositories( + scala_version_shas = ( + _default_scala_version(), + _default_scala_version_jar_shas(), + ), + maven_servers = _default_maven_server_urls(), + scala_extra_jars = _default_scala_extra_jars()): + (scala_version, scala_version_jar_shas) = scala_version_shas + major_version = _extract_major_version(scala_version) + + scala_version_extra_jars = scala_extra_jars[major_version] + + _scala_maven_import_external( + name = "org_scalameta_common", + artifact = "org.scalameta:common_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_common"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_common"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_sourcecode", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_fastparse", + artifact = "org.scalameta:fastparse_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = 
scala_version_extra_jars["org_scalameta_fastparse"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_fastparse"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_sourcecode", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "@org_scalameta_fastparse_utils", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_fastparse_utils", + artifact = "org.scalameta:fastparse-utils_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_fastparse_utils"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_fastparse_utils"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_sourcecode", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scala_lang_modules_scala_collection_compat", + artifact = "org.scala-lang.modules:scala-collection-compat_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scala_lang_modules_scala_collection_compat"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scala_lang_modules_scala_collection_compat"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_parsers", + artifact = "org.scalameta:parsers_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_parsers"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_parsers"]["sha256"], + fetch_sources = 
True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "@org_scalameta_trees", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_scalafmt_core", + artifact = "org.scalameta:scalafmt-core_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_scalafmt_core"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_scalafmt_core"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_geirsson_metaconfig_core", + "@com_geirsson_metaconfig_typesafe_config", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "@org_scalameta_scalameta", + "@org_scala_lang_modules_scala_collection_compat", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_scalameta", + artifact = "org.scalameta:scalameta_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_scalameta"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_scalameta"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "@org_scala_lang_scalap", + "@org_scalameta_parsers", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scalameta_trees", + artifact = "org.scalameta:trees_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_scalameta_trees"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scalameta_trees"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + 
"@com_thesamet_scalapb_scalapb_runtime", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "@org_scalameta_common", + "@org_scalameta_fastparse", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_typelevel_paiges_core", + artifact = "org.typelevel:paiges-core_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["org_typelevel_paiges_core"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_typelevel_paiges_core"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_typesafe_config", + artifact = "com.typesafe:config:1.3.3", + artifact_sha256 = "b5f1d6071f1548d05be82f59f9039c7d37a1787bd8e3c677e31ee275af4a4621", + srcjar_sha256 = "fcd7c3070417c776b313cc559665c035d74e3a2b40a89bbb0e9bff6e567c9cc8", + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "org_scala_lang_scalap", + artifact = "org.scala-lang:scalap:{extra_jar_version}".format( + extra_jar_version = scala_version_extra_jars["org_scala_lang_scalap"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["org_scala_lang_scalap"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@io_bazel_rules_scala_scala_compiler", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_thesamet_scalapb_lenses", + artifact = "com.thesamet.scalapb:lenses_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_thesamet_scalapb_lenses"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_thesamet_scalapb_lenses"]["sha256"], + fetch_sources = True, + 
licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_thesamet_scalapb_scalapb_runtime", + artifact = "com.thesamet.scalapb:scalapb-runtime_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_thesamet_scalapb_scalapb_runtime"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_thesamet_scalapb_scalapb_runtime"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_google_protobuf_protobuf_java", + "@com_lihaoyi_fastparse", + "@com_thesamet_scalapb_lenses", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_lihaoyi_fansi", + artifact = "com.lihaoyi:fansi_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_lihaoyi_fansi"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_lihaoyi_fansi"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_sourcecode", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_lihaoyi_fastparse", + artifact = "com.lihaoyi:fastparse_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_lihaoyi_fastparse"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_lihaoyi_fastparse"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_sourcecode", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_lihaoyi_pprint", + artifact = 
"com.lihaoyi:pprint_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_lihaoyi_pprint"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_lihaoyi_pprint"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_fansi", + "@com_lihaoyi_sourcecode", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_lihaoyi_sourcecode", + artifact = "com.lihaoyi:sourcecode_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_lihaoyi_sourcecode"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_lihaoyi_sourcecode"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_google_protobuf_protobuf_java", + artifact = "com.google.protobuf:protobuf-java:3.10.0", + artifact_sha256 = "161d7d61a8cb3970891c299578702fd079646e032329d6c2cabf998d191437c9", + srcjar_sha256 = "47012b36fcd7c4325e07a3a3b43c72e1b2d7a7d79d8e2605f2327b1e81348133", + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_geirsson_metaconfig_core", + artifact = "com.geirsson:metaconfig-core_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_geirsson_metaconfig_core"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_geirsson_metaconfig_core"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_lihaoyi_pprint", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + 
"@org_typelevel_paiges_core", + "@org_scala_lang_modules_scala_collection_compat", + ], + server_urls = maven_servers, + ) + + _scala_maven_import_external( + name = "com_geirsson_metaconfig_typesafe_config", + artifact = "com.geirsson:metaconfig-typesafe-config_{major_version}:{extra_jar_version}".format( + major_version = major_version, + extra_jar_version = scala_version_extra_jars["com_geirsson_metaconfig_typesafe_config"]["version"], + ), + artifact_sha256 = scala_version_extra_jars["com_geirsson_metaconfig_typesafe_config"]["sha256"], + fetch_sources = True, + licenses = ["notice"], # Apache 2.0 + deps = [ + "@com_geirsson_metaconfig_core", + "@com_typesafe_config", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "@org_scala_lang_modules_scala_collection_compat", + ], + server_urls = maven_servers, + ) diff --git a/scala/scalatest/BUILD b/scala/scalatest/BUILD index 2e4ad67bb..4e7439e96 100644 --- a/scala/scalatest/BUILD +++ b/scala/scalatest/BUILD @@ -6,7 +6,7 @@ scala_import( name = "scalatest", jars = [], exports = [ - "@io_bazel_rules_scala_scalactic", - "@io_bazel_rules_scala_scalatest", + "//external:io_bazel_rules_scala/dependency/scala/scalactic/scalactic", + "//external:io_bazel_rules_scala/dependency/scala/scalatest/scalatest", ], ) diff --git a/scala/unstable/BUILD b/scala/unstable/BUILD new file mode 100644 index 000000000..e69de29bb diff --git a/scala/unstable/defs.bzl b/scala/unstable/defs.bzl new file mode 100644 index 000000000..b228e7d32 --- /dev/null +++ b/scala/unstable/defs.bzl @@ -0,0 +1,40 @@ +""" +Starlark rules for building Scala projects. + +These are the core rules under active development. Their APIs are +not guaranteed stable and we anticipate some breaking changes. + +We do not recommend using these APIs for production codebases. 
Instead, +use the stable rules exported by scala.bzl: + +``` +load( + "@io_bazel_rules_scala//scala:scala.bzl", + "scala_library", + "scala_binary", + "scala_test" +) +``` + +""" + +load( + "@io_bazel_rules_scala//scala/private:rules/scala_binary.bzl", + _make_scala_binary = "make_scala_binary", +) +load( + "@io_bazel_rules_scala//scala/private:rules/scala_library.bzl", + _make_scala_library = "make_scala_library", +) +load( + "@io_bazel_rules_scala//scala/private:rules/scala_test.bzl", + _make_scala_test = "make_scala_test", +) + +make_scala_library = _make_scala_library +make_scala_binary = _make_scala_binary +make_scala_test = _make_scala_test + +scala_library = _make_scala_library() +scala_binary = _make_scala_binary() +scala_test = _make_scala_test() diff --git a/scala_proto/BUILD b/scala_proto/BUILD index 55fe207ad..a7c0bc0c9 100644 --- a/scala_proto/BUILD +++ b/scala_proto/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_library") load("//scala_proto:scala_proto_toolchain.bzl", "scala_proto_toolchain") load("//scala_proto:default_dep_sets.bzl", "DEFAULT_SCALAPB_COMPILE_DEPS", "DEFAULT_SCALAPB_GRPC_DEPS") diff --git a/scala_proto/private/proto_to_scala_src.bzl b/scala_proto/private/proto_to_scala_src.bzl index b3f7727de..c2b7db27f 100644 --- a/scala_proto/private/proto_to_scala_src.bzl +++ b/scala_proto/private/proto_to_scala_src.bzl @@ -2,7 +2,6 @@ load( "//scala/private:common.bzl", "write_manifest_file", ) -load("//scala/private:rule_impls.bzl", "compile_scala") def _root_path(f): if f.is_source: diff --git a/scala_proto/private/scala_proto_default_repositories.bzl b/scala_proto/private/scala_proto_default_repositories.bzl index 8a1899345..d93c5b481 100644 --- a/scala_proto/private/scala_proto_default_repositories.bzl +++ b/scala_proto/private/scala_proto_default_repositories.bzl @@ -1,5 +1,6 @@ load( "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = 
"default_scala_version", _extract_major_version = "extract_major_version", _scala_mvn_artifact = "scala_mvn_artifact", @@ -11,7 +12,7 @@ load( def scala_proto_default_repositories( scala_version = _default_scala_version(), - maven_servers = ["https://repo.maven.apache.org/maven2"]): + maven_servers = _default_maven_server_urls()): major_version = _extract_major_version(scala_version) scala_jar_shas = { diff --git a/scala_proto/private/scalapb_aspect.bzl b/scala_proto/private/scalapb_aspect.bzl index 32b8f6469..0844933f2 100644 --- a/scala_proto/private/scalapb_aspect.bzl +++ b/scala_proto/private/scalapb_aspect.bzl @@ -1,7 +1,6 @@ -load( - "//scala/private:common.bzl", - "write_manifest_file", -) +load("@rules_proto//proto:defs.bzl", "ProtoInfo") +load("//scala/private:common.bzl", "write_manifest_file") +load("//scala/private:dependency.bzl", "legacy_unclear_dependency_info_for_protobuf_scrooge") load("//scala/private:rule_impls.bzl", "compile_scala") load("//scala_proto/private:proto_to_scala_src.bzl", "proto_to_scala_src") @@ -51,7 +50,9 @@ def _compile_scala( output, scalapb_jar, deps_java_info, - implicit_deps): + implicit_deps, + resources, + resource_strip_prefix): manifest = ctx.actions.declare_file( label.name + "_MANIFEST.MF", sibling = scalapb_jar, @@ -78,8 +79,8 @@ def _compile_scala( all_srcjars = depset([scalapb_jar]), transitive_compile_jars = merged_deps.transitive_compile_time_jars, plugins = [], - resource_strip_prefix = "", - resources = [], + resource_strip_prefix = resource_strip_prefix, + resources = resources, resource_jars = [], labels = {}, in_scalacopts = [], @@ -87,6 +88,8 @@ def _compile_scala( expect_java_output = False, scalac_jvm_flags = [], scalac = scalac, + dependency_info = legacy_unclear_dependency_info_for_protobuf_scrooge(ctx), + unused_dependency_checker_ignored_targets = [], ) return JavaInfo( @@ -193,6 +196,8 @@ def _scalapb_aspect_impl(target, ctx): scalapb_file, deps, imps, + compile_protos, + "" if 
target_ti.proto_source_root == "." else target_ti.proto_source_root, ) else: # this target is only an aggregation target diff --git a/scala_proto/scala_proto.bzl b/scala_proto/scala_proto.bzl index 4bb8211cf..2dd296f86 100644 --- a/scala_proto/scala_proto.bzl +++ b/scala_proto/scala_proto.bzl @@ -1,5 +1,10 @@ +load( + "@rules_proto//proto:defs.bzl", + "ProtoInfo", +) load( "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = "default_scala_version", ) load( @@ -29,7 +34,7 @@ def register_default_proto_dependencies(): def scala_proto_repositories( scala_version = _default_scala_version(), - maven_servers = ["https://repo.maven.apache.org/maven2"]): + maven_servers = _default_maven_server_urls()): ret = scala_proto_default_repositories(scala_version, maven_servers) register_default_proto_dependencies() return ret diff --git a/scripts/ij.bazelproject b/scripts/ij.bazelproject index 6b197ca27..fc3ee47df 100644 --- a/scripts/ij.bazelproject +++ b/scripts/ij.bazelproject @@ -7,10 +7,10 @@ directories: targets: //test/... - -test/src/main/scala/scala/test/twitter_scrooge/... + -test/src/main/scala/scalarules/test/twitter_scrooge/... //jmh/... //junit/... - //third_party/plugin/... + //third_party/... //scala/... //specs2/... //src/... 
@@ -21,4 +21,4 @@ targets: additional_languages: scala -bazel_binary: /usr/local/bin/bazel \ No newline at end of file +bazel_binary: /usr/local/bin/bazel diff --git a/specs2/BUILD b/specs2/BUILD index d610597db..fda8ee13e 100644 --- a/specs2/BUILD +++ b/specs2/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_import") + package(default_visibility = ["//visibility:public"]) java_import( diff --git a/specs2/specs2.bzl b/specs2/specs2.bzl index a9385daf7..623d0e0db 100644 --- a/specs2/specs2.bzl +++ b/specs2/specs2.bzl @@ -1,5 +1,6 @@ load( "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = "default_scala_version", _extract_major_version = "extract_major_version", _scala_mvn_artifact = "scala_mvn_artifact", @@ -14,7 +15,7 @@ def specs2_version(): def specs2_repositories( scala_version = _default_scala_version(), - maven_servers = ["https://repo.maven.apache.org/maven2"]): + maven_servers = _default_maven_server_urls()): major_version = _extract_major_version(scala_version) scala_jar_shas = { @@ -41,6 +42,7 @@ def specs2_repositories( major_version, ), artifact_sha256 = scala_version_jar_shas["specs2_common"], + deps = ["@io_bazel_rules_scala_org_specs2_specs2_fp"], licenses = ["notice"], server_urls = maven_servers, ) @@ -52,6 +54,10 @@ def specs2_repositories( major_version, ), artifact_sha256 = scala_version_jar_shas["specs2_core"], + deps = [ + "@io_bazel_rules_scala_org_specs2_specs2_common", + "@io_bazel_rules_scala_org_specs2_specs2_matcher", + ], licenses = ["notice"], server_urls = maven_servers, ) @@ -74,6 +80,7 @@ def specs2_repositories( major_version, ), artifact_sha256 = scala_version_jar_shas["specs2_matcher"], + deps = ["@io_bazel_rules_scala_org_specs2_specs2_common"], licenses = ["notice"], server_urls = maven_servers, ) diff --git a/specs2/specs2_junit.bzl b/specs2/specs2_junit.bzl index aa04e25ac..9c9885b2c 100644 --- a/specs2/specs2_junit.bzl +++ 
b/specs2/specs2_junit.bzl @@ -7,6 +7,7 @@ load( load("//junit:junit.bzl", "junit_repositories") load( "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = "default_scala_version", _extract_major_version = "extract_major_version", _scala_mvn_artifact = "scala_mvn_artifact", @@ -18,7 +19,7 @@ load( def specs2_junit_repositories( scala_version = _default_scala_version(), - maven_servers = ["https://repo.maven.apache.org/maven2"]): + maven_servers = _default_maven_server_urls()): major_version = _extract_major_version(scala_version) specs2_repositories(scala_version, maven_servers) @@ -41,6 +42,7 @@ def specs2_junit_repositories( major_version, ), artifact_sha256 = scala_jar_shas[major_version]["specs2_junit"], + deps = ["@io_bazel_rules_scala_org_specs2_specs2_core"], licenses = ["notice"], server_urls = maven_servers, ) diff --git a/src/java/com/google/devtools/build/lib/BUILD b/src/java/com/google/devtools/build/lib/BUILD deleted file mode 100644 index 971837d85..000000000 --- a/src/java/com/google/devtools/build/lib/BUILD +++ /dev/null @@ -1,11 +0,0 @@ -java_library( - name = "worker", - srcs = ["WorkerProtocol.java"], - visibility = ["//visibility:public"], - exports = [ - "//external:io_bazel_rules_scala/dependency/com_google_protobuf/protobuf_java", - ], - deps = [ - "//external:io_bazel_rules_scala/dependency/com_google_protobuf/protobuf_java", - ], -) diff --git a/src/java/com/google/devtools/build/lib/WorkerProtocol.java b/src/java/com/google/devtools/build/lib/WorkerProtocol.java deleted file mode 100644 index 3db880a94..000000000 --- a/src/java/com/google/devtools/build/lib/WorkerProtocol.java +++ /dev/null @@ -1,2298 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: -// bazel-out/local-fastbuild/bin/external/io_bazel/src/main/protobuf/libworker_protocol_java_proto_srcjar.srcjar.preprocessed/worker_protocol.proto - -package com.google.devtools.build.lib.worker; - -public final class WorkerProtocol { - private WorkerProtocol() {} - - public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {} - - public interface InputOrBuilder - extends - // @@protoc_insertion_point(interface_extends:blaze.worker.Input) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string path = 1; - * - *
-     * The path in the file system where to read this input artifact from. This is
-     * either a path relative to the execution root (the worker process is
-     * launched with the working directory set to the execution root), or an
-     * absolute path.
-     * 
- */ - java.lang.String getPath(); - /** - * optional string path = 1; - * - *
-     * The path in the file system where to read this input artifact from. This is
-     * either a path relative to the execution root (the worker process is
-     * launched with the working directory set to the execution root), or an
-     * absolute path.
-     * 
- */ - com.google.protobuf.ByteString getPathBytes(); - - /** - * optional bytes digest = 2; - * - *
-     * A hash-value of the contents. The format of the contents is unspecified and
-     * the digest should be treated as an opaque token.
-     * 
- */ - com.google.protobuf.ByteString getDigest(); - } - /** - * Protobuf type {@code blaze.worker.Input} - * - *
-   * An input file.
-   * 
- */ - public static final class Input extends com.google.protobuf.GeneratedMessage - implements - // @@protoc_insertion_point(message_implements:blaze.worker.Input) - InputOrBuilder { - // Use Input.newBuilder() to construct. - private Input(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - - private Input() { - path_ = ""; - digest_ = com.google.protobuf.ByteString.EMPTY; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - - private Input( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: - { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: - { - String s = input.readStringRequireUtf8(); - - path_ = s; - break; - } - case 18: - { - digest_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException(e.getMessage()) - .setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_Input_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_Input_fieldAccessorTable.ensureFieldAccessorsInitialized( - com.google.devtools.build.lib.worker.WorkerProtocol.Input.class, - 
com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder.class); - } - - public static final int PATH_FIELD_NUMBER = 1; - private volatile java.lang.Object path_; - /** - * optional string path = 1; - * - *
-     * The path in the file system where to read this input artifact from. This is
-     * either a path relative to the execution root (the worker process is
-     * launched with the working directory set to the execution root), or an
-     * absolute path.
-     * 
- */ - public java.lang.String getPath() { - java.lang.Object ref = path_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - path_ = s; - return s; - } - } - /** - * optional string path = 1; - * - *
-     * The path in the file system where to read this input artifact from. This is
-     * either a path relative to the execution root (the worker process is
-     * launched with the working directory set to the execution root), or an
-     * absolute path.
-     * 
- */ - public com.google.protobuf.ByteString getPathBytes() { - java.lang.Object ref = path_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - path_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int DIGEST_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString digest_; - /** - * optional bytes digest = 2; - * - *
-     * A hash-value of the contents. The format of the contents is unspecified and
-     * the digest should be treated as an opaque token.
-     * 
- */ - public com.google.protobuf.ByteString getDigest() { - return digest_; - } - - private byte memoizedIsInitialized = -1; - - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getPathBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, path_); - } - if (!digest_.isEmpty()) { - output.writeBytes(2, digest_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getPathBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, path_); - } - if (!digest_.isEmpty()) { - size += com.google.protobuf.CodedOutputStream.computeBytesSize(2, digest_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseDelimitedFrom( - java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseDelimitedFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { - return newBuilder(); - } - - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - - public static Builder newBuilder( - com.google.devtools.build.lib.worker.WorkerProtocol.Input prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - - public Builder toBuilder() { - return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code blaze.worker.Input} - * - *
-     * An input file.
-     * 
- */ - public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements - // @@protoc_insertion_point(builder_implements:blaze.worker.Input) - com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_Input_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_Input_fieldAccessorTable.ensureFieldAccessorsInitialized( - com.google.devtools.build.lib.worker.WorkerProtocol.Input.class, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder.class); - } - - // Construct using com.google.devtools.build.lib.worker.WorkerProtocol.Input.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {} - } - - public Builder clear() { - super.clear(); - path_ = ""; - - digest_ = com.google.protobuf.ByteString.EMPTY; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_Input_descriptor; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.Input getDefaultInstanceForType() { - return com.google.devtools.build.lib.worker.WorkerProtocol.Input.getDefaultInstance(); - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.Input build() { - com.google.devtools.build.lib.worker.WorkerProtocol.Input result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.Input buildPartial() { - com.google.devtools.build.lib.worker.WorkerProtocol.Input result = - new com.google.devtools.build.lib.worker.WorkerProtocol.Input(this); - result.path_ = path_; - result.digest_ = digest_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.devtools.build.lib.worker.WorkerProtocol.Input) { - return mergeFrom((com.google.devtools.build.lib.worker.WorkerProtocol.Input) other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.devtools.build.lib.worker.WorkerProtocol.Input other) { - if (other == com.google.devtools.build.lib.worker.WorkerProtocol.Input.getDefaultInstance()) - return this; - if (!other.getPath().isEmpty()) { - path_ = other.path_; - onChanged(); - } - if (other.getDigest() != com.google.protobuf.ByteString.EMPTY) { - setDigest(other.getDigest()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.devtools.build.lib.worker.WorkerProtocol.Input parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (com.google.devtools.build.lib.worker.WorkerProtocol.Input) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object path_ = ""; - /** - * optional string path = 1; - * - *
-       * The path in the file system where to read this input artifact from. This is
-       * either a path relative to the execution root (the worker process is
-       * launched with the working directory set to the execution root), or an
-       * absolute path.
-       * 
- */ - public java.lang.String getPath() { - java.lang.Object ref = path_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - path_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string path = 1; - * - *
-       * The path in the file system where to read this input artifact from. This is
-       * either a path relative to the execution root (the worker process is
-       * launched with the working directory set to the execution root), or an
-       * absolute path.
-       * 
- */ - public com.google.protobuf.ByteString getPathBytes() { - java.lang.Object ref = path_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - path_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string path = 1; - * - *
-       * The path in the file system where to read this input artifact from. This is
-       * either a path relative to the execution root (the worker process is
-       * launched with the working directory set to the execution root), or an
-       * absolute path.
-       * 
- */ - public Builder setPath(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - path_ = value; - onChanged(); - return this; - } - /** - * optional string path = 1; - * - *
-       * The path in the file system where to read this input artifact from. This is
-       * either a path relative to the execution root (the worker process is
-       * launched with the working directory set to the execution root), or an
-       * absolute path.
-       * 
- */ - public Builder clearPath() { - - path_ = getDefaultInstance().getPath(); - onChanged(); - return this; - } - /** - * optional string path = 1; - * - *
-       * The path in the file system where to read this input artifact from. This is
-       * either a path relative to the execution root (the worker process is
-       * launched with the working directory set to the execution root), or an
-       * absolute path.
-       * 
- */ - public Builder setPathBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - path_ = value; - onChanged(); - return this; - } - - private com.google.protobuf.ByteString digest_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes digest = 2; - * - *
-       * A hash-value of the contents. The format of the contents is unspecified and
-       * the digest should be treated as an opaque token.
-       * 
- */ - public com.google.protobuf.ByteString getDigest() { - return digest_; - } - /** - * optional bytes digest = 2; - * - *
-       * A hash-value of the contents. The format of the contents is unspecified and
-       * the digest should be treated as an opaque token.
-       * 
- */ - public Builder setDigest(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - - digest_ = value; - onChanged(); - return this; - } - /** - * optional bytes digest = 2; - * - *
-       * A hash-value of the contents. The format of the contents is unspecified and
-       * the digest should be treated as an opaque token.
-       * 
- */ - public Builder clearDigest() { - - digest_ = getDefaultInstance().getDigest(); - onChanged(); - return this; - } - - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - // @@protoc_insertion_point(builder_scope:blaze.worker.Input) - } - - // @@protoc_insertion_point(class_scope:blaze.worker.Input) - private static final com.google.devtools.build.lib.worker.WorkerProtocol.Input DEFAULT_INSTANCE; - - static { - DEFAULT_INSTANCE = new com.google.devtools.build.lib.worker.WorkerProtocol.Input(); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.Input getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public Input parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new Input(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.Input getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - } - - public interface WorkRequestOrBuilder - extends - // @@protoc_insertion_point(interface_extends:blaze.worker.WorkRequest) - com.google.protobuf.MessageOrBuilder { - - /** repeated string arguments = 1; */ - com.google.protobuf.ProtocolStringList 
getArgumentsList(); - /** repeated string arguments = 1; */ - int getArgumentsCount(); - /** repeated string arguments = 1; */ - java.lang.String getArguments(int index); - /** repeated string arguments = 1; */ - com.google.protobuf.ByteString getArgumentsBytes(int index); - - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - java.util.List getInputsList(); - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - com.google.devtools.build.lib.worker.WorkerProtocol.Input getInputs(int index); - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - int getInputsCount(); - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - java.util.List - getInputsOrBuilderList(); - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder getInputsOrBuilder( - int index); - } - /** - * Protobuf type {@code blaze.worker.WorkRequest} - * - *
-   * This represents a single work unit that Blaze sends to the worker.
-   * 
- */ - public static final class WorkRequest extends com.google.protobuf.GeneratedMessage - implements - // @@protoc_insertion_point(message_implements:blaze.worker.WorkRequest) - WorkRequestOrBuilder { - // Use WorkRequest.newBuilder() to construct. - private WorkRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - - private WorkRequest() { - arguments_ = com.google.protobuf.LazyStringArrayList.EMPTY; - inputs_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - - private WorkRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: - { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: - { - String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - arguments_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - arguments_.add(s); - break; - } - case 18: - { - if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - inputs_ = - new java.util.ArrayList< - com.google.devtools.build.lib.worker.WorkerProtocol.Input>(); - mutable_bitField0_ |= 0x00000002; - } - inputs_.add( - input.readMessage( - com.google.devtools.build.lib.worker.WorkerProtocol.Input.parser(), - extensionRegistry)); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException(e.getMessage()) - .setUnfinishedMessage(this)); - } finally { - if 
(((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - arguments_ = arguments_.getUnmodifiableView(); - } - if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - inputs_ = java.util.Collections.unmodifiableList(inputs_); - } - makeExtensionsImmutable(); - } - } - - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.class, - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.Builder.class); - } - - public static final int ARGUMENTS_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList arguments_; - /** repeated string arguments = 1; */ - public com.google.protobuf.ProtocolStringList getArgumentsList() { - return arguments_; - } - /** repeated string arguments = 1; */ - public int getArgumentsCount() { - return arguments_.size(); - } - /** repeated string arguments = 1; */ - public java.lang.String getArguments(int index) { - return arguments_.get(index); - } - /** repeated string arguments = 1; */ - public com.google.protobuf.ByteString getArgumentsBytes(int index) { - return arguments_.getByteString(index); - } - - public static final int INPUTS_FIELD_NUMBER = 2; - private java.util.List inputs_; - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - public java.util.List - getInputsList() { - return inputs_; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - public java.util.List< - ? extends com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder> - getInputsOrBuilderList() { - return inputs_; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - public int getInputsCount() { - return inputs_.size(); - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.Input getInputs(int index) { - return inputs_.get(index); - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-     * The inputs that the worker is allowed to read during execution of this
-     * request.
-     * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder getInputsOrBuilder( - int index) { - return inputs_.get(index); - } - - private byte memoizedIsInitialized = -1; - - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - for (int i = 0; i < arguments_.size(); i++) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, arguments_.getRaw(i)); - } - for (int i = 0; i < inputs_.size(); i++) { - output.writeMessage(2, inputs_.get(i)); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < arguments_.size(); i++) { - dataSize += computeStringSizeNoTag(arguments_.getRaw(i)); - } - size += dataSize; - size += 1 * getArgumentsList().size(); - } - for (int i = 0; i < inputs_.size(); i++) { - size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, inputs_.get(i)); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { - 
return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - parseDelimitedFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { - return newBuilder(); - } - - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - - public static Builder newBuilder( - 
com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - - public Builder toBuilder() { - return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code blaze.worker.WorkRequest} - * - *
-     * This represents a single work unit that Blaze sends to the worker.
-     * 
- */ - public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements - // @@protoc_insertion_point(builder_implements:blaze.worker.WorkRequest) - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.class, - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.Builder.class); - } - - // Construct using - // com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getInputsFieldBuilder(); - } - } - - public Builder clear() { - super.clear(); - arguments_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - if (inputsBuilder_ == null) { - inputs_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - inputsBuilder_.clear(); - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkRequest_descriptor; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - getDefaultInstanceForType() { - 
return com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.getDefaultInstance(); - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest build() { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest buildPartial() { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest result = - new com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - arguments_ = arguments_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.arguments_ = arguments_; - if (inputsBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - inputs_ = java.util.Collections.unmodifiableList(inputs_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.inputs_ = inputs_; - } else { - result.inputs_ = inputsBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest) { - return mergeFrom((com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest) other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest other) { - if (other - == com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest.getDefaultInstance()) - return this; - if (!other.arguments_.isEmpty()) { - if (arguments_.isEmpty()) { - arguments_ = other.arguments_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureArgumentsIsMutable(); - arguments_.addAll(other.arguments_); - } - onChanged(); - } - if (inputsBuilder_ == null) { - if (!other.inputs_.isEmpty()) { - 
if (inputs_.isEmpty()) { - inputs_ = other.inputs_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureInputsIsMutable(); - inputs_.addAll(other.inputs_); - } - onChanged(); - } - } else { - if (!other.inputs_.isEmpty()) { - if (inputsBuilder_.isEmpty()) { - inputsBuilder_.dispose(); - inputsBuilder_ = null; - inputs_ = other.inputs_; - bitField0_ = (bitField0_ & ~0x00000002); - inputsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders - ? getInputsFieldBuilder() - : null; - } else { - inputsBuilder_.addAllMessages(other.inputs_); - } - } - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest) - e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private int bitField0_; - - private com.google.protobuf.LazyStringList arguments_ = - com.google.protobuf.LazyStringArrayList.EMPTY; - - private void ensureArgumentsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - arguments_ = new com.google.protobuf.LazyStringArrayList(arguments_); - bitField0_ |= 0x00000001; - } - } - /** repeated string arguments = 1; */ - public com.google.protobuf.ProtocolStringList getArgumentsList() { - return arguments_.getUnmodifiableView(); - } - /** repeated string arguments = 1; */ - public int getArgumentsCount() { - return arguments_.size(); - } - /** repeated string arguments = 1; */ - public java.lang.String getArguments(int index) 
{ - return arguments_.get(index); - } - /** repeated string arguments = 1; */ - public com.google.protobuf.ByteString getArgumentsBytes(int index) { - return arguments_.getByteString(index); - } - /** repeated string arguments = 1; */ - public Builder setArguments(int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureArgumentsIsMutable(); - arguments_.set(index, value); - onChanged(); - return this; - } - /** repeated string arguments = 1; */ - public Builder addArguments(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureArgumentsIsMutable(); - arguments_.add(value); - onChanged(); - return this; - } - /** repeated string arguments = 1; */ - public Builder addAllArguments(java.lang.Iterable values) { - ensureArgumentsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll(values, arguments_); - onChanged(); - return this; - } - /** repeated string arguments = 1; */ - public Builder clearArguments() { - arguments_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - /** repeated string arguments = 1; */ - public Builder addArgumentsBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - ensureArgumentsIsMutable(); - arguments_.add(value); - onChanged(); - return this; - } - - private java.util.List inputs_ = - java.util.Collections.emptyList(); - - private void ensureInputsIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - inputs_ = - new java.util.ArrayList( - inputs_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.devtools.build.lib.worker.WorkerProtocol.Input, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder, - com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder> - inputsBuilder_; - - 
/** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public java.util.List - getInputsList() { - if (inputsBuilder_ == null) { - return java.util.Collections.unmodifiableList(inputs_); - } else { - return inputsBuilder_.getMessageList(); - } - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public int getInputsCount() { - if (inputsBuilder_ == null) { - return inputs_.size(); - } else { - return inputsBuilder_.getCount(); - } - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.Input getInputs(int index) { - if (inputsBuilder_ == null) { - return inputs_.get(index); - } else { - return inputsBuilder_.getMessage(index); - } - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder setInputs( - int index, com.google.devtools.build.lib.worker.WorkerProtocol.Input value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.set(index, value); - onChanged(); - } else { - inputsBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder setInputs( - int index, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.set(index, builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder addInputs(com.google.devtools.build.lib.worker.WorkerProtocol.Input value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.add(value); - onChanged(); - } else { - inputsBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder addInputs( - int index, com.google.devtools.build.lib.worker.WorkerProtocol.Input value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.add(index, value); - onChanged(); - } else { - inputsBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder addInputs( - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.add(builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder addInputs( - int index, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.add(index, builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder addAllInputs( - java.lang.Iterable - values) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll(values, inputs_); - onChanged(); - } else { - inputsBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder clearInputs() { - if (inputsBuilder_ == null) { - inputs_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - inputsBuilder_.clear(); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public Builder removeInputs(int index) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.remove(index); - onChanged(); - } else { - inputsBuilder_.remove(index); - } - return this; - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder getInputsBuilder( - int index) { - return getInputsFieldBuilder().getBuilder(index); - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder getInputsOrBuilder( - int index) { - if (inputsBuilder_ == null) { - return inputs_.get(index); - } else { - return inputsBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public java.util.List< - ? extends com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder> - getInputsOrBuilderList() { - if (inputsBuilder_ != null) { - return inputsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(inputs_); - } - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder addInputsBuilder() { - return getInputsFieldBuilder() - .addBuilder( - com.google.devtools.build.lib.worker.WorkerProtocol.Input.getDefaultInstance()); - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder addInputsBuilder( - int index) { - return getInputsFieldBuilder() - .addBuilder( - index, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.getDefaultInstance()); - } - /** - * repeated .blaze.worker.Input inputs = 2; - * - *
-       * The inputs that the worker is allowed to read during execution of this
-       * request.
-       * 
- */ - public java.util.List - getInputsBuilderList() { - return getInputsFieldBuilder().getBuilderList(); - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.devtools.build.lib.worker.WorkerProtocol.Input, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder, - com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder> - getInputsFieldBuilder() { - if (inputsBuilder_ == null) { - inputsBuilder_ = - new com.google.protobuf.RepeatedFieldBuilder< - com.google.devtools.build.lib.worker.WorkerProtocol.Input, - com.google.devtools.build.lib.worker.WorkerProtocol.Input.Builder, - com.google.devtools.build.lib.worker.WorkerProtocol.InputOrBuilder>( - inputs_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - inputs_ = null; - } - return inputsBuilder_; - } - - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - // @@protoc_insertion_point(builder_scope:blaze.worker.WorkRequest) - } - - // @@protoc_insertion_point(class_scope:blaze.worker.WorkRequest) - private static final com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - DEFAULT_INSTANCE; - - static { - DEFAULT_INSTANCE = new com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest(); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WorkRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new WorkRequest(input, extensionRegistry); - } catch (RuntimeException e) { - 
if (e.getCause() instanceof com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkRequest - getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - } - - public interface WorkResponseOrBuilder - extends - // @@protoc_insertion_point(interface_extends:blaze.worker.WorkResponse) - com.google.protobuf.MessageOrBuilder { - - /** optional int32 exit_code = 1; */ - int getExitCode(); - - /** - * optional string output = 2; - * - *
-     * This is printed to the user after the WorkResponse has been received and is supposed to contain
-     * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-     * encoding.
-     * 
- */ - java.lang.String getOutput(); - /** - * optional string output = 2; - * - *
-     * This is printed to the user after the WorkResponse has been received and is supposed to contain
-     * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-     * encoding.
-     * 
- */ - com.google.protobuf.ByteString getOutputBytes(); - } - /** - * Protobuf type {@code blaze.worker.WorkResponse} - * - *
-   * The worker sends this message to Blaze when it finished its work on the WorkRequest message.
-   * 
- */ - public static final class WorkResponse extends com.google.protobuf.GeneratedMessage - implements - // @@protoc_insertion_point(message_implements:blaze.worker.WorkResponse) - WorkResponseOrBuilder { - // Use WorkResponse.newBuilder() to construct. - private WorkResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - - private WorkResponse() { - exitCode_ = 0; - output_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - - private WorkResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: - { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 8: - { - exitCode_ = input.readInt32(); - break; - } - case 18: - { - String s = input.readStringRequireUtf8(); - - output_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException(e.getMessage()) - .setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - 
com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse.class, - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse.Builder.class); - } - - public static final int EXIT_CODE_FIELD_NUMBER = 1; - private int exitCode_; - /** optional int32 exit_code = 1; */ - public int getExitCode() { - return exitCode_; - } - - public static final int OUTPUT_FIELD_NUMBER = 2; - private volatile java.lang.Object output_; - /** - * optional string output = 2; - * - *
-     * This is printed to the user after the WorkResponse has been received and is supposed to contain
-     * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-     * encoding.
-     * 
- */ - public java.lang.String getOutput() { - java.lang.Object ref = output_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - output_ = s; - return s; - } - } - /** - * optional string output = 2; - * - *
-     * This is printed to the user after the WorkResponse has been received and is supposed to contain
-     * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-     * encoding.
-     * 
- */ - public com.google.protobuf.ByteString getOutputBytes() { - java.lang.Object ref = output_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - output_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (exitCode_ != 0) { - output.writeInt32(1, exitCode_); - } - if (!getOutputBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, output_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (exitCode_ != 0) { - size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, exitCode_); - } - if (!getOutputBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, output_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - parseDelimitedFrom( - java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { - return newBuilder(); - } - - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - - public 
static Builder newBuilder( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - - public Builder toBuilder() { - return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code blaze.worker.WorkResponse} - * - *
-     * The worker sends this message to Blaze when it finished its work on the WorkRequest message.
-     * 
- */ - public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements - // @@protoc_insertion_point(builder_implements:blaze.worker.WorkResponse) - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse.class, - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse.Builder.class); - } - - // Construct using - // com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {} - } - - public Builder clear() { - super.clear(); - exitCode_ = 0; - - output_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return com.google.devtools.build.lib.worker.WorkerProtocol - .internal_static_blaze_worker_WorkResponse_descriptor; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - getDefaultInstanceForType() { - return com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - .getDefaultInstance(); - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse build() { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse 
result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse buildPartial() { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse result = - new com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse(this); - result.exitCode_ = exitCode_; - result.output_ = output_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse) { - return mergeFrom( - (com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse) other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom( - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse other) { - if (other - == com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - .getDefaultInstance()) return this; - if (other.getExitCode() != 0) { - setExitCode(other.getExitCode()); - } - if (!other.getOutput().isEmpty()) { - output_ = other.output_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = - (com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse) - e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private int exitCode_; - /** optional int32 exit_code = 1; */ - public int getExitCode() { - 
return exitCode_; - } - /** optional int32 exit_code = 1; */ - public Builder setExitCode(int value) { - - exitCode_ = value; - onChanged(); - return this; - } - /** optional int32 exit_code = 1; */ - public Builder clearExitCode() { - - exitCode_ = 0; - onChanged(); - return this; - } - - private java.lang.Object output_ = ""; - /** - * optional string output = 2; - * - *
-       * This is printed to the user after the WorkResponse has been received and is supposed to contain
-       * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-       * encoding.
-       * 
- */ - public java.lang.String getOutput() { - java.lang.Object ref = output_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - output_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string output = 2; - * - *
-       * This is printed to the user after the WorkResponse has been received and is supposed to contain
-       * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-       * encoding.
-       * 
- */ - public com.google.protobuf.ByteString getOutputBytes() { - java.lang.Object ref = output_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - output_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string output = 2; - * - *
-       * This is printed to the user after the WorkResponse has been received and is supposed to contain
-       * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-       * encoding.
-       * 
- */ - public Builder setOutput(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - output_ = value; - onChanged(); - return this; - } - /** - * optional string output = 2; - * - *
-       * This is printed to the user after the WorkResponse has been received and is supposed to contain
-       * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-       * encoding.
-       * 
- */ - public Builder clearOutput() { - - output_ = getDefaultInstance().getOutput(); - onChanged(); - return this; - } - /** - * optional string output = 2; - * - *
-       * This is printed to the user after the WorkResponse has been received and is supposed to contain
-       * compiler warnings / errors etc. - thus we'll use a string type here, which gives us UTF-8
-       * encoding.
-       * 
- */ - public Builder setOutputBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - output_ = value; - onChanged(); - return this; - } - - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - // @@protoc_insertion_point(builder_scope:blaze.worker.WorkResponse) - } - - // @@protoc_insertion_point(class_scope:blaze.worker.WorkResponse) - private static final com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - DEFAULT_INSTANCE; - - static { - DEFAULT_INSTANCE = new com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse(); - } - - public static com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public WorkResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new WorkResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.devtools.build.lib.worker.WorkerProtocol.WorkResponse - getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - } - - private static com.google.protobuf.Descriptors.Descriptor - 
internal_static_blaze_worker_Input_descriptor; - private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_blaze_worker_Input_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_blaze_worker_WorkRequest_descriptor; - private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_blaze_worker_WorkRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_blaze_worker_WorkResponse_descriptor; - private static com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_blaze_worker_WorkResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { - return descriptor; - } - - private static com.google.protobuf.Descriptors.FileDescriptor descriptor; - - static { - java.lang.String[] descriptorData = { - "\n\220\001bazel-out/local-fastbuild/bin/externa" - + "l/io_bazel/src/main/protobuf/libworker_p" - + "rotocol_java_proto_srcjar.srcjar.preproc" - + "essed/worker_protocol.proto\022\014blaze.worke" - + "r\"%\n\005Input\022\014\n\004path\030\001 \001(\t\022\016\n\006digest\030\002 \001(\014" - + "\"E\n\013WorkRequest\022\021\n\targuments\030\001 \003(\t\022#\n\006in" - + "puts\030\002 \003(\0132\023.blaze.worker.Input\"1\n\014WorkR" - + "esponse\022\021\n\texit_code\030\001 \001(\005\022\016\n\006output\030\002 \001" - + "(\tB&\n$com.google.devtools.build.lib.work" - + "erb\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( - descriptorData, new 
com.google.protobuf.Descriptors.FileDescriptor[] {}, assigner); - internal_static_blaze_worker_Input_descriptor = getDescriptor().getMessageTypes().get(0); - internal_static_blaze_worker_Input_fieldAccessorTable = - new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_blaze_worker_Input_descriptor, - new java.lang.String[] { - "Path", "Digest", - }); - internal_static_blaze_worker_WorkRequest_descriptor = getDescriptor().getMessageTypes().get(1); - internal_static_blaze_worker_WorkRequest_fieldAccessorTable = - new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_blaze_worker_WorkRequest_descriptor, - new java.lang.String[] { - "Arguments", "Inputs", - }); - internal_static_blaze_worker_WorkResponse_descriptor = getDescriptor().getMessageTypes().get(2); - internal_static_blaze_worker_WorkResponse_fieldAccessorTable = - new com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_blaze_worker_WorkResponse_descriptor, - new java.lang.String[] { - "ExitCode", "Output", - }); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/src/java/io/bazel/rulesscala/coverage/instrumenter/BUILD b/src/java/io/bazel/rulesscala/coverage/instrumenter/BUILD index 1b0ed55bd..13aa7a445 100644 --- a/src/java/io/bazel/rulesscala/coverage/instrumenter/BUILD +++ b/src/java/io/bazel/rulesscala/coverage/instrumenter/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_binary") + java_binary( name = "instrumenter", srcs = [ @@ -10,8 +12,8 @@ java_binary( main_class = "io.bazel.rulesscala.coverage.instrumenter.JacocoInstrumenter", visibility = ["//visibility:public"], deps = [ + "//third_party/bazel/src/main/protobuf:worker_protocol_java_proto", "@bazel_tools//tools/jdk:JacocoCoverage", - "@io_bazel_rules_scala//src/java/com/google/devtools/build/lib:worker", "@io_bazel_rules_scala//src/java/io/bazel/rulesscala/jar", "@io_bazel_rules_scala//src/java/io/bazel/rulesscala/worker", ], diff --git 
a/src/java/io/bazel/rulesscala/coverage/instrumenter/JacocoInstrumenter.java b/src/java/io/bazel/rulesscala/coverage/instrumenter/JacocoInstrumenter.java index 86c450b63..d553d7cf5 100644 --- a/src/java/io/bazel/rulesscala/coverage/instrumenter/JacocoInstrumenter.java +++ b/src/java/io/bazel/rulesscala/coverage/instrumenter/JacocoInstrumenter.java @@ -52,11 +52,12 @@ public void processRequest(List < String > args) { private void processArg(Instrumenter jacoco, String arg) throws Exception { String[] parts = arg.split("="); - if (parts.length != 2) { - throw new Exception("expected `in_path=out_path` form for argument: " + arg); + if (parts.length != 3) { + throw new Exception("expected `in_path=out_path=srcs` form for argument: " + arg); } Path inPath = Paths.get(parts[0]); Path outPath = Paths.get(parts[1]); + String srcs = parts[2]; try ( FileSystem inFS = FileSystems.newFileSystem(inPath, null); FileSystem outFS = FileSystems.newFileSystem( URI.create("jar:" + outPath.toUri()), Collections.singletonMap("create", "true")); @@ -69,6 +70,21 @@ private void processArg(Instrumenter jacoco, String arg) throws Exception { throw new RuntimeException(e); } }); + + /* + * https://github.com/bazelbuild/bazel/blob/567ca633d016572f5760bfd027c10616f2b8c2e4/src/java_tools/junitrunner/java/com/google/testing/coverage/JacocoCoverageRunner.java#L411 + * + * Bazel / JacocoCoverageRunner will look for any file that ends with '-paths-for-coverage.txt' within the JAR to be later used for reconstructing the path for source files. + * This is a fairly undocumented feature within bazel at this time, but in essence, it opens all the jars, searches for all files matching '-paths-for-coverage.txt' + * and then adds them to a single in memory set. 
+ * + * https://github.com/bazelbuild/bazel/blob/567ca633d016572f5760bfd027c10616f2b8c2e4/src/java_tools/junitrunner/java/com/google/testing/coverage/JacocoLCOVFormatter.java#L70 + * Which is then used in the formatter to find the corresponding source file from the set of sources we wrote in all the JARs. + */ + Files.write( + outFS.getPath("-paths-for-coverage.txt"), + srcs.replace(",", "\n").getBytes(java.nio.charset.StandardCharsets.UTF_8) + ); } } diff --git a/src/java/io/bazel/rulesscala/exe/BUILD b/src/java/io/bazel/rulesscala/exe/BUILD index 0a36ca645..115b61c53 100644 --- a/src/java/io/bazel/rulesscala/exe/BUILD +++ b/src/java/io/bazel/rulesscala/exe/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_binary", "java_library") + java_library( name = "exe-lib", srcs = [ diff --git a/src/java/io/bazel/rulesscala/exe/LauncherFileWriter.java b/src/java/io/bazel/rulesscala/exe/LauncherFileWriter.java index f54a57819..15d1c1326 100644 --- a/src/java/io/bazel/rulesscala/exe/LauncherFileWriter.java +++ b/src/java/io/bazel/rulesscala/exe/LauncherFileWriter.java @@ -36,7 +36,7 @@ public static void main(String[] args) throws IOException { .addKeyValuePair("jar_bin_path", jarBinPath) .addKeyValuePair("java_start_class", javaStartClass) .addKeyValuePair("classpath", classpath) - .addJoinedValues("jvm_flags", " ", jvmFlags) + .addJoinedValues("jvm_flags", "\t", jvmFlags) .build(); Path launcher = Paths.get(Runfiles.create().rlocation("bazel_tools/tools/launcher/launcher.exe")); diff --git a/src/java/io/bazel/rulesscala/io_utils/BUILD b/src/java/io/bazel/rulesscala/io_utils/BUILD index 3d72bd8e1..f3d6c1c32 100644 --- a/src/java/io/bazel/rulesscala/io_utils/BUILD +++ b/src/java/io/bazel/rulesscala/io_utils/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_library") + java_library( name = "io_utils", srcs = glob(["*.java"]), diff --git a/src/java/io/bazel/rulesscala/jar/BUILD b/src/java/io/bazel/rulesscala/jar/BUILD index 173484d19..c92d77853 
100644 --- a/src/java/io/bazel/rulesscala/jar/BUILD +++ b/src/java/io/bazel/rulesscala/jar/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_binary", "java_library") + java_library( name = "jar", srcs = [ diff --git a/src/java/io/bazel/rulesscala/scala_test/BUILD b/src/java/io/bazel/rulesscala/scala_test/BUILD index 2b4f1d851..ba02286d8 100644 --- a/src/java/io/bazel/rulesscala/scala_test/BUILD +++ b/src/java/io/bazel/rulesscala/scala_test/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_library") + java_library( name = "runner", srcs = ["Runner.java"], diff --git a/src/java/io/bazel/rulesscala/scalac/BUILD b/src/java/io/bazel/rulesscala/scalac/BUILD index d80b8152f..c6121878a 100644 --- a/src/java/io/bazel/rulesscala/scalac/BUILD +++ b/src/java/io/bazel/rulesscala/scalac/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_binary") load( ":jvm_export_toolchain.bzl", _export_scalac_repositories_from_toolchain_to_jvm = "export_scalac_repositories_from_toolchain_to_jvm", @@ -20,10 +21,10 @@ java_binary( visibility = ["//visibility:public"], deps = [ ":exported_scalac_repositories_from_toolchain_to_jvm", - "@io_bazel_rules_scala//src/java/com/google/devtools/build/lib:worker", + "//external:io_bazel_rules_scala/dependency/commons_io/commons_io", + "//third_party/bazel/src/main/protobuf:worker_protocol_java_proto", "@io_bazel_rules_scala//src/java/io/bazel/rulesscala/jar", "@io_bazel_rules_scala//src/java/io/bazel/rulesscala/worker", - "@scalac_rules_commons_io//jar", ], ) diff --git a/src/java/io/bazel/rulesscala/scalac/CompileOptions.java b/src/java/io/bazel/rulesscala/scalac/CompileOptions.java index 471dc12d6..e781c280e 100644 --- a/src/java/io/bazel/rulesscala/scalac/CompileOptions.java +++ b/src/java/io/bazel/rulesscala/scalac/CompileOptions.java @@ -1,5 +1,6 @@ package io.bazel.rulesscala.scalac; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -15,19 +16,19 @@ public 
class CompileOptions { public final String[] files; public final String[] sourceJars; public final String[] javaFiles; - public final Map resourceFiles; - public final String resourceStripPrefix; + public final List resourceFiles; public final String[] resourceJars; public final String[] classpathResourceFiles; public final String[] directJars; public final String[] directTargets; - public final String[] ignoredTargets; + public final String[] unusedDepsIgnoredTargets; public final String[] indirectJars; public final String[] indirectTargets; - public final String dependencyAnalyzerMode; + public final String strictDepsMode; public final String unusedDependencyCheckerMode; public final String currentTarget; public final String statsfile; + public final String dependencyTrackingMethod; public CompileOptions(List args) { Map argMap = buildArgMap(args); @@ -50,46 +51,38 @@ public CompileOptions(List args) { sourceJars = getCommaList(argMap, "SourceJars"); resourceFiles = getResources(argMap); - resourceStripPrefix = getOrEmpty(argMap, "ResourceStripPrefix"); resourceJars = getCommaList(argMap, "ResourceJars"); classpathResourceFiles = getCommaList(argMap, "ClasspathResourceSrcs"); directJars = getCommaList(argMap, "DirectJars"); directTargets = getCommaList(argMap, "DirectTargets"); - ignoredTargets = getCommaList(argMap, "IgnoredTargets"); + unusedDepsIgnoredTargets = getCommaList(argMap, "UnusedDepsIgnoredTargets"); indirectJars = getCommaList(argMap, "IndirectJars"); indirectTargets = getCommaList(argMap, "IndirectTargets"); - dependencyAnalyzerMode = getOrElse(argMap, "DependencyAnalyzerMode", "off"); + strictDepsMode = getOrElse(argMap, "StrictDepsMode", "off"); unusedDependencyCheckerMode = getOrElse(argMap, "UnusedDependencyCheckerMode", "off"); currentTarget = getOrElse(argMap, "CurrentTarget", "NA"); + dependencyTrackingMethod = getOrElse(argMap, "DependencyTrackingMethod", "high-level"); statsfile = getOrError(argMap, "StatsfileOutput", "Missing required arg 
StatsfileOutput"); } - private static Map getResources(Map args) { - String[] keys = getCommaList(args, "ResourceSrcs"); - String[] dests = getCommaList(args, "ResourceDests"); - String[] shortPaths = getCommaList(args, "ResourceShortPaths"); + private static List getResources(Map args) { + String[] targets = getCommaList(args, "ResourceTargets"); + String[] sources = getCommaList(args, "ResourceSources"); - if (keys.length != dests.length) + if (targets.length != sources.length) throw new RuntimeException( String.format( - "mismatch in resources: keys: %s dests: %s", - getOrEmpty(args, "ResourceSrcs"), getOrEmpty(args, "ResourceDests"))); + "mismatch in resources: targets: %s sources: %s", + getOrEmpty(args, "ResourceTargets"), getOrEmpty(args, "ResourceSources"))); - if (keys.length != shortPaths.length) - throw new RuntimeException( - String.format( - "mismatch in resources: keys: %s shortPaths: %s", - getOrEmpty(args, "ResourceSrcs"), getOrEmpty(args, "ResourceShortPaths"))); - - HashMap res = new HashMap(); - for (int idx = 0; idx < keys.length; idx++) { - Resource resource = new Resource(dests[idx], shortPaths[idx]); - res.put(keys[idx], resource); + List resources = new ArrayList(); + for (int idx = 0; idx < targets.length; idx++) { + resources.add(new Resource(targets[idx], sources[idx])); } - return res; + return resources; } private static HashMap buildArgMap(List lines) { diff --git a/src/java/io/bazel/rulesscala/scalac/Resource.java b/src/java/io/bazel/rulesscala/scalac/Resource.java index a8db9dbc3..6c0b68cc4 100644 --- a/src/java/io/bazel/rulesscala/scalac/Resource.java +++ b/src/java/io/bazel/rulesscala/scalac/Resource.java @@ -1,11 +1,11 @@ package io.bazel.rulesscala.scalac; public class Resource { - public final String destination; - public final String shortPath; + public final String target; + public final String source; - public Resource(String destination, String shortPath) { - this.destination = destination; - this.shortPath = shortPath; + 
public Resource(String target, String source) { + this.target = target; + this.source = source; } } diff --git a/src/java/io/bazel/rulesscala/scalac/ScalacProcessor.java b/src/java/io/bazel/rulesscala/scalac/ScalacProcessor.java index 9e43ce639..fe5b7721f 100644 --- a/src/java/io/bazel/rulesscala/scalac/ScalacProcessor.java +++ b/src/java/io/bazel/rulesscala/scalac/ScalacProcessor.java @@ -3,7 +3,12 @@ import io.bazel.rulesscala.jar.JarCreator; import io.bazel.rulesscala.worker.GenericWorker; import io.bazel.rulesscala.worker.Processor; -import java.io.*; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.lang.reflect.Field; import java.nio.file.FileSystems; import java.nio.file.FileVisitResult; @@ -12,8 +17,10 @@ import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; -import java.util.*; -import java.util.Map.Entry; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.List; import java.util.jar.JarEntry; import java.util.jar.JarFile; import org.apache.commons.io.IOUtils; @@ -70,7 +77,7 @@ public void processRequest(List args) throws Exception { } /** Copy the resources */ - copyResources(ops.resourceFiles, ops.resourceStripPrefix, tmpPath); + copyResources(ops.resourceFiles, tmpPath); /** Extract and copy resources from resource jars */ copyResourceJars(ops.resourceJars, tmpPath); @@ -173,31 +180,36 @@ private static boolean isModeEnabled(String mode) { private static String[] getPluginParamsFrom(CompileOptions ops) { ArrayList pluginParams = new ArrayList<>(0); - if (isModeEnabled(ops.dependencyAnalyzerMode)) { - String[] indirectTargets = encodeBazelTargets(ops.indirectTargets); + if (isModeEnabled(ops.strictDepsMode) || isModeEnabled(ops.unusedDependencyCheckerMode)) { String currentTarget = 
encodeBazelTarget(ops.currentTarget); String[] dependencyAnalyzerParams = { - "-P:dependency-analyzer:direct-jars:" + String.join(":", ops.directJars), - "-P:dependency-analyzer:indirect-jars:" + String.join(":", ops.indirectJars), - "-P:dependency-analyzer:indirect-targets:" + String.join(":", indirectTargets), - "-P:dependency-analyzer:mode:" + ops.dependencyAnalyzerMode, - "-P:dependency-analyzer:current-target:" + currentTarget, + "-P:dependency-analyzer:strict-deps-mode:" + ops.strictDepsMode, + "-P:dependency-analyzer:unused-deps-mode:" + ops.unusedDependencyCheckerMode, + "-P:dependency-analyzer:current-target:" + currentTarget, + "-P:dependency-analyzer:dependency-tracking-method:" + ops.dependencyTrackingMethod, }; + pluginParams.addAll(Arrays.asList(dependencyAnalyzerParams)); - } else if (isModeEnabled(ops.unusedDependencyCheckerMode)) { - String[] directTargets = encodeBazelTargets(ops.directTargets); - String[] ignoredTargets = encodeBazelTargets(ops.ignoredTargets); - String currentTarget = encodeBazelTarget(ops.currentTarget); - String[] unusedDependencyCheckerParams = { - "-P:unused-dependency-checker:direct-jars:" + String.join(":", ops.directJars), - "-P:unused-dependency-checker:direct-targets:" + String.join(":", directTargets), - "-P:unused-dependency-checker:ignored-targets:" + String.join(":", ignoredTargets), - "-P:unused-dependency-checker:mode:" + ops.unusedDependencyCheckerMode, - "-P:unused-dependency-checker:current-target:" + currentTarget, - }; - pluginParams.addAll(Arrays.asList(unusedDependencyCheckerParams)); + if (ops.directJars.length > 0) { + pluginParams.add("-P:dependency-analyzer:direct-jars:" + String.join(":", ops.directJars)); + } + if (ops.directTargets.length > 0) { + String[] directTargets = encodeBazelTargets(ops.directTargets); + pluginParams.add("-P:dependency-analyzer:direct-targets:" + String.join(":", directTargets)); + } + if (ops.indirectJars.length > 0) { + 
pluginParams.add("-P:dependency-analyzer:indirect-jars:" + String.join(":", ops.indirectJars)); + } + if (ops.indirectTargets.length > 0) { + String[] indirectTargets = encodeBazelTargets(ops.indirectTargets); + pluginParams.add("-P:dependency-analyzer:indirect-targets:" + String.join(":", indirectTargets)); + } + if (ops.unusedDepsIgnoredTargets.length > 0) { + String[] ignoredTargets = encodeBazelTargets(ops.unusedDepsIgnoredTargets); + pluginParams.add("-P:dependency-analyzer:unused-deps-ignored-targets:" + String.join(":", ignoredTargets)); + } } return pluginParams.toArray(new String[pluginParams.size()]); @@ -268,44 +280,11 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) } } - private static void copyResources( - Map resources, String resourceStripPrefix, Path dest) throws IOException { - for (Entry e : resources.entrySet()) { - Path source = Paths.get(e.getKey()); - Resource resource = e.getValue(); - Path shortPath = Paths.get(resource.shortPath); - String dstr; - // Check if we need to modify resource destination path - if (!"".equals(resourceStripPrefix)) { - /** - * NOTE: We are not using the Resource Hash Value as the destination path when - * `resource_strip_prefix` present. The path in the hash value is computed by the - * `_adjust_resources_path` in `scala.bzl`. These are the default paths, ie, path that are - * automatically computed when there is no `resource_strip_prefix` present. 
But when - * `resource_strip_prefix` is present, we need to strip the prefix from the Source Path and - * use that as the new destination path Refer Bazel -> BazelJavaRuleClasses.java#L227 for - * details - */ - dstr = getResourcePath(shortPath, resourceStripPrefix); - } else { - dstr = resource.destination; - } - - if (dstr.charAt(0) == '/') { - // we don't want to copy to an absolute destination - dstr = dstr.substring(1); - } - if (dstr.startsWith("../")) { - // paths to external repositories, for some reason, start with a leading ../ - // we don't want to copy the resource out of our temporary directory, so - // instead we replace ../ with external/ - // since "external" is a bit of reserved directory in bazel for these kinds - // of purposes, we don't expect a collision in the paths. - dstr = "external" + dstr.substring(2); - } - Path target = dest.resolve(dstr); - File tfile = target.getParent().toFile(); - tfile.mkdirs(); + private static void copyResources(List resources, Path dest) throws IOException { + for (Resource r : resources) { + Path source = Paths.get(r.source); + Path target = dest.resolve(r.target); + target.getParent().toFile().mkdirs(); Files.copy(source, target); } } @@ -328,24 +307,6 @@ private static void copyClasspathResourcesToRoot(String[] classpathResourceFiles } } - private static String getResourcePath(Path source, String resourceStripPrefix) - throws RuntimeException { - String sourcePath = source.toString(); - // convert strip prefix to a Path first and back to handle different file systems - String resourceStripPrefixPath = Paths.get(resourceStripPrefix).toString(); - // check if the Resource file is under the specified prefix to strip - if (!sourcePath.startsWith(resourceStripPrefixPath)) { - // Resource File is not under the specified prefix to strip - throw new RuntimeException( - "Resource File " - + sourcePath - + " is not under the specified strip prefix " - + resourceStripPrefix); - } - String newResPath = 
sourcePath.substring(resourceStripPrefix.length()); - return newResPath; - } - private static void copyResourceJars(String[] resourceJars, Path dest) throws IOException { for (String jarPath : resourceJars) { extractJar(jarPath, dest.toString(), null); diff --git a/src/java/io/bazel/rulesscala/worker/BUILD b/src/java/io/bazel/rulesscala/worker/BUILD index 9a6fca96a..61c69e98d 100644 --- a/src/java/io/bazel/rulesscala/worker/BUILD +++ b/src/java/io/bazel/rulesscala/worker/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_library") + java_library( name = "worker", srcs = [ @@ -6,6 +8,6 @@ java_library( ], visibility = ["//visibility:public"], deps = [ - "//src/java/com/google/devtools/build/lib:worker", + "//third_party/bazel/src/main/protobuf:worker_protocol_java_proto", ], ) diff --git a/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala b/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala index d367ff424..b443f3e68 100644 --- a/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala +++ b/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala @@ -41,15 +41,24 @@ object BenchmarkGenerator { classPath: List[Path] ) + private case class GenerationException(messageLines: Seq[String]) + extends RuntimeException(messageLines.mkString("\n")) + def main(argv: Array[String]): Unit = { val args = parseArgs(argv) - generateJmhBenchmark( - args.generatorType, - args.resultSourceJar, - args.resultResourceJar, - args.inputJar, - args.classPath - ) + try { + generateJmhBenchmark( + args.generatorType, + args.resultSourceJar, + args.resultResourceJar, + args.inputJar, + args.classPath + ) + } catch { + case GenerationException(messageLines) => + messageLines.foreach(log) + sys.exit(1) + } } private def parseArgs(argv: Array[String]): BenchmarkGeneratorArgs = { @@ -168,10 +177,8 @@ object BenchmarkGenerator { generator.generate(source, destination) generator.complete(source, destination) if 
(destination.hasErrors) { - log("JMH Benchmark generator failed") - for (e <- destination.getErrors.asScala) { - log(e.toString) - } + throw new GenerationException( + "JHM Benchmark generator failed" +: destination.getErrors.asScala.map(_.toString).toSeq) } } constructJar(sourceJarOut, tmpSourceDir) diff --git a/test/BUILD b/test/BUILD index daeb207b1..5ff5302c1 100644 --- a/test/BUILD +++ b/test/BUILD @@ -1,3 +1,6 @@ +load("@rules_python//python:defs.bzl", "py_binary") +load("@rules_java//java:defs.bzl", "java_binary", "java_import", "java_library") + package(default_testonly = 1) load( @@ -303,6 +306,38 @@ sh_test( data = ["MixJavaScalaLibTestOutputs"], ) +scala_binary( + name = "MixJavaScalaBinary", + srcs = ["src/main/scala/scalarules/test/MixJavaScalaLibBinary.scala"] + glob([ + "src/main/scala/scalarules/test/mix_java_scala/*.scala", + ]) + glob([ + "src/main/scala/scalarules/test/mix_java_scala/*.java", + ]), + main_class = "scalarules.test.JavaBinary", +) + +scala_test( + name = "MixJavaScalaScalaTest", + size = "small", + srcs = [ + "HelloLibTest.scala", + "OtherJavaLib.java", + ], + deps = [ + ":HelloLib", + ], +) + +scala_junit_test( + name = "MixJavaScalaJunitTest", + size = "small", + srcs = [ + "OtherJavaLib.java", + "src/main/scala/scalarules/test/junit/HelloWorldJunitTest.scala", + ], + suffixes = ["Test"], +) + #needed to test java sources are compiled scala_binary( name = "MixJavaScalaLibBinary", @@ -469,8 +504,8 @@ scala_specs2_junit_test( name = "data_location_expansion", size = "small", srcs = ["src/main/scala/scalarules/test/location_expansion/LocationExpansionTest.scala"], - data = ["//src/java/com/google/devtools/build/lib:worker"], - jvm_flags = ["-Dlocation.expanded=$(location //src/java/com/google/devtools/build/lib:worker)"], + data = ["//third_party/bazel/src/main/protobuf:worker_protocol_java_proto"], + jvm_flags = ["-Dlocation.expanded='$(locations //third_party/bazel/src/main/protobuf:worker_protocol_java_proto)'"], suffixes = 
["Test"], ) @@ -720,3 +755,15 @@ scala_junit_test( tests_from = [":JunitMixedSeparateTarget"], runtime_deps = [":JunitMixedSeparateTarget"], ) + +py_binary( + name = "py_resource_binary", + srcs = ["py_resource.py"], + main = "py_resource.py", +) + +scala_test( + name = "ScalaTestResourcesFromLocalTargetTest", + srcs = ["ScalaTestResourcesFromLocalTargetTest.scala"], + resources = [":py_resource_binary"], +) diff --git a/test/ScalaTestResourcesFromLocalTargetTest.scala b/test/ScalaTestResourcesFromLocalTargetTest.scala new file mode 100644 index 000000000..717e4dae9 --- /dev/null +++ b/test/ScalaTestResourcesFromLocalTargetTest.scala @@ -0,0 +1,9 @@ +package scalarules.test + +class ScalaTestResourcesFromLocalTargetTest extends org.scalatest.FlatSpec { + "scala_test's resources when referencing local target" should + "assert that local target is not placed in bazel-out, but rather next to the packaged code" in { + assert(getClass.getResource("/bazel-out/darwin-fastbuild/bin/test/py_resource_binary") == null) + assert(getClass.getResource("/test/py_resource_binary") != null) + } +} \ No newline at end of file diff --git a/test/aspect/aspect.bzl b/test/aspect/aspect.bzl index 183397441..5ff91072d 100644 --- a/test/aspect/aspect.bzl +++ b/test/aspect/aspect.bzl @@ -43,6 +43,10 @@ def _rule_impl(ctx): "@io_bazel_rules_scala_org_hamcrest_hamcrest_core//:io_bazel_rules_scala_org_hamcrest_hamcrest_core", # From specs2/specs2.bzl:specs2_dependencies() "@io_bazel_rules_scala//specs2:specs2", + "@io_bazel_rules_scala_org_specs2_specs2_common//:io_bazel_rules_scala_org_specs2_specs2_common", + "@io_bazel_rules_scala_org_specs2_specs2_core//:io_bazel_rules_scala_org_specs2_specs2_core", + "@io_bazel_rules_scala_org_specs2_specs2_fp//:io_bazel_rules_scala_org_specs2_specs2_fp", + "@io_bazel_rules_scala_org_specs2_specs2_matcher//:io_bazel_rules_scala_org_specs2_specs2_matcher", "@io_bazel_rules_scala_scala_xml//:io_bazel_rules_scala_scala_xml", 
"@io_bazel_rules_scala_scala_parser_combinators//:io_bazel_rules_scala_scala_parser_combinators", "@io_bazel_rules_scala_scala_library//:io_bazel_rules_scala_scala_library", @@ -53,8 +57,8 @@ def _rule_impl(ctx): } content = "" for target in ctx.attr.targets: - visited = sorted(target.visited) - expected = sorted(expected_deps[target.label.name]) + visited = depset(sorted(target.visited)).to_list() + expected = depset(sorted(expected_deps[target.label.name])).to_list() if visited != expected: content += """ echo Expected these deps from {name}: 1>&2 diff --git a/test/coverage/A1.scala b/test/coverage/A1.scala index 2dcc34f43..be90760e4 100644 --- a/test/coverage/A1.scala +++ b/test/coverage/A1.scala @@ -1,3 +1,5 @@ +package coverage; + object A1 { def a1(flag: Boolean): B1.type = if (flag) B1 diff --git a/test/coverage/A2.scala b/test/coverage/A2.scala index 0e58f455d..ff3cce15c 100644 --- a/test/coverage/A2.scala +++ b/test/coverage/A2.scala @@ -1,7 +1,8 @@ +package coverage; + object A2 { def a2(): Unit = { - println("a2: " + - "" // B2.b2_a() + println("a2: " + B2.b2_a() ) } } diff --git a/test/coverage/B1.scala b/test/coverage/B1.scala index 3b5a9f305..ee8f12665 100644 --- a/test/coverage/B1.scala +++ b/test/coverage/B1.scala @@ -1,3 +1,5 @@ +package coverage; + object B1 { def not_called(): Unit = { diff --git a/test/coverage/B2.java b/test/coverage/B2.java index 30b97c543..629bb28d0 100644 --- a/test/coverage/B2.java +++ b/test/coverage/B2.java @@ -1,3 +1,5 @@ +package coverage; + class B2 { public static String b2_a() { return C2.c2("hello from b2_a"); diff --git a/test/coverage/BUILD b/test/coverage/BUILD index 1d3cc583b..2a7149336 100644 --- a/test/coverage/BUILD +++ b/test/coverage/BUILD @@ -1,19 +1,7 @@ +load("@rules_java//java:defs.bzl", "java_library", "java_test") load("//scala:scala.bzl", "scala_library", "scala_test") load("//scala:scala_toolchain.bzl", "scala_toolchain") -scala_toolchain( - name = "enable_code_coverage_aspect_impl", - 
enable_code_coverage_aspect = "on", - visibility = ["//visibility:public"], -) - -toolchain( - name = "enable_code_coverage_aspect", - toolchain = "enable_code_coverage_aspect_impl", - toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", - visibility = ["//visibility:public"], -) - scala_test( name = "test-all", srcs = [ @@ -61,20 +49,10 @@ scala_library( "A2.scala", ], deps = [ - # TODO :: Understand why referencing a local java library breaks coverage - # ":b2", + ":b2", ], ) -# -# As it stands I can't seem to generate coverage for Java libraries pulled into -# a scala_test target. -# -# The java_library is instrumented, but doesn't have the .uninstrumented files -# that Bazel seems to expect. There are a few code paths for code coverage, so -# down the road we can explore how to fix this... -# - java_library( name = "b2", srcs = [ diff --git a/test/coverage/C2.scala b/test/coverage/C2.scala index 7e405b1cf..40daebda1 100644 --- a/test/coverage/C2.scala +++ b/test/coverage/C2.scala @@ -1,3 +1,5 @@ +package coverage; + object C2 { def c2(input: String): String = input.reverse diff --git a/test/coverage/TestAll.scala b/test/coverage/TestAll.scala index cbe8f9e4e..aae034261 100644 --- a/test/coverage/TestAll.scala +++ b/test/coverage/TestAll.scala @@ -1,3 +1,4 @@ +package coverage; import org.scalatest._ class TestAll extends FlatSpec { diff --git a/test/coverage/TestB2.java b/test/coverage/TestB2.java index ea5275e64..bffa8898f 100644 --- a/test/coverage/TestB2.java +++ b/test/coverage/TestB2.java @@ -1,3 +1,5 @@ +package coverage; + import org.junit.Test; import org.junit.Assert.*; diff --git a/test/coverage/expected-coverage.dat b/test/coverage/expected-coverage.dat index d142c915f..d92769254 100755 --- a/test/coverage/expected-coverage.dat +++ b/test/coverage/expected-coverage.dat @@ -1,78 +1,110 @@ -SF:/A1.scala -FN:-1,A1$:: ()V -FN:5,A1$:: ()V -FN:3,A1$::a1 (Z)LB1$; -FN:-1,A1::a1 (Z)LB1$; -FNDA:1,A1$:: ()V -FNDA:1,A1$:: ()V -FNDA:1,A1$::a1 (Z)LB1$; 
-FNDA:0,A1::a1 (Z)LB1$; +SF:test/coverage/A1.scala +FN:-1,coverage/A1$:: ()V +FN:7,coverage/A1$:: ()V +FN:5,coverage/A1$::a1 (Z)Lcoverage/B1$; +FN:-1,coverage/A1::a1 (Z)Lcoverage/B1$; +FNDA:1,coverage/A1$:: ()V +FNDA:1,coverage/A1$:: ()V +FNDA:1,coverage/A1$::a1 (Z)Lcoverage/B1$; +FNDA:0,coverage/A1::a1 (Z)Lcoverage/B1$; FNF:4 FNH:3 -BA:3,2 +BA:5,2 BRF:1 BRH:1 -DA:3,4 -DA:4,0 -DA:5,5 +DA:5,4 +DA:6,0 +DA:7,5 LH:2 LF:3 end_of_record -SF:/A2.scala -FN:-1,A2$:: ()V -FN:7,A2$:: ()V -FN:3,A2$::a2 ()V -FN:-1,A2::a2 ()V -FNDA:1,A2$:: ()V -FNDA:1,A2$:: ()V -FNDA:1,A2$::a2 ()V -FNDA:0,A2::a2 ()V +SF:test/coverage/A2.scala +FN:-1,coverage/A2$:: ()V +FN:8,coverage/A2$:: ()V +FN:5,coverage/A2$::a2 ()V +FN:-1,coverage/A2::a2 ()V +FNDA:1,coverage/A2$:: ()V +FNDA:1,coverage/A2$:: ()V +FNDA:1,coverage/A2$::a2 ()V +FNDA:0,coverage/A2::a2 ()V FNF:4 FNH:3 -DA:3,4 -DA:7,5 +DA:5,11 +DA:8,5 LH:2 LF:2 end_of_record -SF:/B1.scala -FN:-1,B1$:: ()V -FN:7,B1$:: ()V -FN:4,B1$::not_called ()V -FN:-1,B1::not_called ()V -FNDA:1,B1$:: ()V -FNDA:1,B1$:: ()V -FNDA:0,B1$::not_called ()V -FNDA:0,B1::not_called ()V +SF:test/coverage/B1.scala +FN:-1,coverage/B1$:: ()V +FN:9,coverage/B1$:: ()V +FN:6,coverage/B1$::not_called ()V +FN:-1,coverage/B1::not_called ()V +FNDA:1,coverage/B1$:: ()V +FNDA:1,coverage/B1$:: ()V +FNDA:0,coverage/B1$::not_called ()V +FNDA:0,coverage/B1::not_called ()V FNF:4 FNH:2 -DA:4,0 -DA:7,5 +DA:6,0 +DA:9,5 +LH:1 +LF:2 +end_of_record +SF:test/coverage/B2.java +FN:3,coverage/B2:: ()V +FN:5,coverage/B2::b2_a ()Ljava/lang/String; +FN:9,coverage/B2::b2_b ()V +FNDA:0,coverage/B2:: ()V +FNDA:1,coverage/B2::b2_a ()Ljava/lang/String; +FNDA:0,coverage/B2::b2_b ()V +FNF:3 +FNH:1 +DA:3,0 +DA:5,3 +DA:9,0 +DA:10,0 LH:1 +LF:4 +end_of_record +SF:test/coverage/C2.scala +FN:-1,coverage/C2$:: ()V +FN:7,coverage/C2$:: ()V +FN:5,coverage/C2$::c2 (Ljava/lang/String;)Ljava/lang/String; +FN:-1,coverage/C2::c2 (Ljava/lang/String;)Ljava/lang/String; +FNDA:1,coverage/C2$:: ()V +FNDA:1,coverage/C2$:: ()V 
+FNDA:1,coverage/C2$::c2 (Ljava/lang/String;)Ljava/lang/String; +FNDA:1,coverage/C2::c2 (Ljava/lang/String;)Ljava/lang/String; +FNF:4 +FNH:4 +DA:5,9 +DA:7,5 +LH:2 LF:2 end_of_record -SF:/TestAll.scala -FN:10,TestAll$$anonfun$1:: (LTestAll;)V -FN:10,TestAll$$anonfun$1::apply ()V -FN:10,TestAll$$anonfun$1::apply$mcV$sp ()V -FN:6,TestAll$$anonfun$2:: (LTestAll;)V -FN:6,TestAll$$anonfun$2::apply ()Lorg/scalatest/compatible/Assertion; -FN:3,TestAll:: ()V -FNDA:1,TestAll$$anonfun$1:: (LTestAll;)V -FNDA:1,TestAll$$anonfun$1::apply ()V -FNDA:1,TestAll$$anonfun$1::apply$mcV$sp ()V -FNDA:1,TestAll$$anonfun$2:: (LTestAll;)V -FNDA:1,TestAll$$anonfun$2::apply ()Lorg/scalatest/compatible/Assertion; -FNDA:1,TestAll:: ()V +SF:test/coverage/TestAll.scala +FN:11,coverage/TestAll$$anonfun$1:: (Lcoverage/TestAll;)V +FN:11,coverage/TestAll$$anonfun$1::apply ()V +FN:11,coverage/TestAll$$anonfun$1::apply$mcV$sp ()V +FN:7,coverage/TestAll$$anonfun$2:: (Lcoverage/TestAll;)V +FN:7,coverage/TestAll$$anonfun$2::apply ()Lorg/scalatest/compatible/Assertion; +FN:4,coverage/TestAll:: ()V +FNDA:1,coverage/TestAll$$anonfun$1:: (Lcoverage/TestAll;)V +FNDA:1,coverage/TestAll$$anonfun$1::apply ()V +FNDA:1,coverage/TestAll$$anonfun$1::apply$mcV$sp ()V +FNDA:1,coverage/TestAll$$anonfun$2:: (Lcoverage/TestAll;)V +FNDA:1,coverage/TestAll$$anonfun$2::apply ()Lorg/scalatest/compatible/Assertion; +FNDA:1,coverage/TestAll:: ()V FNF:6 FNH:6 -BA:6,2 +BA:7,2 BRF:1 BRH:1 -DA:3,2 -DA:5,22 -DA:6,51 -DA:9,23 -DA:10,13 +DA:4,2 +DA:6,22 +DA:7,51 +DA:10,23 +DA:11,13 LH:5 LF:5 end_of_record diff --git a/test/example_jars/BUILD b/test/example_jars/BUILD index 612e2e857..773f28aa5 100644 --- a/test/example_jars/BUILD +++ b/test/example_jars/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_import") + java_import( name = "example_jar1", jars = [ diff --git a/test/jmh/BUILD b/test/jmh/BUILD index 17edd80d1..143b9367c 100644 --- a/test/jmh/BUILD +++ b/test/jmh/BUILD @@ -1,3 +1,4 @@ 
+load("@rules_java//java:defs.bzl", "java_library") load("//scala:scala.bzl", "scala_library") load("//jmh:jmh.bzl", "scala_benchmark_jmh") diff --git a/test/phase/providers/BUILD.bazel b/test/phase/providers/BUILD.bazel new file mode 100644 index 000000000..e7fb39d78 --- /dev/null +++ b/test/phase/providers/BUILD.bazel @@ -0,0 +1,30 @@ +load(":phase_providers_expose.bzl", "phase_expose_provider_singleton", "rule_that_needs_custom_provider", "scala_library_that_exposes_custom_provider") +load(":phase_providers_override.bzl", "phase_override_provider_singleton", "rule_that_has_phases_which_override_providers", "rule_that_verifies_providers_are_overriden") + +scala_library_that_exposes_custom_provider( + name = "scala_library_that_exposes_custom_provider", +) + +rule_that_needs_custom_provider( + name = "rule_that_needs_custom_provider", + dep = ":scala_library_that_exposes_custom_provider", +) + +phase_expose_provider_singleton( + name = "phase_expose_provider_singleton_target", + visibility = ["//visibility:public"], +) + +rule_that_has_phases_which_override_providers( + name = "PhaseOverridesProvider", +) + +rule_that_verifies_providers_are_overriden( + name = "PhaseOverridesProviderTest", + dep = ":PhaseOverridesProvider", +) + +phase_override_provider_singleton( + name = "phase_override_provider_singleton_target", + visibility = ["//visibility:public"], +) diff --git a/test/phase/providers/phase_providers_expose.bzl b/test/phase/providers/phase_providers_expose.bzl new file mode 100644 index 000000000..10660c430 --- /dev/null +++ b/test/phase/providers/phase_providers_expose.bzl @@ -0,0 +1,42 @@ +load("@io_bazel_rules_scala//scala:advanced_usage/providers.bzl", "ScalaRulePhase") +load("@io_bazel_rules_scala//scala:advanced_usage/scala.bzl", "make_scala_library") + +ext_phase_expose_provider = { + "phase_providers": [ + "//test/phase/providers:phase_expose_provider_singleton_target", + ], +} + +scala_library_that_exposes_custom_provider = 
make_scala_library(ext_phase_expose_provider) + +_some_position = "last" #last position is just because a location is mandatory, not important + +def _phase_expose_provider_singleton_implementation(ctx): + return [ + ScalaRulePhase( + custom_phases = [ + (_some_position, "", "phase_expose_provider", _phase_expose_provider), + ], + ), + ] + +phase_expose_provider_singleton = rule( + implementation = _phase_expose_provider_singleton_implementation, +) + +CustomProviderExposedByPhase = provider() + +def _phase_expose_provider(ctx, p): + return struct( + external_providers = {"CustomProviderExposedByPhase": CustomProviderExposedByPhase()}, + ) + +def _rule_that_needs_custom_provider_impl(ctx): + return [] + +rule_that_needs_custom_provider = rule( + implementation = _rule_that_needs_custom_provider_impl, + attrs = { + "dep": attr.label(providers = [CustomProviderExposedByPhase]), + }, +) diff --git a/test/phase/providers/phase_providers_override.bzl b/test/phase/providers/phase_providers_override.bzl new file mode 100644 index 000000000..c432cc55f --- /dev/null +++ b/test/phase/providers/phase_providers_override.bzl @@ -0,0 +1,61 @@ +load("@io_bazel_rules_scala//scala:advanced_usage/providers.bzl", "ScalaRulePhase") +load("@io_bazel_rules_scala//scala:advanced_usage/scala.bzl", "make_scala_library") + +ext_phase_override_provider = { + "phase_providers": [ + "//test/phase/providers:phase_override_provider_singleton_target", + ], +} + +rule_that_has_phases_which_override_providers = make_scala_library(ext_phase_override_provider) + +def _phase_override_provider_singleton_implementation(ctx): + return [ + ScalaRulePhase( + custom_phases = [ + ("last", "", "first_custom", _phase_original), + ("after", "first_custom", "second_custom", _phase_override), + ], + ), + ] + +phase_override_provider_singleton = rule( + implementation = _phase_override_provider_singleton_implementation, +) + +OverrideProvider = provider(fields = ["content"]) + +def _phase_original(ctx, p): + 
return struct( + external_providers = { + "OverrideProvider": OverrideProvider( + content = "original", + ), + }, + ) + +def _phase_override(ctx, p): + return struct( + external_providers = { + "OverrideProvider": OverrideProvider( + content = "override", + ), + }, + ) + +def _rule_that_verifies_providers_are_overriden_impl(ctx): + if (ctx.attr.dep[OverrideProvider].content != "override"): + fail( + "expected OverrideProvider of {label} to have content 'override' but got '{content}'".format( + label = ctx.label, + content = ctx.attr.dep[OverrideProvider].content, + ), + ) + return [] + +rule_that_verifies_providers_are_overriden = rule( + implementation = _rule_that_verifies_providers_are_overriden_impl, + attrs = { + "dep": attr.label(providers = [OverrideProvider]), + }, +) diff --git a/test/proto/BUILD b/test/proto/BUILD index a4614bb57..f97e6ddb3 100644 --- a/test/proto/BUILD +++ b/test/proto/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_proto_library") +load("@rules_proto//proto:defs.bzl", "proto_library") load( "//scala_proto:scala_proto.bzl", "scala_proto_library", @@ -167,3 +169,79 @@ scala_test( ":test_external_dep", ], ) + +proto_library( + name = "standalone_proto", + srcs = ["standalone.proto"], +) + +proto_library( + name = "standalone_proto_strip_import_prefix_partial", + srcs = ["standalone.proto"], + strip_import_prefix = "/test", +) + +proto_library( + name = "standalone_proto_strip_import_prefix_package", + srcs = ["standalone.proto"], + strip_import_prefix = "/" + package_name(), +) + +proto_library( + name = "standalone_proto_with_import_prefix", + srcs = ["standalone.proto"], + import_prefix = "prefix", +) + +proto_library( + name = "standalone_proto_with_custom_prefix", + srcs = ["standalone.proto"], + import_prefix = "prefix", + strip_import_prefix = "/test", +) + +proto_library( + name = "nested_proto", + srcs = ["some/path/nested.proto"], +) + +proto_library( + name = "nested_proto_strip_import_prefix_relative", + srcs = 
["some/path/nested.proto"], + strip_import_prefix = "some", +) + +proto_library( + name = "nested_proto_with_import_prefix", + srcs = ["some/path/nested.proto"], + import_prefix = "prefix", +) + +proto_library( + name = "nested_proto_with_custom_prefix", + srcs = ["some/path/nested.proto"], + import_prefix = "prefix", + strip_import_prefix = "some", +) + +scala_proto_library( + name = "pack_protos_lib", + deps = [ + ":nested_proto", + ":nested_proto_strip_import_prefix_relative", + ":nested_proto_with_custom_prefix", + ":nested_proto_with_import_prefix", + ":standalone_proto", + ":standalone_proto_strip_import_prefix_package", + ":standalone_proto_strip_import_prefix_partial", + ":standalone_proto_with_custom_prefix", + ":standalone_proto_with_import_prefix", + ], +) + +scala_test( + name = "test_pack_protos", + srcs = ["PackProtosTest.scala"], + unused_dependency_checker_mode = "off", + deps = [":pack_protos_lib"], +) diff --git a/test/proto/PackProtosTest.scala b/test/proto/PackProtosTest.scala new file mode 100644 index 000000000..f75084177 --- /dev/null +++ b/test/proto/PackProtosTest.scala @@ -0,0 +1,13 @@ +class PackProtosTest extends org.scalatest.FlatSpec { + "scala_proto_library" should "pack input proto next to generated code" in { + assert(getClass.getResource("test/proto/standalone.proto") != null) + assert(getClass.getResource("proto/standalone.proto") != null) + assert(getClass.getResource("standalone.proto") != null) + assert(getClass.getResource("prefix/test/proto/standalone.proto") != null) + assert(getClass.getResource("prefix/proto/standalone.proto") != null) + assert(getClass.getResource("test/proto/some/path/nested.proto") != null) + assert(getClass.getResource("path/nested.proto") != null) + assert(getClass.getResource("prefix/test/proto/some/path/nested.proto") != null) + assert(getClass.getResource("prefix/path/nested.proto") != null) + } +} diff --git a/test/proto/some/path/nested.proto b/test/proto/some/path/nested.proto new file mode 
100644 index 000000000..24afdf24c --- /dev/null +++ b/test/proto/some/path/nested.proto @@ -0,0 +1,4 @@ +syntax = "proto3"; + +message Message { +} diff --git a/test/proto/standalone.proto b/test/proto/standalone.proto new file mode 100644 index 000000000..24afdf24c --- /dev/null +++ b/test/proto/standalone.proto @@ -0,0 +1,4 @@ +syntax = "proto3"; + +message Message { +} diff --git a/test/proto2/BUILD b/test/proto2/BUILD index 96e963557..2087f7626 100644 --- a/test/proto2/BUILD +++ b/test/proto2/BUILD @@ -1,3 +1,5 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + proto_library( name = "test", srcs = ["test.proto"], diff --git a/test/proto3/BUILD b/test/proto3/BUILD index 441040e72..2d28654fb 100644 --- a/test/proto3/BUILD +++ b/test/proto3/BUILD @@ -2,6 +2,7 @@ load( "//scala_proto:scala_proto.bzl", "scala_proto_library", ) +load("@rules_proto//proto:defs.bzl", "proto_library") genrule( name = "generated", diff --git a/test/proto_cross_repo_boundary/BUILD b/test/proto_cross_repo_boundary/BUILD new file mode 100644 index 000000000..42fb573a6 --- /dev/null +++ b/test/proto_cross_repo_boundary/BUILD @@ -0,0 +1,7 @@ +load("@io_bazel_rules_scala//scala_proto:scala_proto.bzl", "scalapb_proto_library") + +scalapb_proto_library( + name = "sample_scala_proto", + visibility = ["//visibility:public"], + deps = ["@proto_cross_repo_boundary//:sample_proto"], +) diff --git a/test/proto_cross_repo_boundary/repo.bzl b/test/proto_cross_repo_boundary/repo.bzl new file mode 100644 index 000000000..0f19c06d2 --- /dev/null +++ b/test/proto_cross_repo_boundary/repo.bzl @@ -0,0 +1,6 @@ +def proto_cross_repo_boundary_repository(): + native.new_local_repository( + name = "proto_cross_repo_boundary", + path = "test/proto_cross_repo_boundary/repo", + build_file = "test/proto_cross_repo_boundary/repo/BUILD.repo", + ) diff --git a/test/proto_cross_repo_boundary/repo/BUILD.repo b/test/proto_cross_repo_boundary/repo/BUILD.repo new file mode 100644 index 000000000..dc9aa78e5 --- 
/dev/null +++ b/test/proto_cross_repo_boundary/repo/BUILD.repo @@ -0,0 +1,7 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + +proto_library( + name = "sample_proto", + srcs = ["sample.proto"], + visibility = ["//visibility:public"], +) diff --git a/test/proto_cross_repo_boundary/repo/sample.proto b/test/proto_cross_repo_boundary/repo/sample.proto new file mode 100644 index 000000000..6fae4b202 --- /dev/null +++ b/test/proto_cross_repo_boundary/repo/sample.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package sample; + +option java_package = "sample"; + +message Sample { + repeated string foobar = 1; +} \ No newline at end of file diff --git a/test/py_resource.py b/test/py_resource.py new file mode 100644 index 000000000..5cb928578 --- /dev/null +++ b/test/py_resource.py @@ -0,0 +1 @@ +# Empty file just need to be used for reference \ No newline at end of file diff --git a/test/scala_test/A.scala b/test/scala_test/A.scala new file mode 100644 index 000000000..40cd31468 --- /dev/null +++ b/test/scala_test/A.scala @@ -0,0 +1,10 @@ + +import org.scalatest._ + +abstract class A extends FunSuite { + val Number: Int + + test("number is positive") { + assert(Number > 0) + } +} \ No newline at end of file diff --git a/test/scala_test/B.scala b/test/scala_test/B.scala new file mode 100644 index 000000000..e00c5182b --- /dev/null +++ b/test/scala_test/B.scala @@ -0,0 +1,4 @@ + +class B extends A { + override val Number: Int = 12 +} diff --git a/test/scala_test/BUILD b/test/scala_test/BUILD new file mode 100644 index 000000000..1ceb25b6f --- /dev/null +++ b/test/scala_test/BUILD @@ -0,0 +1,12 @@ +load("@io_bazel_rules_scala//scala:scala.bzl", "scala_test") + +scala_test( + name = "a", + srcs = ["A.scala"], +) + +scala_test( + name = "b", + srcs = ["B.scala"], + deps = [":a"], +) diff --git a/test/scalafmt/.scalafmt.conf b/test/scalafmt/.scalafmt.conf new file mode 100644 index 000000000..46b439c07 --- /dev/null +++ b/test/scalafmt/.scalafmt.conf @@ -0,0 +1 @@ 
+maxColumn = 40 diff --git a/test/scalafmt/BUILD b/test/scalafmt/BUILD new file mode 100644 index 000000000..696bc4323 --- /dev/null +++ b/test/scalafmt/BUILD @@ -0,0 +1,68 @@ +load( + "//test/scalafmt:phase_scalafmt_test.bzl", + "scalafmt_scala_binary", + "scalafmt_scala_library", + "scalafmt_scala_test", +) + +filegroup( + name = "custom-conf", + srcs = [".scalafmt.conf"], + visibility = ["//visibility:public"], +) + +### scala_binary ### +scalafmt_scala_binary( + name = "formatted-binary", + srcs = ["formatted/formatted-encoding.scala"], + format = True, + main_class = "scalarules.test.scalafmt.Format", +) + +scalafmt_scala_binary( + name = "unformatted-binary", + srcs = ["unformatted/unformatted-encoding.scala"], + format = True, + main_class = "scalarules.test.scalafmt.Format", +) + +### scala_library ### +scalafmt_scala_library( + name = "formatted-library", + srcs = ["formatted/formatted-encoding.scala"], + format = True, +) + +scalafmt_scala_library( + name = "unformatted-library", + srcs = ["unformatted/unformatted-encoding.scala"], + format = True, +) + +### scala_test ### +scalafmt_scala_test( + name = "formatted-test", + srcs = ["formatted/formatted-test.scala"], + format = True, +) + +scalafmt_scala_test( + name = "unformatted-test", + srcs = ["unformatted/unformatted-test.scala"], + format = True, +) + +### custom config ### +scalafmt_scala_library( + name = "formatted-custom-conf", + srcs = ["formatted/formatted-custom-conf.scala"], + config = ":custom-conf", + format = True, +) + +scalafmt_scala_library( + name = "unformatted-custom-conf", + srcs = ["unformatted/unformatted-custom-conf.scala"], + config = ":custom-conf", + format = True, +) diff --git a/test/scalafmt/formatted/formatted-custom-conf.scala b/test/scalafmt/formatted/formatted-custom-conf.scala new file mode 100644 index 000000000..9a237e692 --- /dev/null +++ b/test/scalafmt/formatted/formatted-custom-conf.scala @@ -0,0 +1,7 @@ +package scalarules.test.scalafmt +object Format { + def 
main(args: Array[String]) { + val warnings: String = + "Be careful with this test. The column number is limited to 40, so it should be in new line." + } +} diff --git a/test/scalafmt/formatted/formatted-encoding.scala b/test/scalafmt/formatted/formatted-encoding.scala new file mode 100644 index 000000000..9a5192a35 --- /dev/null +++ b/test/scalafmt/formatted/formatted-encoding.scala @@ -0,0 +1,12 @@ +package scalarules.test.scalafmt +object Format { + def main(args: Array[String]) { + val warnings: List[String] = List( + "Be careful with this test", + "小心這個測試", + "このテストに注意してください", + "이 시험에 조심하십시오", + "😁✊🚀🍟💯" + ) + } +} diff --git a/test/scalafmt/formatted/formatted-test.scala b/test/scalafmt/formatted/formatted-test.scala new file mode 100644 index 000000000..929e655f4 --- /dev/null +++ b/test/scalafmt/formatted/formatted-test.scala @@ -0,0 +1,7 @@ +package scalarules.test.scalafmt +import org.scalatest._ +class FormatTest extends FlatSpec { + "FormatTest" should "be formatted" in { + assert(true) + } +} diff --git a/test/scalafmt/phase_scalafmt_test.bzl b/test/scalafmt/phase_scalafmt_test.bzl new file mode 100644 index 000000000..ed5e46602 --- /dev/null +++ b/test/scalafmt/phase_scalafmt_test.bzl @@ -0,0 +1,16 @@ +load( + "//scala:advanced_usage/scala.bzl", + "make_scala_binary", + "make_scala_library", + "make_scala_test", +) +load( + "//scala/scalafmt:phase_scalafmt_ext.bzl", + "ext_scalafmt", +) + +scalafmt_scala_binary = make_scala_binary(ext_scalafmt) + +scalafmt_scala_library = make_scala_library(ext_scalafmt) + +scalafmt_scala_test = make_scala_test(ext_scalafmt) diff --git a/test/scalafmt/unformatted/unformatted-custom-conf.scala b/test/scalafmt/unformatted/unformatted-custom-conf.scala new file mode 100644 index 000000000..46a3dd420 --- /dev/null +++ b/test/scalafmt/unformatted/unformatted-custom-conf.scala @@ -0,0 +1,6 @@ +package scalarules.test.scalafmt +object Format { + def main(args: Array[String]) { + val warnings: String = "Be careful with this 
test. The column number is limited to 40, so it should be in new line." + } +} diff --git a/test/scalafmt/unformatted/unformatted-encoding.scala b/test/scalafmt/unformatted/unformatted-encoding.scala new file mode 100644 index 000000000..57cc66d0a --- /dev/null +++ b/test/scalafmt/unformatted/unformatted-encoding.scala @@ -0,0 +1,15 @@ + package scalarules.test.scalafmt + object Format { + def main ( args: + + + Array [String ]) { + val warnings : List [ String ] = List( + "Be careful with this test", + "小心這個測試", + "このテストに注意してください", + "이 시험에 조심하십시오", + "😁✊🚀🍟💯" + ) + } +} diff --git a/test/scalafmt/unformatted/unformatted-test.scala b/test/scalafmt/unformatted/unformatted-test.scala new file mode 100644 index 000000000..7a696cc48 --- /dev/null +++ b/test/scalafmt/unformatted/unformatted-test.scala @@ -0,0 +1,7 @@ + package scalarules.test.scalafmt + import org.scalatest._ + class FormatTest extends FlatSpec { + "FormatTest" should "be formatted" in { + assert ( true ) + } +} diff --git a/test/shell/test_build_event_protocol.sh b/test/shell/test_build_event_protocol.sh index 0ba50edce..42678cb16 100755 --- a/test/shell/test_build_event_protocol.sh +++ b/test/shell/test_build_event_protocol.sh @@ -5,11 +5,12 @@ runner=$(get_test_runner "${1:-local}") scala_binary_common_jar_is_exposed_in_build_event_protocol() { local target=$1 + local target_suffix=${2:-""} set +e bazel build test:$target --build_event_text_file=$target_bes.txt - cat $target_bes.txt | grep "test/$target.jar" + cat $target_bes.txt | grep "test/$target$target_suffix.jar" if [ $? 
-ne 0 ]; then - echo "test/$target.jar was not found in build event protocol:" + echo "test/$target$target_suffix.jar was not found in build event protocol:" cat $target_bes.txt rm $target_bes.txt exit 1 @@ -31,6 +32,26 @@ scala_junit_test_jar_is_exposed_in_build_event_protocol() { scala_binary_common_jar_is_exposed_in_build_event_protocol JunitTestWithDeps } +scala_binary_java_jar_is_exposed_in_build_event_protocol() { + scala_binary_common_jar_is_exposed_in_build_event_protocol MixJavaScalaBinary _java +} + +scala_library_java_jar_is_exposed_in_build_event_protocol() { + scala_binary_common_jar_is_exposed_in_build_event_protocol MixJavaScalaLib _java +} + +scala_test_java_jar_is_exposed_in_build_event_protocol() { + scala_binary_common_jar_is_exposed_in_build_event_protocol MixJavaScalaScalaTest _java +} + +junit_test_java_jar_is_exposed_in_build_event_protocol() { + scala_binary_common_jar_is_exposed_in_build_event_protocol MixJavaScalaJunitTest _java +} + $runner scala_binary_jar_is_exposed_in_build_event_protocol $runner scala_test_jar_is_exposed_in_build_event_protocol $runner scala_junit_test_jar_is_exposed_in_build_event_protocol +$runner scala_binary_java_jar_is_exposed_in_build_event_protocol +$runner scala_library_java_jar_is_exposed_in_build_event_protocol +$runner scala_test_java_jar_is_exposed_in_build_event_protocol +$runner junit_test_java_jar_is_exposed_in_build_event_protocol diff --git a/test/shell/test_deps.sh b/test/shell/test_deps.sh index 5570c1e09..746c40cb2 100755 --- a/test/shell/test_deps.sh +++ b/test/shell/test_deps.sh @@ -42,7 +42,20 @@ test_scala_import_expect_failure_on_missing_direct_deps_warn_mode() { test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message "${expected_message1}" ${test_target} "--strict_java_deps=warn" "ne" "${expected_message2}" } +test_plus_one_ast_analyzer_strict_deps() { + dependenecy_target='//test_expect_failure/missing_direct_deps/internal_deps:transitive_dependency' + 
test_target='test_expect_failure/missing_direct_deps/internal_deps:transitive_dependency_user' + + expected_message_warn="warning: Target '$dependenecy_target' is used but isn't explicitly declared, please add it to the deps" + expected_message_error="error: Target '$dependenecy_target' is used but isn't explicitly declared, please add it to the deps" + + test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message "${expected_message_error}" ${test_target} "--extra_toolchains=//test/toolchains:ast_plus_one_deps_strict_deps_error" "eq" + test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message "${expected_message_error}" ${test_target} "--extra_toolchains=//test/toolchains:ast_plus_one_deps_strict_deps_unused_deps_error" "eq" + test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message "${expected_message_warn}" ${test_target} "--extra_toolchains=//test/toolchains:ast_plus_one_deps_strict_deps_warn" "ne" +} + $runner test_scala_import_library_passes_labels_of_direct_deps $runner test_plus_one_deps_only_works_for_java_info_targets $runner scala_pb_library_targets_do_not_have_host_deps $runner test_scala_import_expect_failure_on_missing_direct_deps_warn_mode +$runner test_plus_one_ast_analyzer_strict_deps diff --git a/test/shell/test_helper.sh b/test/shell/test_helper.sh index 7e5f1b989..1c339f6ef 100755 --- a/test/shell/test_helper.sh +++ b/test/shell/test_helper.sh @@ -65,7 +65,7 @@ test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message() { expected_message=$1 test_target=$2 - strict_deps_mode=$3 + args=$3 operator=${4:-"eq"} additional_expected_message=${5:-""} @@ -75,7 +75,7 @@ test_expect_failure_or_warning_on_missing_direct_deps_with_expected_message() { error_message="bazel build of scala_library with missing direct deps should not have failed." 
fi - command="bazel build ${test_target} ${strict_deps_mode}" + command="bazel build ${test_target} ${args}" output=$(${command} 2>&1) status_code=$? diff --git a/test/shell/test_misc.sh b/test/shell/test_misc.sh index e1a767b71..2a58ce5da 100755 --- a/test/shell/test_misc.sh +++ b/test/shell/test_misc.sh @@ -55,12 +55,27 @@ test_repl() { test_benchmark_jmh() { RES=$(bazel run -- test/jmh:test_benchmark -i1 -f1 -wi 1) - RESPONSE_CODE=$? + if [ $? -ne 0 ]; then + exit 1 + fi if [[ $RES != *Result*Benchmark* ]]; then echo "Benchmark did not produce expected output:\n$RES" exit 1 fi - exit $RESPONSE_CODE + + exit 0 +} + +test_benchmark_jmh_failure() { + set +e + + bazel build test_expect_failure/jmh:jmh_reports_failure + if [ $? -eq 0 ]; then + echo "'bazel build test_expect_failure/jmh:jmh_reports_failure' should have failed." + exit 1 + fi + + exit 0 } scala_test_test_filters() { @@ -104,7 +119,7 @@ test_override_javabin() { test_coverage_on() { bazel coverage \ - --extra_toolchains="//test/coverage:enable_code_coverage_aspect" \ + --extra_toolchains="//scala:code_coverage_toolchain" \ //test/coverage/... diff test/coverage/expected-coverage.dat $(bazel info bazel-testlogs)/test/coverage/test-all/coverage.dat } @@ -117,6 +132,7 @@ $runner test_disappearing_class $runner test_transitive_deps $runner test_repl $runner test_benchmark_jmh +$runner test_benchmark_jmh_failure $runner scala_test_test_filters $runner test_multi_service_manifest $runner test_override_javabin diff --git a/test/shell/test_scalafmt.sh b/test/shell/test_scalafmt.sh new file mode 100755 index 000000000..03db1a895 --- /dev/null +++ b/test/shell/test_scalafmt.sh @@ -0,0 +1,75 @@ +# shellcheck source=./test_runner.sh +dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +. "${dir}"/test_runner.sh +. 
"${dir}"/test_helper.sh +runner=$(get_test_runner "${1:-local}") + +backup_unformatted() { + FILE_PATH=$1 + FILENAME=$2 + cp $FILE_PATH/unformatted/unformatted-$FILENAME.scala $FILE_PATH/unformatted/unformatted-$FILENAME.backup.scala +} + +restore_unformatted_before_exit() { + FILE_PATH=$1 + FILENAME=$2 + cp $FILE_PATH/unformatted/unformatted-$FILENAME.backup.scala $FILE_PATH/unformatted/unformatted-$FILENAME.scala + rm -f $FILE_PATH/unformatted/unformatted-$FILENAME.backup.scala +} + +run_formatting() { + set +e + + FILE_PATH="$( dirname $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) )"/scalafmt + RULE_TYPE=$1 + FILENAME=$2 + + bazel run //test/scalafmt:formatted-$RULE_TYPE.format-test + if [ $? -ne 0 ]; then + echo -e "${RED} formatted-$RULE_TYPE.format-test should be a formatted target. $NC" + exit 1 + fi + + bazel run //test/scalafmt:unformatted-$RULE_TYPE.format-test + if [ $? -eq 0 ]; then + echo -e "${RED} unformatted-$RULE_TYPE.format-test should be an unformatted target. $NC" + exit 1 + fi + + backup_unformatted $FILE_PATH $FILENAME + # format unformatted*.scala + bazel run //test/scalafmt:unformatted-$RULE_TYPE.format + if [ $? -ne 0 ]; then + echo -e "${RED} unformatted-$RULE_TYPE.format should run formatting. $NC" + restore_unformatted_before_exit $FILE_PATH $FILENAME + exit 1 + fi + + diff $FILE_PATH/unformatted/unformatted-$FILENAME.scala $FILE_PATH/formatted/formatted-$FILENAME.scala + if [ $? -ne 0 ]; then + echo -e "${RED} unformatted-$FILENAME.scala should be the same as formatted-$FILENAME.scala after formatting. 
$NC" + restore_unformatted_before_exit $FILE_PATH $FILENAME + exit 1 + fi + restore_unformatted_before_exit $FILE_PATH $FILENAME +} + +test_scalafmt_binary() { + run_formatting binary encoding +} + +test_scalafmt_library() { + run_formatting library encoding +} + +test_scalafmt_test() { + run_formatting test test +} +test_custom_conf() { + run_formatting custom-conf custom-conf +} + +$runner test_scalafmt_binary +$runner test_scalafmt_library +$runner test_scalafmt_test +$runner test_custom_conf diff --git a/test/shell/test_unused_dependency.sh b/test/shell/test_unused_dependency.sh index 6e8e1e2ff..4659004ab 100755 --- a/test/shell/test_unused_dependency.sh +++ b/test/shell/test_unused_dependency.sh @@ -16,17 +16,12 @@ test_unused_dependency_checker_mode_override_toolchain() { bazel build --extra_toolchains="//test_expect_failure/unused_dependency_checker:failing_scala_toolchain" //test_expect_failure/unused_dependency_checker:toolchain_override } -test_unused_dependency_checker_mode_warn() { - # this is a hack to invalidate the cache, so that the target actually gets built and outputs warnings. - bazel build \ - --strict_java_deps=warn \ - //test:UnusedDependencyCheckerWarn +test_succeeds_with_warning() { + cmd=$1 + expected=$2 local output - output=$(bazel build \ - --strict_java_deps=off \ - //test:UnusedDependencyCheckerWarn 2>&1 - ) + output=$($cmd 2>&1) if [ $? -ne 0 ]; then echo "Target with unused dependency failed to build with status $?" @@ -34,8 +29,6 @@ test_unused_dependency_checker_mode_warn() { exit 1 fi - local expected="warning: Target '//test:UnusedLib' is specified as a dependency to //test:UnusedDependencyCheckerWarn but isn't used, please remove it from the deps." - echo "$output" | grep "$expected" if [ $? 
-ne 0 ]; then echo "Expected output:[$output] to contain [$expected]" @@ -43,12 +36,48 @@ fi } +test_unused_dependency_checker_mode_warn() { + # this is a hack to invalidate the cache, so that the target actually gets built and outputs warnings. + bazel build \ + --strict_java_deps=warn \ + //test:UnusedDependencyCheckerWarn + + test_succeeds_with_warning \ + "bazel build --strict_java_deps=off //test:UnusedDependencyCheckerWarn" \ + "warning: Target '//test:UnusedLib' is specified as a dependency to //test:UnusedDependencyCheckerWarn but isn't used, please remove it from the deps." +} + test_unused_dependency_fails_even_if_also_exists_in_plus_one_deps() { action_should_fail build --extra_toolchains="//test_expect_failure/plus_one_deps:plus_one_deps_with_unused_error" //test_expect_failure/plus_one_deps/with_unused_deps:a } +test_plus_one_ast_analyzer_unused_deps_error() { + action_should_fail build --extra_toolchains="//test/toolchains:ast_plus_one_deps_unused_deps_error" //test_expect_failure/plus_one_deps/with_unused_deps:a +} + +test_plus_one_ast_analyzer_unused_deps_strict_deps_error() { + action_should_fail build --extra_toolchains="//test/toolchains:ast_plus_one_deps_strict_deps_unused_deps_error" //test_expect_failure/plus_one_deps/with_unused_deps:a +} + +test_plus_one_ast_analyzer_unused_deps_warn() { + test_succeeds_with_warning \ + "bazel build --extra_toolchains=//test/toolchains:ast_plus_one_deps_unused_deps_warn //test_expect_failure/plus_one_deps/with_unused_deps:a" \ + "warning: Target '//test_expect_failure/plus_one_deps/with_unused_deps:c' is specified as a dependency to //test_expect_failure/plus_one_deps/with_unused_deps:a but isn't used, please remove it from the deps." +} + +test_plus_one_ast_analyzer_unused_deps_scala_test() { + # We should not emit an unused dep warning for scalatest library in a scala_test rule + # even when the rule does not directly depend on scalatest. 
As scalatest is built into + # the scala_test library. + bazel build --extra_toolchains="//test/toolchains:ast_plus_one_deps_unused_deps_error" //test/scala_test:b +} + $runner test_unused_dependency_checker_mode_from_scala_toolchain $runner test_unused_dependency_checker_mode_set_in_rule $runner test_unused_dependency_checker_mode_override_toolchain $runner test_unused_dependency_checker_mode_warn $runner test_unused_dependency_fails_even_if_also_exists_in_plus_one_deps +$runner test_plus_one_ast_analyzer_unused_deps_error +$runner test_plus_one_ast_analyzer_unused_deps_strict_deps_error +$runner test_plus_one_ast_analyzer_unused_deps_warn +$runner test_plus_one_ast_analyzer_unused_deps_scala_test diff --git a/test/src/main/scala/scalarules/test/junit/HelloWorldJunitTest.scala b/test/src/main/scala/scalarules/test/junit/HelloWorldJunitTest.scala new file mode 100644 index 000000000..6d982fb67 --- /dev/null +++ b/test/src/main/scala/scalarules/test/junit/HelloWorldJunitTest.scala @@ -0,0 +1,12 @@ +package scalarules.test.junit + +import org.junit.Test + +class HelloWorldJunitTest { + + @Test + def helloWorld: Unit = { + println("hello world") + } + +} diff --git a/test/src/main/scala/scalarules/test/large_classpath/BUILD b/test/src/main/scala/scalarules/test/large_classpath/BUILD index e9f1415a5..4d8783082 100644 --- a/test/src/main/scala/scalarules/test/large_classpath/BUILD +++ b/test/src/main/scala/scalarules/test/large_classpath/BUILD @@ -1,4 +1,4 @@ -load("//scala:scala.bzl", "scala_binary") +load("//scala:scala.bzl", "scala_binary", "scala_library") load(":helper.bzl", "create_dependencies", "get_dependency_labels") scala_binary( @@ -8,12 +8,17 @@ scala_binary( unused_dependency_checker_mode = "off", visibility = ["//visibility:public"], deps = get_dependency_labels( - amount = 250, + amount = 1000, length = 20, ), ) +scala_library( + name = "triggerDeployJarCreation", + data = [":largeClasspath_deploy.jar"], +) + create_dependencies( - amount = 250, + amount 
= 1000, length = 20, ) diff --git a/test/src/main/scala/scalarules/test/resources/strip/BUILD b/test/src/main/scala/scalarules/test/resources/strip/BUILD index b4036aaf9..b13cb4542 100644 --- a/test/src/main/scala/scalarules/test/resources/strip/BUILD +++ b/test/src/main/scala/scalarules/test/resources/strip/BUILD @@ -14,3 +14,12 @@ scala_specs2_junit_test( unused_dependency_checker_mode = "off", deps = [":noSrcsWithResources"], ) + +scala_specs2_junit_test( + name = "resouceStripPrefixFromExternalRepoTest", + size = "small", + srcs = ["ResourceStripPrefixTest.scala"], + suffixes = ["Test"], + unused_dependency_checker_mode = "off", + deps = ["@strip_resource_external_workspace//strip:noSrcsWithResources"], +) diff --git a/test/src/main/scala/scalarules/test/scala_import/BUILD b/test/src/main/scala/scalarules/test/scala_import/BUILD index 0ffc07d7c..80ca9ccd7 100644 --- a/test/src/main/scala/scalarules/test/scala_import/BUILD +++ b/test/src/main/scala/scalarules/test/scala_import/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_import") load("//scala:scala.bzl", "scala_library", "scala_specs2_junit_test") load("//scala:scala_import.bzl", "scala_import") diff --git a/test/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD b/test/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD index 7ba56931a..ac9c03ac4 100644 --- a/test/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD +++ b/test/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_import") load("//thrift:thrift.bzl", "thrift_library") java_import( diff --git a/test/toolchains/BUILD.bazel b/test/toolchains/BUILD.bazel new file mode 100644 index 000000000..64a60a4ca --- /dev/null +++ b/test/toolchains/BUILD.bazel @@ -0,0 +1,77 @@ +load("//scala:scala_toolchain.bzl", "scala_toolchain") + +scala_toolchain( + name = 
"ast_plus_one_deps_unused_deps_warn_impl", + dependency_mode = "plus-one", + dependency_tracking_method = "ast", + unused_dependency_checker_mode = "warn", + visibility = ["//visibility:public"], +) + +toolchain( + name = "ast_plus_one_deps_unused_deps_warn", + toolchain = "ast_plus_one_deps_unused_deps_warn_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) + +scala_toolchain( + name = "ast_plus_one_deps_unused_deps_error_impl", + dependency_mode = "plus-one", + dependency_tracking_method = "ast", + unused_dependency_checker_mode = "error", + visibility = ["//visibility:public"], +) + +toolchain( + name = "ast_plus_one_deps_unused_deps_error", + toolchain = "ast_plus_one_deps_unused_deps_error_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) + +scala_toolchain( + name = "ast_plus_one_deps_strict_deps_warn_impl", + dependency_mode = "plus-one", + dependency_tracking_method = "ast", + strict_deps_mode = "warn", + visibility = ["//visibility:public"], +) + +toolchain( + name = "ast_plus_one_deps_strict_deps_warn", + toolchain = "ast_plus_one_deps_strict_deps_warn_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) + +scala_toolchain( + name = "ast_plus_one_deps_strict_deps_error_impl", + dependency_mode = "plus-one", + dependency_tracking_method = "ast", + strict_deps_mode = "error", + visibility = ["//visibility:public"], +) + +toolchain( + name = "ast_plus_one_deps_strict_deps_error", + toolchain = "ast_plus_one_deps_strict_deps_error_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) + +scala_toolchain( + name = "ast_plus_one_deps_strict_deps_unused_deps_error_impl", + dependency_mode = "plus-one", + dependency_tracking_method = "ast", + strict_deps_mode = "error", + unused_dependency_checker_mode = "error", 
+ visibility = ["//visibility:public"], +) + +toolchain( + name = "ast_plus_one_deps_strict_deps_unused_deps_error", + toolchain = "ast_plus_one_deps_strict_deps_unused_deps_error_impl", + toolchain_type = "@io_bazel_rules_scala//scala:toolchain_type", + visibility = ["//visibility:public"], +) diff --git a/test/unstable/BUILD b/test/unstable/BUILD new file mode 100644 index 000000000..621ee2e25 --- /dev/null +++ b/test/unstable/BUILD @@ -0,0 +1,27 @@ +load( + "//scala/unstable:defs.bzl", + "scala_binary", + "scala_library", + "scala_test", +) + +scala_binary( + name = "binary", + srcs = ["binary.scala"], + main_class = "test.v2.Binary", + deps = [":library"], +) + +scala_library( + name = "library", + srcs = ["library.scala"], + deps = [], +) + +scala_test( + name = "test", + srcs = ["test.scala"], + deps = [ + ":library", + ], +) diff --git a/test/unstable/binary.scala b/test/unstable/binary.scala new file mode 100644 index 000000000..56486bffa --- /dev/null +++ b/test/unstable/binary.scala @@ -0,0 +1,7 @@ +package test.v2 + +object Binary { + def main(args: Array[String]): Unit = { + println(s"${Library.method1} ${Library.method2}") + } +} diff --git a/test/unstable/library.scala b/test/unstable/library.scala new file mode 100644 index 000000000..8048a67ca --- /dev/null +++ b/test/unstable/library.scala @@ -0,0 +1,6 @@ +package test.v2 + +object Library { + def method1(): String = "hello" + def method2(): String = "world" +} diff --git a/test/unstable/test.scala b/test/unstable/test.scala new file mode 100644 index 000000000..88985c7fb --- /dev/null +++ b/test/unstable/test.scala @@ -0,0 +1,13 @@ +package test.v2 + +import org.scalatest.FunSuite + +class Test extends FunSuite { + test("method1") { + assert(Library.method1 == "hello") + } + + test("method2") { + assert(Library.method2 == "world") + } +} diff --git a/test_expect_failure/jmh/BUILD b/test_expect_failure/jmh/BUILD new file mode 100644 index 000000000..fbd48091d --- /dev/null +++ 
b/test_expect_failure/jmh/BUILD @@ -0,0 +1,11 @@ +load( + "//jmh:jmh.bzl", + "scala_benchmark_jmh", +) + +scala_benchmark_jmh( + name = "jmh_reports_failure", + srcs = [ + "InvalidBenchmark.scala", + ], +) diff --git a/test_expect_failure/jmh/InvalidBenchmark.scala b/test_expect_failure/jmh/InvalidBenchmark.scala new file mode 100644 index 000000000..5a0472942 --- /dev/null +++ b/test_expect_failure/jmh/InvalidBenchmark.scala @@ -0,0 +1,10 @@ +package foo + +import org.openjdk.jmh.annotations.Benchmark + +// Benchmark classes cannot be final. +final class InvalidBenchmark { + @Benchmark + def sumIntegersBenchmark: Int = + (1 to 100).sum +} diff --git a/test_expect_failure/missing_direct_deps/internal_deps/BUILD b/test_expect_failure/missing_direct_deps/internal_deps/BUILD index de6053f4f..ac1ade64a 100644 --- a/test_expect_failure/missing_direct_deps/internal_deps/BUILD +++ b/test_expect_failure/missing_direct_deps/internal_deps/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_library") + package(default_visibility = ["//visibility:public"]) load("//scala:scala.bzl", "scala_binary", "scala_library", "scala_test") diff --git a/test_expect_failure/proto_source_root/dependency/BUILD b/test_expect_failure/proto_source_root/dependency/BUILD index 37ca7370a..a514e80b9 100644 --- a/test_expect_failure/proto_source_root/dependency/BUILD +++ b/test_expect_failure/proto_source_root/dependency/BUILD @@ -1,3 +1,5 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + proto_library( name = "dependency", srcs = glob(["*.proto"]), diff --git a/test_expect_failure/proto_source_root/user/BUILD b/test_expect_failure/proto_source_root/user/BUILD index 4431a2876..34ce09598 100644 --- a/test_expect_failure/proto_source_root/user/BUILD +++ b/test_expect_failure/proto_source_root/user/BUILD @@ -1,3 +1,4 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") load( "//scala_proto:scala_proto.bzl", "scala_proto_library", diff --git 
a/test_expect_failure/transitive/java_to_scala/BUILD b/test_expect_failure/transitive/java_to_scala/BUILD index 73d288846..c60bd89cb 100644 --- a/test_expect_failure/transitive/java_to_scala/BUILD +++ b/test_expect_failure/transitive/java_to_scala/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_library") load("//scala:scala.bzl", "scala_export_to_java", "scala_library") scala_library( diff --git a/test_expect_failure/transitive/scala_to_java/BUILD b/test_expect_failure/transitive/scala_to_java/BUILD index 9e2ab1c2b..dd06d56e0 100644 --- a/test_expect_failure/transitive/scala_to_java/BUILD +++ b/test_expect_failure/transitive/scala_to_java/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_library") load("//scala:scala.bzl", "scala_library") java_library( diff --git a/test_rules_scala.sh b/test_rules_scala.sh index e8456fabf..c57f62661 100755 --- a/test_rules_scala.sh +++ b/test_rules_scala.sh @@ -30,6 +30,7 @@ $runner bazel test //test/... --extra_toolchains="//test_expect_failure/plus_one . "${test_dir}"/test_junit.sh . "${test_dir}"/test_misc.sh . "${test_dir}"/test_phase.sh +. "${test_dir}"/test_scalafmt.sh . "${test_dir}"/test_scala_binary.sh . "${test_dir}"/test_scalac_jvm_flags.sh . 
"${test_dir}"/test_scala_classpath.sh diff --git a/test_version/WORKSPACE.template b/test_version/WORKSPACE.template index 0dd7cd7b3..f2705e2cb 100644 --- a/test_version/WORKSPACE.template +++ b/test_version/WORKSPACE.template @@ -11,9 +11,12 @@ scala_version = "${scala_version}" load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories") -scala_repositories((scala_version, { -${scala_version_shas} -})) +scala_repositories( + (scala_version, { + ${scala_version_shas} + }), + fetch_sources = True +) load("@io_bazel_rules_scala//twitter_scrooge:twitter_scrooge.bzl", "twitter_scrooge", "scrooge_scala_library") diff --git a/test_version/version_specific_tests_dir/BUILD b/test_version/version_specific_tests_dir/BUILD index 5c048a244..b80c9f893 100644 --- a/test_version/version_specific_tests_dir/BUILD +++ b/test_version/version_specific_tests_dir/BUILD @@ -1,3 +1,5 @@ +load("@rules_java//java:defs.bzl", "java_binary", "java_library") + package(default_testonly = 1) load( diff --git a/test_version/version_specific_tests_dir/proto/BUILD b/test_version/version_specific_tests_dir/proto/BUILD index 98f34bea6..e49bf63f7 100644 --- a/test_version/version_specific_tests_dir/proto/BUILD +++ b/test_version/version_specific_tests_dir/proto/BUILD @@ -1,7 +1,9 @@ +load("@rules_java//java:defs.bzl", "java_proto_library") load( "@io_bazel_rules_scala//scala_proto:scala_proto.bzl", "scala_proto_library", ) +load("@rules_proto//proto:defs.bzl", "proto_library") proto_library( name = "test2", diff --git a/test_version/version_specific_tests_dir/proto2/BUILD b/test_version/version_specific_tests_dir/proto2/BUILD index 96e963557..2087f7626 100644 --- a/test_version/version_specific_tests_dir/proto2/BUILD +++ b/test_version/version_specific_tests_dir/proto2/BUILD @@ -1,3 +1,5 @@ +load("@rules_proto//proto:defs.bzl", "proto_library") + proto_library( name = "test", srcs = ["test.proto"], diff --git 
a/test_version/version_specific_tests_dir/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD b/test_version/version_specific_tests_dir/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD index 199526a6a..cef19fe06 100644 --- a/test_version/version_specific_tests_dir/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD +++ b/test_version/version_specific_tests_dir/src/main/scala/scalarules/test/twitter_scrooge/thrift/bare_jar_thrifts/BUILD @@ -1,3 +1,4 @@ +load("@rules_java//java:defs.bzl", "java_import") load("@io_bazel_rules_scala//thrift:thrift.bzl", "thrift_library") java_import( diff --git a/third_party/bazel/src/main/protobuf/BUILD b/third_party/bazel/src/main/protobuf/BUILD new file mode 100644 index 000000000..a505b865d --- /dev/null +++ b/third_party/bazel/src/main/protobuf/BUILD @@ -0,0 +1,20 @@ +load("@rules_java//java:defs.bzl", "java_proto_library") +load("@rules_proto//proto:defs.bzl", "proto_library") + +licenses(["notice"]) + +package(default_visibility = ["//visibility:public"]) + +FILES = [ + "worker_protocol", +] + +[proto_library( + name = s + "_proto", + srcs = [s + ".proto"], +) for s in FILES] + +[java_proto_library( + name = s + "_java_proto", + deps = [":" + s + "_proto"], +) for s in FILES] diff --git a/third_party/bazel/src/main/protobuf/worker_protocol.proto b/third_party/bazel/src/main/protobuf/worker_protocol.proto new file mode 100644 index 000000000..c628b7eb7 --- /dev/null +++ b/third_party/bazel/src/main/protobuf/worker_protocol.proto @@ -0,0 +1,62 @@ +// Copyright 2015 The Bazel Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package blaze.worker; + +option java_package = "com.google.devtools.build.lib.worker"; + +// An input file. +message Input { + // The path in the file system where to read this input artifact from. This is + // either a path relative to the execution root (the worker process is + // launched with the working directory set to the execution root), or an + // absolute path. + string path = 1; + + // A hash-value of the contents. The format of the contents is unspecified and + // the digest should be treated as an opaque token. + bytes digest = 2; +} + +// This represents a single work unit that Blaze sends to the worker. +message WorkRequest { + repeated string arguments = 1; + + // The inputs that the worker is allowed to read during execution of this + // request. + repeated Input inputs = 2; + + // To support multiplex worker, each WorkRequest must have an unique ID. This + // ID should be attached unchanged to the WorkResponse. + int32 request_id = 3; +} + +// The worker sends this message to Blaze when it finished its work on the +// WorkRequest message. +message WorkResponse { + int32 exit_code = 1; + + // This is printed to the user after the WorkResponse has been received and is + // supposed to contain compiler warnings / errors etc. - thus we'll use a + // string type here, which gives us UTF-8 encoding. + string output = 2; + + // To support multiplex worker, each WorkResponse must have an unique ID. 
+ // Since worker processes which support multiplex worker will handle multiple + // WorkRequests in parallel, this ID will be used to determined which + // WorkerProxy does this WorkResponse belong to. + int32 request_id = 3; +} diff --git a/third_party/dependency_analyzer/src/main/BUILD b/third_party/dependency_analyzer/src/main/BUILD index 40fb822a6..e9e00a221 100644 --- a/third_party/dependency_analyzer/src/main/BUILD +++ b/third_party/dependency_analyzer/src/main/BUILD @@ -2,14 +2,34 @@ licenses(["notice"]) # 3-clause BSD load("//scala:scala.bzl", "scala_library_for_plugin_bootstrapping") +scala_library_for_plugin_bootstrapping( + name = "scala_version", + srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/ScalaVersion.scala", + ], + # As this contains macros we shouldn't make an ijar + build_ijar = False, + resources = ["resources/scalac-plugin.xml"], + visibility = ["//visibility:public"], + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + ], +) + scala_library_for_plugin_bootstrapping( name = "dependency_analyzer", srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinder.scala", "io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzer.scala", + "io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerSettings.scala", + "io/bazel/rulesscala/dependencyanalyzer/HighLevelCrawlUsedJarFinder.scala", + "io/bazel/rulesscala/dependencyanalyzer/OptionsParser.scala", ], resources = ["resources/scalac-plugin.xml"], visibility = ["//visibility:public"], deps = [ + ":scala_version", "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", ], diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinder.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinder.scala new file mode 100644 index 
000000000..9f21f3fd2 --- /dev/null +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinder.scala @@ -0,0 +1,144 @@ +package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.Global + +class AstUsedJarFinder( + global: Global +) { + import global._ + + def findUsedJars: Map[AbstractFile, Global#Position] = { + val jars = collection.mutable.Map[AbstractFile, global.Position]() + + def recordUse(source: AbstractFile, pos: Position): Unit = { + // We prefer to report locations which have information (e.g. + // we don't want NoPosition). + if (!jars.contains(source) || !jars(source).isDefined) { + jars.put(source, pos) + } + } + + def handleType(tpe: Type, pos: Position): Unit = { + val sym = tpe.typeSymbol + val assocFile = sym.associatedFile + if (assocFile.path.endsWith(".class")) + assocFile.underlyingSource.foreach { source => + recordUse(source, pos) + } + } + + def exploreType(tpe: Type, pos: Position): Unit = { + handleType(tpe, pos) + tpe.typeArgs.foreach(exploreType(_, pos)) + } + + def exploreClassfileAnnotArg(arg: ClassfileAnnotArg, pos: Position): Unit = { + arg match { + case LiteralAnnotArg(value) => + exploreConstant(value, pos) + case ArrayAnnotArg(args) => + args.foreach(exploreClassfileAnnotArg(_, pos)) + case NestedAnnotArg(info) => + exploreAnnotationInfo(info) + case _ => + } + } + def exploreAnnotationInfo(annot: AnnotationInfo): Unit = { + // It would be nice if we could just do + // fullyExploreTree(annot.tree) + // Unfortunately that tree is synthetic and hence doesn't have + // positions attached. Hence we examine the components that + // go into that tree separately, as those do have positions. 
+ exploreType(annot.tpe, annot.pos) + annot.scalaArgs.foreach(fullyExploreTree) + annot.javaArgs.values.foreach(exploreClassfileAnnotArg(_, annot.pos)) + } + + def exploreConstant(value: Constant, pos: Position): Unit = { + value.value match { + case tpe: Type => + exploreType(tpe, pos) + case _ => + } + } + + def fullyExploreTree(tree: Tree): Unit = { + def visitNode(tree: Tree): Unit = { + tree match { + case node: TypeTree => + if (node.original != null) { + fullyExploreTree(node.original) + } + case node: Literal => + // We should examine OriginalTreeAttachment but that was only + // added in 2.12.4, so include a version check + ScalaVersion.conditional( + Some("2.12.4"), + None, + """ + node.attachments + .get[global.treeChecker.OriginalTreeAttachment] + .foreach { attach => + fullyExploreTree(attach.original) + } + """ + ) + + exploreConstant(node.value, tree.pos) + case _ => + } + + // If this expression is the result of a macro, then we + // should also examine the original macro expression + tree.attachments + .get[global.treeChecker.MacroExpansionAttachment] + .foreach { attach => + // When we explore the original, the original also has + // this attachment. So we should not examine the original + // again if so. + if (attach.expandee != tree) { + fullyExploreTree(attach.expandee) + } + } + + val shouldExamine = + tree match { + case select: Select if select.symbol.isDefaultGetter => + false + case _ => + true + } + + if (shouldExamine) { + if (tree.hasSymbolField) { + tree.symbol.annotations + // We skip annotations without positions. The reason for + // this is the case of + // @SomeAnnotation class A + // class B extends A + // Now assuming A and B are in separate packages, while + // examining B we will examine A as well, and hence + // examine A's annotations. However we don't wish to examine + // A's annotations as we don't care about those details of A. 
+ // Hence we only examine annotations with positions (hence, + // they were defined in the same compilation unit and thus + // matter). + .filter(_.pos.isDefined) + .foreach(exploreAnnotationInfo) + } + if (tree.tpe != null) { + exploreType(tree.tpe, tree.pos) + } + } + } + + tree.foreach(visitNode) + } + + currentRun.units.foreach { unit => + fullyExploreTree(unit.body) + } + jars.toMap + } +} diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzer.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzer.scala index b04724566..847188f20 100644 --- a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzer.scala +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzer.scala @@ -1,110 +1,180 @@ package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer import scala.reflect.io.AbstractFile -import scala.tools.nsc.plugins.{Plugin, PluginComponent} -import scala.tools.nsc.{Global, Phase} +import scala.tools.nsc.plugins.Plugin +import scala.tools.nsc.plugins.PluginComponent +import scala.tools.nsc.Global +import scala.tools.nsc.Phase class DependencyAnalyzer(val global: Global) extends Plugin { - val name = "dependency-analyzer" - val description = - "Analyzes the used dependencies and fails the compilation " + - "if they are not explicitly used as direct dependencies (only declared transitively)" - val components = List[PluginComponent](Component) - - var indirect: Map[String, String] = Map.empty - var direct: Set[String] = Set.empty - var analyzerMode: String = "error" - var currentTarget: String = "NA" - - override def processOptions(options: List[String], error: (String) => Unit): Unit = { - var indirectJars: Seq[String] = Seq.empty - var indirectTargets: Seq[String] = Seq.empty - - for (option <- options) { - option.split(":").toList match { - 
case "direct-jars" :: data => direct = data.toSet - case "indirect-jars" :: data => indirectJars = data; - case "indirect-targets" :: data => indirectTargets = data.map(_.replace(";", ":")) - case "current-target" :: target => currentTarget = target.map(_.replace(";", ":")).head - case "mode" :: mode => analyzerMode = mode.head - case unknown :: _ => error(s"unknown param $unknown") - case Nil => - } - } - indirect = indirectJars.zip(indirectTargets).toMap + override val name = "dependency-analyzer" + override val description = + "Analyzes the used dependencies. Can check and warn or fail the " + + "compilation for issues including not directly including " + + "dependencies which are directly included in the code, or " + + "including unused dependencies." + override val components = + List[PluginComponent]( + new AnalyzerComponent( + runsAfterPhase = "typer", + handles = DependencyTrackingMethod.Ast + ), + new AnalyzerComponent( + runsAfterPhase = "jvm", + handles = DependencyTrackingMethod.HighLevel + ) + ) + + private val isWindows: Boolean = System.getProperty("os.name").toLowerCase.contains("windows") + private var settings: DependencyAnalyzerSettings = null + + override def init( + options: List[String], + error: String => Unit + ): Boolean = { + settings = DependencyAnalyzerSettings.parseSettings(options = options, error = error) + true } - - private object Component extends PluginComponent { - val global: DependencyAnalyzer.this.global.type = + private class AnalyzerComponent( + // Typer seems to be the better method at least for AST - it seems like + // some things get eliminated in later phases. 
However, due to backwards + // compatibility we have to preserve using jvm for the high-level-crawl + // dependency tracking method + runsAfterPhase: String, + handles: DependencyTrackingMethod + ) extends PluginComponent { + override val global: DependencyAnalyzer.this.global.type = DependencyAnalyzer.this.global - import global._ - - override val runsAfter = List("jvm") + override val runsAfter = List(runsAfterPhase) - val phaseName = DependencyAnalyzer.this.name + val phaseName = s"${DependencyAnalyzer.this.name}-post-$runsAfterPhase" override def newPhase(prev: Phase): StdPhase = new StdPhase(prev) { override def run(): Unit = { - super.run() - - val usedJars = findUsedJars - - warnOnIndirectTargetsFoundIn(usedJars) + if (settings.dependencyTrackingMethod == handles) { + runAnalysis() + } } - private def warnOnIndirectTargetsFoundIn(usedJars: Set[AbstractFile]) = { - for (usedJar <- usedJars; - usedJarPath = usedJar.path; - target <- indirect.get(usedJarPath) if !direct.contains(usedJarPath)) { - val errorMessage = - s"""Target '$target' is used but isn't explicitly declared, please add it to the deps. 
- |You can use the following buildozer command: - |buildozer 'add deps $target' $currentTarget""".stripMargin + override def apply(unit: global.CompilationUnit): Unit = () + } + } - analyzerMode match { - case "error" => reporter.error(NoPosition, errorMessage) - case "warn" => reporter.warning(NoPosition, errorMessage) - } + private def runAnalysis(): Unit = { + val usedJarsToPositions = findUsedJarsAndPositions + val usedJarPathToPositions = + if (!isWindows) { + usedJarsToPositions.map { case (jar, pos) => + jar.path -> pos + } + } else { + usedJarsToPositions.map { case (jar, pos) => + jar.path.replaceAll("\\\\", "/") -> pos } } - override def apply(unit: CompilationUnit): Unit = () + if (settings.unusedDepsMode != AnalyzerMode.Off) { + reportUnusedDepsFoundIn(usedJarPathToPositions) } + if (settings.strictDepsMode != AnalyzerMode.Off) { + reportIndirectTargetsFoundIn(usedJarPathToPositions) + } } - import global._ + private def reportIndirectTargetsFoundIn( + usedJarPathAndPositions: Map[String, global.Position] + ): Unit = { + val errors = + usedJarPathAndPositions + .filterNot { case (jarPath, _) => + settings.directTargetSet.jarSet.contains(jarPath) + } + .flatMap { case (jarPath, pos) => + settings.indirectTargetSet.targetFromJarOpt(jarPath).map { target => + target -> pos + } + } + .map { case (target, pos) => + val message = + s"""Target '$target' is used but isn't explicitly declared, please add it to the deps. 
+ |You can use the following buildozer command: + |buildozer 'add deps $target' ${settings.currentTarget}""".stripMargin + message -> pos + } - private def findUsedJars: Set[AbstractFile] = { - val jars = collection.mutable.Set[AbstractFile]() + warnOrError(settings.strictDepsMode, errors) + } - def walkTopLevels(root: Symbol): Unit = { - def safeInfo(sym: Symbol): Type = - if (sym.hasRawInfo && sym.rawInfo.isComplete) sym.info else NoType + private def reportUnusedDepsFoundIn( + usedJarPathAndPositions: Map[String, global.Position] + ): Unit = { + val directJarPaths = settings.directTargetSet.jarSet - def packageClassOrSelf(sym: Symbol): Symbol = - if (sym.hasPackageFlag && !sym.isModuleClass) sym.moduleClass else sym - for (x <- safeInfo(packageClassOrSelf(root)).decls) { - if (x == root) () - else if (x.hasPackageFlag) walkTopLevels(x) - else if (x.owner != root) { // exclude package class members - if (x.hasRawInfo && x.rawInfo.isComplete) { - val assocFile = x.associatedFile - if (assocFile.path.endsWith(".class") && assocFile.underlyingSource.isDefined) - assocFile.underlyingSource.foreach(jars += _) - } + val usedTargets = + usedJarPathAndPositions + .flatMap { case (jar, _) => + settings.directTargetSet.targetFromJarOpt(jar) } + .toSet + + val unusedTargets = directJarPaths + // This .get is safe because [jar] was gotten from [directJarPaths] + // which is the set of keys of the direct targets. + .filter(jar => !usedTargets.contains(settings.directTargetSet.targetFromJarOpt(jar).get)) + .flatMap(settings.directTargetSet.targetFromJarOpt) + .diff(settings.ignoredUnusedDependencyTargets) + + val toWarnOrError = + unusedTargets.map { target => + val message = + s"""Target '$target' is specified as a dependency to ${settings.currentTarget} but isn't used, please remove it from the deps. 
+ |You can use the following buildozer command: + |buildozer 'remove deps $target' ${settings.currentTarget} + |""".stripMargin + (message, global.NoPosition: global.Position) } + + warnOrError(settings.unusedDepsMode, toWarnOrError.toMap) + } + + private def warnOrError( + analyzerMode: AnalyzerMode, + errors: Map[String, global.Position] + ): Unit = { + val reportFunction: (String, global.Position) => Unit = analyzerMode match { + case AnalyzerMode.Error => + { case (message, pos) => + global.reporter.error(pos, message) + } + case AnalyzerMode.Warn => + { case (message, pos) => + global.reporter.warning(pos, message) + } + case AnalyzerMode.Off => (_, _) => () } - exitingTyper { - walkTopLevels(RootClass) + errors.foreach { case (message, pos) => + reportFunction(message, pos) + } + } + + /** + * + * @return map of used jar file -> representative position in file where + * it was used + */ + private def findUsedJarsAndPositions: Map[AbstractFile, global.Position] = { + settings.dependencyTrackingMethod match { + case DependencyTrackingMethod.HighLevel => + new HighLevelCrawlUsedJarFinder(global).findUsedJars + case DependencyTrackingMethod.Ast => + new AstUsedJarFinder(global).findUsedJars } - jars.toSet } } diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerSettings.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerSettings.scala new file mode 100644 index 000000000..af92abd5b --- /dev/null +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerSettings.scala @@ -0,0 +1,121 @@ +package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer + +object AnalyzerMode { + case object Error extends AnalyzerMode + case object Warn extends AnalyzerMode + case object Off extends AnalyzerMode + + def parse(mode: String): Option[AnalyzerMode] = { + mode match { + case "error" => 
Some(Error) + case "warn" => Some(Warn) + case "off" => Some(Off) + case _ => None + } + } +} + +sealed trait AnalyzerMode + +object DependencyTrackingMethod { + case object HighLevel extends DependencyTrackingMethod("high-level") + + /** + * Discovers dependencies by crawling the AST. + */ + case object Ast extends DependencyTrackingMethod("ast") + + def parse(mode: String): Option[DependencyTrackingMethod] = { + Seq(HighLevel, Ast).find(_.name == mode) + } +} + +sealed abstract class DependencyTrackingMethod(val name: String) + +class TargetSet( + prefix: String, + jarsSeq: Seq[String], + targetsSeq: Seq[String] +) { + private lazy val jarToTargetMap: Map[String, String] = { + require(targetsSeq.size == jarsSeq.size, s"Arguments $prefix-jars and $prefix-targets had mismatched size") + + jarsSeq.zip(targetsSeq).toMap + } + + def targetFromJarOpt(jar: String): Option[String] = { + jarToTargetMap.get(jar) + } + + lazy val jarSet: Set[String] = { + jarsSeq.toSet + } +} + +object DependencyAnalyzerSettings { + def parseSettings( + options: List[String], + error: String => Unit + ): DependencyAnalyzerSettings = { + + val optionsParser = OptionsParser.create(options, error) + + def decodeTarget(target: String): String = { + target.replace(";", ":") + } + + def parseTargetSet(prefix: String): TargetSet = { + new TargetSet( + prefix = prefix, + jarsSeq = optionsParser.takeStringSeqOpt(s"$prefix-jars").getOrElse(Seq.empty), + targetsSeq = optionsParser.takeStringSeqOpt(s"$prefix-targets").map(_.map(decodeTarget)).getOrElse(Seq.empty) + ) + } + + def extractAnalyzerMode(key: String): AnalyzerMode = { + optionsParser + .takeStringOpt(key) + .map { str => + AnalyzerMode.parse(str).getOrElse { + error(s"Failed to parse option $key with value $str") + AnalyzerMode.Error + } + } + .getOrElse(AnalyzerMode.Off) + } + + val settings = + DependencyAnalyzerSettings( + currentTarget = decodeTarget(optionsParser.takeString("current-target")), + dependencyTrackingMethod = + 
DependencyTrackingMethod + .parse(optionsParser.takeString("dependency-tracking-method")) + .getOrElse { + error("Failed to parse option dependency-tracking-method") + DependencyTrackingMethod.HighLevel + }, + indirectTargetSet = parseTargetSet("indirect"), + directTargetSet = parseTargetSet("direct"), + unusedDepsMode = extractAnalyzerMode("unused-deps-mode"), + strictDepsMode = extractAnalyzerMode("strict-deps-mode"), + ignoredUnusedDependencyTargets = + optionsParser + .takeStringSeqOpt(s"unused-deps-ignored-targets") + .getOrElse(Seq.empty) + .map(decodeTarget) + .toSet + ) + optionsParser.failOnUnparsedOptions() + settings + } +} + +case class DependencyAnalyzerSettings( + indirectTargetSet: TargetSet, + directTargetSet: TargetSet, + currentTarget: String, + dependencyTrackingMethod: DependencyTrackingMethod, + unusedDepsMode: AnalyzerMode, + strictDepsMode: AnalyzerMode, + ignoredUnusedDependencyTargets: Set[String] +) diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/HighLevelCrawlUsedJarFinder.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/HighLevelCrawlUsedJarFinder.scala new file mode 100644 index 000000000..18e8ae2cd --- /dev/null +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/HighLevelCrawlUsedJarFinder.scala @@ -0,0 +1,39 @@ +package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer + +import scala.reflect.io.AbstractFile +import scala.tools.nsc.Global + +class HighLevelCrawlUsedJarFinder( + global: Global +) { + import global.Symbol + + def findUsedJars: Map[AbstractFile, Global#Position] = { + val jars = collection.mutable.Set[AbstractFile]() + + global.exitingTyper { + walkTopLevels(global.RootClass, jars) + } + jars.map(jar => jar -> global.NoPosition).toMap + } + + private def walkTopLevels(root: Symbol, jars: collection.mutable.Set[AbstractFile]): Unit = { + def safeInfo(sym: Symbol): global.Type 
= + if (sym.hasRawInfo && sym.rawInfo.isComplete) sym.info else global.NoType + + def packageClassOrSelf(sym: Symbol): Symbol = + if (sym.hasPackageFlag && !sym.isModuleClass) sym.moduleClass else sym + + for (x <- safeInfo(packageClassOrSelf(root)).decls) { + if (x == root) () + else if (x.hasPackageFlag) walkTopLevels(x, jars) + else if (x.owner != root) { // exclude package class members + if (x.hasRawInfo && x.rawInfo.isComplete) { + val assocFile = x.associatedFile + if (assocFile.path.endsWith(".class") && assocFile.underlyingSource.isDefined) + assocFile.underlyingSource.foreach(jars += _) + } + } + } + } +} diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/OptionsParser.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/OptionsParser.scala new file mode 100644 index 000000000..f36c93ca1 --- /dev/null +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/OptionsParser.scala @@ -0,0 +1,51 @@ +package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer + +import scala.collection.mutable + +object OptionsParser { + def create( + options: List[String], + error: String => Unit + ): OptionsParser = { + val optionsMap = mutable.Map[String, String]() + options.foreach { option => + option.split(":", 2) match { + case Array(key) => + error(s"Argument $key missing value") + case Array(key, value) => + if (optionsMap.contains(key)) { + error(s"Argument $key found multiple times") + } + optionsMap.put(key, value) + } + } + + new OptionsParser(error = error, options = optionsMap) + } +} + +class OptionsParser private( + error: String => Unit, + options: mutable.Map[String, String] +) { + def failOnUnparsedOptions(): Unit = { + options.keys.foreach { key => + error(s"Unrecognized option $key") + } + } + + def takeStringOpt(key: String): Option[String] = { + options.remove(key) + } + + def takeString(key: String): String = { + 
takeStringOpt(key).getOrElse { + error(s"Missing required option $key") + "NA" + } + } + + def takeStringSeqOpt(key: String): Option[Seq[String]] = { + takeStringOpt(key).map(_.split(":")) + } +} diff --git a/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/ScalaVersion.scala b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/ScalaVersion.scala new file mode 100644 index 000000000..e7be56c5a --- /dev/null +++ b/third_party/dependency_analyzer/src/main/io/bazel/rulesscala/dependencyanalyzer/ScalaVersion.scala @@ -0,0 +1,158 @@ +package third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer + +import scala.language.experimental.macros +import scala.reflect.macros.blackbox + +object ScalaVersion { + val Current: ScalaVersion = ScalaVersion(util.Properties.versionNumberString) + + def apply(versionString: String): ScalaVersion = { + versionString.split("\\.") match { + case Array(superMajor, major, minor) => + new ScalaVersion(superMajor.toInt, major.toInt, minor.toInt) + case _ => + throw new Exception(s"Failed to parse version $versionString") + } + } + + /** + * Runs [code] only if minVersion and maxVersion constraints are met. + * + * NOTE: This method should be used only rarely. Most of the time + * just comparing versions in code should be enough. This is needed + * only when the code we want to run can't compile under certain + * versions. The reason to use this rarely is the API's inflexibility + * and the difficulty in debugging this code. + * + * Each of minVersionOpt and maxVersionOpt can either be None + * to signify that there is no restriction on this bound, + * or it can be a string of a full version number such as "2.12.10". + * + * When set to a version number, the bounds are inclusive. + * For example, a maxVersion of "2.12.10" will accept version "2.12.10". + * + * Note that only literal strings and inlined variables are accepted.
+ * If any non-inlined variables are passed the code will fail to compile. + * Inlined variables are generally those declared final on an object which + * do not have a type attached. + * + * valid: + * conditional(Some("2.12.4"), None, "foo()") + * invalid: + * conditional(MinVersionForFoo, None, "foo()") + */ + def conditional( + minVersionOpt: Option[String], + maxVersionOpt: Option[String], + code: String + ): Unit = + macro conditionalImpl + + def conditionalImpl( + c: blackbox.Context + )( + minVersionOpt: c.Expr[Option[String]], + maxVersionOpt: c.Expr[Option[String]], + code: c.Expr[String] + ): c.Tree = { + import c.{universe => u} + + // Due to non-deterministic code generation of quasiquotes, we do + // not use them + // See https://github.com/scala/bug/issues/11008 + // Eventually once we stop supporting all versions which don't have + // the bugfix, we can use quasiquotes as desired + + def extractStringFromTree(tree: c.Tree): Option[String] = { + tree match { + case u.Literal(u.Constant(s: String)) => + Some(s) + case _ => + None + } + } + + def extractStringOption(expr: c.Expr[Option[String]]): Option[String] = { + expr.tree match { + case u.Apply( + u.TypeApply( + u.Select(u.Select(u.Ident(u.TermName("scala")), u.TermName("Some")), u.TermName("apply")), + List(u.TypeTree())), + str :: Nil + ) if extractStringFromTree(str).nonEmpty => + extractStringFromTree(str) + case u.Select(u.Ident(u.TermName("scala")), u.TermName("None")) => + None + case _ => + c.error( + expr.tree.pos, + "Parameter must be passed as an Option[String] literal such as " + + "Some(\"2.12.10\") or None") + None + } + } + + def extractString(expr: c.Expr[String]): String = { + extractStringFromTree(expr.tree).getOrElse { + c.error( + expr.tree.pos, + "Parameter must be passed as a string literal such as \"2.12.10\"") + "" + } + } + + val meetsMinVersionRequirement = { + val minVersionOptValue = extractStringOption(minVersionOpt) + + // Note: Unit tests do not test that this bound 
is inclusive rather + // than exclusive so be careful when changing this code not to + // accidentally make this an exclusive bound (see ScalaVersionTest for + // details) + minVersionOptValue.forall(version => Current >= ScalaVersion(version)) + } + + val meetsMaxVersionRequirement = { + val maxVersionOptValue = extractStringOption(maxVersionOpt) + // Note: Unit tests do not test that this bound is inclusive rather + // than exclusive so be careful when changing this code not to + // accidentally make this an exclusive bound (see ScalaVersionTest for + // details) + maxVersionOptValue.forall(version => Current <= ScalaVersion(version)) + } + + if (meetsMinVersionRequirement && meetsMaxVersionRequirement) { + c.parse(extractString(code)) + } else { + u.EmptyTree + } + } +} + +class ScalaVersion private( + private val superMajor: Int, + private val major: Int, + private val minor: Int +) extends Ordered[ScalaVersion] { + override def compare(that: ScalaVersion): Int = { + if (this.superMajor != that.superMajor) { + this.superMajor.compareTo(that.superMajor) + } else if (this.major != that.major) { + this.major.compareTo(that.major) + } else { + this.minor.compareTo(that.minor) + } + } + + override def equals(obj: Any): Boolean = { + obj match { + case that: ScalaVersion => + compare(that) == 0 + case _ => + false + } + } + + override def toString: String = { + s"$superMajor.$major.$minor" + } +} diff --git a/third_party/dependency_analyzer/src/test/BUILD b/third_party/dependency_analyzer/src/test/BUILD index 0b279289b..51aa24242 100644 --- a/third_party/dependency_analyzer/src/test/BUILD +++ b/third_party/dependency_analyzer/src/test/BUILD @@ -2,19 +2,71 @@ licenses(["notice"]) # 3-clause BSD load("//scala:scala.bzl", "scala_junit_test", "scala_test") -scala_junit_test( - name = "dependency_analyzer_test", +common_jvm_flags = [ + "-Dplugin.jar.location=$(location //third_party/dependency_analyzer/src/main:dependency_analyzer)", + "-Dscala.library.location=$(location 
//external:io_bazel_rules_scala/dependency/scala/scala_library)", + "-Dscala.reflect.location=$(location //external:io_bazel_rules_scala/dependency/scala/scala_reflect)", +] + +scala_test( + name = "ast_used_jar_finder_test", + size = "small", + srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinderTest.scala", + ], + jvm_flags = common_jvm_flags, + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//third_party/dependency_analyzer/src/main:dependency_analyzer", + "//third_party/dependency_analyzer/src/main:scala_version", + "//third_party/utils/src/test:test_util", + "@scalac_rules_commons_io//jar", + ], +) + +scala_test( + name = "scala_version_test", size = "small", srcs = [ - "io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerTest.scala", + "io/bazel/rulesscala/dependencyanalyzer/ScalaVersionTest.scala", ], - jvm_flags = [ - "-Dplugin.jar.location=$(location //third_party/dependency_analyzer/src/main:dependency_analyzer)", - "-Dscala.library.location=$(location //external:io_bazel_rules_scala/dependency/scala/scala_library)", + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//third_party/dependency_analyzer/src/main:scala_version", + ], +) + +scala_test( + name = "scalac_dependency_test", + size = "small", + srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/ScalacDependencyTest.scala", + ], + jvm_flags = common_jvm_flags, + unused_dependency_checker_mode = "off", + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//third_party/dependency_analyzer/src/main:dependency_analyzer", + 
"//third_party/utils/src/test:test_util", + "@scalac_rules_commons_io//jar", + ], +) + +scala_test( + name = "strict_deps_test", + size = "small", + srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/StrictDepsTest.scala", + ], + jvm_flags = common_jvm_flags + [ "-Dguava.jar.location=$(location @com_google_guava_guava_21_0_with_file//jar)", "-Dapache.commons.jar.location=$(location @org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file)", ], - suffixes = ["Test"], unused_dependency_checker_mode = "off", deps = [ "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", @@ -26,3 +78,23 @@ scala_junit_test( "@org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file", ], ) + +scala_test( + name = "unused_dependency_checker_test", + size = "small", + srcs = [ + "io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala", + ], + jvm_flags = common_jvm_flags + [ + "-Dapache.commons.jar.location=$(location @org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file)", + ], + unused_dependency_checker_mode = "off", + deps = [ + "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", + "//external:io_bazel_rules_scala/dependency/scala/scala_library", + "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//third_party/dependency_analyzer/src/main:dependency_analyzer", + "//third_party/utils/src/test:test_util", + "@org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file", + ], +) diff --git a/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinderTest.scala b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinderTest.scala new file mode 100644 index 000000000..e63534faa --- /dev/null +++ 
b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/AstUsedJarFinderTest.scala @@ -0,0 +1,613 @@ +package third_party.dependency_analyzer.src.test.io.bazel.rulesscala.dependencyanalyzer + +import java.nio.file.Files +import java.nio.file.Path +import org.apache.commons.io.FileUtils +import org.scalatest._ +import scala.tools.nsc.reporters.StoreReporter +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.DependencyTrackingMethod +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.ScalaVersion +import third_party.utils.src.test.io.bazel.rulesscala.utils.JavaCompileUtil +import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil +import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil.DependencyAnalyzerTestParams + +// NOTE: Some tests are version-dependent as some false positives +// cannot be fixed in older versions of Scala for various reasons. +// Hence make sure to look at any version checks to understand +// which versions do and don't support which cases. 
+class AstUsedJarFinderTest extends FunSuite { + private def withSandbox(action: Sandbox => Unit): Unit = { + val tmpDir = Files.createTempDirectory("dependency_analyzer_test_temp") + val file = tmpDir.toFile + try { + action(new Sandbox(tmpDir)) + } finally { + FileUtils.deleteDirectory(file) + } + } + + private class Sandbox(tmpDir: Path) { + def compileWithoutAnalyzer( + code: String + ): Unit = { + val errors = + TestUtil.runCompiler( + code = code, + extraClasspath = List(tmpDir.toString), + outputPathOpt = Some(tmpDir) + ) + assert(errors.isEmpty) + } + + def compileJava( + className: String, + code: String + ): Unit = { + JavaCompileUtil.compile( + tmpDir = tmpDir.toString, + className = className, + code = code + ) + } + + private def verifyAndConvertDepToClass(dep: String): String = { + val classPath = tmpDir.resolve(s"$dep.class") + // Make sure the dep refers to a real file + assert(classPath.toFile.isFile) + classPath.toString + } + + def checkStrictDepsErrorsReported( + code: String, + expectedStrictDeps: List[String] + ): List[StoreReporter#Info] = { + val errors = + TestUtil.runCompiler( + code = code, + extraClasspath = List(tmpDir.toString), + dependencyAnalyzerParamsOpt = + Some( + DependencyAnalyzerTestParams( + indirectJars = expectedStrictDeps.map(verifyAndConvertDepToClass), + indirectTargets = expectedStrictDeps, + strictDeps = true, + dependencyTrackingMethod = DependencyTrackingMethod.Ast + ) + ) + ) + + assert(errors.size == expectedStrictDeps.size) + errors.foreach { err => + // We should be emitting errors with positions + assert(err.pos.isDefined) + } + + expectedStrictDeps.foreach { dep => + val expectedError = s"Target '$dep' is used but isn't explicitly declared, please add it to the deps" + assert(errors.exists(_.msg.contains(expectedError))) + } + + errors + } + + def checkUnusedDepsErrorReported( + code: String, + expectedUnusedDeps: List[String] + ): Unit = { + val errors = + TestUtil.runCompiler( + code = code, + extraClasspath 
= List(tmpDir.toString), + dependencyAnalyzerParamsOpt = + Some( + DependencyAnalyzerTestParams( + directJars = expectedUnusedDeps.map(verifyAndConvertDepToClass), + directTargets = expectedUnusedDeps, + unusedDeps = true, + dependencyTrackingMethod = DependencyTrackingMethod.Ast + ) + ) + ) + + assert(errors.size == expectedUnusedDeps.size) + errors.foreach { err => + // As an unused dep we shouldn't include a position or anything like that + assert(!err.pos.isDefined) + } + + expectedUnusedDeps.foreach { dep => + val expectedError = s"Target '$dep' is specified as a dependency to ${TestUtil.defaultTarget} but isn't used, please remove it from the deps." + assert(errors.exists(_.msg.contains(expectedError))) + } + } + } + + /** + * In a situation where B depends on A directly, ensure that the + * dependency analyzer recognizes this fact. + */ + private def checkDirectDependencyRecognized( + aCode: String, + bCode: String + ): Unit = { + withSandbox { sandbox => + sandbox.compileWithoutAnalyzer(aCode) + sandbox.checkStrictDepsErrorsReported( + code = bCode, + expectedStrictDeps = List("A") + ) + } + } + + /** + * In a situation where C depends on both A and B directly, ensure + * that the dependency analyzer recognizes this fact. + */ + private def checkDirectDependencyRecognized( + aCode: String, + bCode: String, + cCode: String + ): Unit = { + withSandbox { sandbox => + sandbox.compileWithoutAnalyzer(aCode) + sandbox.compileWithoutAnalyzer(bCode) + sandbox.checkStrictDepsErrorsReported( + code = cCode, + expectedStrictDeps = List("A", "B") + ) + } + } + + /** + * In a situation where C depends directly on B but not on A, ensure + * that the dependency analyzer recognizes this fact. 
+ */ + private def checkIndirectDependencyDetected( + aCode: String, + bCode: String, + cCode: String + ): Unit = { + withSandbox { sandbox => + sandbox.compileWithoutAnalyzer(aCode) + sandbox.compileWithoutAnalyzer(bCode) + sandbox.checkUnusedDepsErrorReported( + code = cCode, + expectedUnusedDeps = List("A") + ) + } + } + + /** + * In a situation where B depends indirectly on A, ensure + * that the dependency analyzer recognizes this fact. + */ + private def checkIndirectDependencyDetected( + aCode: String, + bCode: String + ): Unit = { + withSandbox { sandbox => + sandbox.compileWithoutAnalyzer(aCode) + sandbox.checkUnusedDepsErrorReported( + code = bCode, + expectedUnusedDeps = List("A") + ) + } + } + + test("simple composition in indirect") { + checkIndirectDependencyDetected( + aCode = + """ + |class A + |""".stripMargin, + bCode = + """ + |class B(a: A) + |""".stripMargin, + cCode = + """ + |class C(b: B) + |""".stripMargin + ) + } + + test("method call argument is direct") { + checkDirectDependencyRecognized( + aCode = + """ + |class A + |""".stripMargin, + bCode = + """ + |class B { + | def foo(a: A = new A()): Unit = {} + |} + |""".stripMargin, + cCode = + """ + |class C { + | def bar(): Unit = { + | new B().foo(new A()) + | } + |} + |""".stripMargin + ) + } + + test("class ctor arg type parameter is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) + |""".stripMargin, + bCode = + s""" + |class B( + | a: Option[A] + |) + |""".stripMargin + ) + } + + test("class static annotation is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) extends scala.annotation.StaticAnnotation + |""".stripMargin, + bCode = + s""" + |@A + |class B( + |) + |""".stripMargin + ) + } + + test("class annotation is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) + |""".stripMargin, + bCode = + s""" + |@A + |class B( + |) + |""".stripMargin + ) + } + + test("method annotation is direct") { + 
checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) + |""".stripMargin, + bCode = + s""" + |class B { + | @A + | def foo(): Unit = { + | } + |} + |""".stripMargin + ) + } + + test("static annotation of inherited class is indirect") { + checkIndirectDependencyDetected( + aCode = "class A extends scala.annotation.StaticAnnotation", + bCode = "@A class B", + cCode = "class C extends B" + ) + } + + test("class type parameter bound is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) + |""".stripMargin, + bCode = + s""" + |class B[T <: A]( + |) + |""".stripMargin + ) + } + + test("classOf is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |class A( + |) + |""".stripMargin, + bCode = + s""" + |class B( + |) { + | val x: Class[_] = classOf[A] + |} + |""".stripMargin + ) + } + + test("classOf in class annotation is direct") { + checkDirectDependencyRecognized( + aCode = "class A", + bCode = "class B(a: Any)", + cCode = + s""" + |@B(classOf[A]) + |class C + |""".stripMargin + ) + } + + test("inlined literal is direct") { + // Note: For a constant to be inlined + // - it must not have a type declaration such as `: Int`. + // (this appears to be the case in practice at least) + // (is this documented anywhere???) + // - some claim it must start with a capital letter, though + // this does not seem to be the case. Nevertheless we do that + // anyways. + // + // Hence it is possible that as newer versions of scala + // are released then this test may need to be updated to + // conform to changing requirements of what is inlined. + + // Note that in versions of scala < 2.12.4 we cannot detect + // such a situation. Hence we will have a false positive here + // for those older versions, which we verify in test. 
+ + val aCode = + s""" + |object A { + | final val Inlined = 123 + |} + |""".stripMargin + val bCode = + s""" + |object B { + | val d: Int = A.Inlined + |} + |""".stripMargin + + if (ScalaVersion.Current >= ScalaVersion("2.12.4")) { + checkDirectDependencyRecognized(aCode = aCode, bCode = bCode) + } else { + checkIndirectDependencyDetected(aCode = aCode, bCode = bCode) + } + } + + test("unspecified default argument type is indirect") { + checkIndirectDependencyDetected( + aCode = "class A", + bCode = "class B(a: A = new A())", + cCode = + s""" + |class C { + | new B() + |} + |""".stripMargin + ) + } + + test("macro is direct") { + checkDirectDependencyRecognized( + aCode = + s""" + |import scala.language.experimental.macros + |import scala.reflect.macros.blackbox.Context + | + |object A { + | def foo(): Unit = macro fooImpl + | def fooImpl( + | c: Context + | )(): c.universe.Tree = { + | import c.universe._ + | q"" + | } + |} + |""".stripMargin, + bCode = + s""" + |object B { + | A.foo() + |} + |""".stripMargin + ) + } + + test("imports are complicated") { + // This test documents the behavior of imports as is currently. + // Ideally all imports would be direct dependencies. However there + // are complications. The main one being that the scala AST treats + // imports as (expr, selectors) where in e.g. `import a.b.{c, d}` + // expr=`a.b` and selectors=[c, d]. (Note selectors are always formed + // from the last part of the import). + // And only the expr part has type information attached. In order + // to gather type information from the selector, we would need to + // do some resolution of types, which is possible but probably complex. + // Note also that fixing this is probably less of a priority, as + // people who want to check unused deps generally also want to check + // unused imports, so they wouldn't run into these problems in the + // first place. 
+ + def testImport(importString: String, isDirect: Boolean): Unit = { + withSandbox { sandbox => + sandbox.compileWithoutAnalyzer( + s""" + |package foo.bar + | + |object A { val i: Int = 0 } + |""".stripMargin + ) + + val bCode = + s""" + |import $importString + | + |class B + |""".stripMargin + val dep = "foo/bar/A" + + if (isDirect) { + sandbox.checkStrictDepsErrorsReported( + code = bCode, + expectedStrictDeps = List(dep) + ) + } else { + sandbox.checkUnusedDepsErrorReported( + code = bCode, + expectedUnusedDeps = List(dep) + ) + } + } + } + + // In this case, expr=foo.bar.A and selectors=[i], so looking at expr does + // give us a type. + testImport("foo.bar.A.i", isDirect = true) + + // In this case expr=foo.bar and selectors=[A], so expr does not have + // a type which corresponds with A. + testImport("foo.bar.A", isDirect = false) + + // In this case expr=foo and selectors=[bar], so expr does not have + // a type which corresponds with A. + testImport("foo.bar", isDirect = false) + + // In this case expr=foo.bar and selectors=[_], so expr does not have + // a type which corresponds with A. + testImport("foo.bar._", isDirect = false) + } + + test("java interface method argument is direct") { + withSandbox { sandbox => + sandbox.compileJava( + className = "B", + code = "public interface B { }" + ) + sandbox.checkStrictDepsErrorsReported( + """ + |class C { + | def foo(x: B): Unit = {} + |} + |""".stripMargin, + expectedStrictDeps = List("B") + ) + } + } + + test("java interface field and method is direct") { + withSandbox { sandbox => + sandbox.compileJava( + className = "A", + code = "public interface A { int a = 42; }" + ) + val bCode = + """ + |class B { + | def foo(x: A): Unit = {} + | val b = A.a + |} + |""".stripMargin + + // Unlike other tests, this one includes both access to an inlined + // variable and taking the class A as an argument. 
In theory, + // this test should work for all supported versions just like + // test `java interface method argument is direct` since they + // both have a method taking A as an argument. + // + // However, it does not work for all versions. It is unclear why but + // presumably there were various compiler improvements. + if (ScalaVersion.Current >= ScalaVersion("2.12.0")) { + sandbox.checkStrictDepsErrorsReported( + bCode, + expectedStrictDeps = List("A") + ) + } else { + sandbox.checkUnusedDepsErrorReported( + bCode, + expectedUnusedDeps = List("A") + ) + } + } + } + + test("java interface field is direct") { + withSandbox { sandbox => + sandbox.compileJava( + className = "A", + code = "public interface A { int a = 42; }" + ) + val bCode = + """ + |class B { + | val b = A.a + |} + |""".stripMargin + if (ScalaVersion.Current >= ScalaVersion("2.12.4")) { + sandbox.checkStrictDepsErrorsReported( + bCode, + expectedStrictDeps = List("A") + ) + } else { + sandbox.checkUnusedDepsErrorReported( + bCode, + expectedUnusedDeps = List("A") + ) + } + } + } + + test("classOf in class Java annotation is direct") { + withSandbox { sandbox => + sandbox.compileJava( + className = "Category", + code = + s""" + |public @interface Category { + | Class value(); + |} + |""".stripMargin + ) + sandbox.compileWithoutAnalyzer("class UnitTests") + sandbox.checkStrictDepsErrorsReported( + """ + |@Category(classOf[UnitTests]) + |class C + |""".stripMargin, + expectedStrictDeps = List("UnitTests", "Category") + ) + } + } + + test("position of strict deps error is correct") { + // While we do ensure that generally strict deps errors have + // a position in the other tests, here we make sure that that + // position is correctly computed. 
+ withSandbox { sandbox => + sandbox.compileWithoutAnalyzer("class A") + val errors = + sandbox.checkStrictDepsErrorsReported( + "class B(a: A)", + expectedStrictDeps = List("A") + ) + assert(errors.size == 1) + val pos = errors(0).pos + assert(pos.line == 1) + assert(pos.column == 12) + } + } +} diff --git a/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalaVersionTest.scala b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalaVersionTest.scala new file mode 100644 index 000000000..c3d46075c --- /dev/null +++ b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalaVersionTest.scala @@ -0,0 +1,146 @@ +package third_party.dependency_analyzer.src.test.io.bazel.rulesscala.dependencyanalyzer + +import org.scalatest._ +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.ScalaVersion + +class ScalaVersionTest extends FunSuite { + test("version comparison works") { + // Test that when a > b, all the comparisons are as expected + def testOrder(a: String, b: String): Unit = { + val va = ScalaVersion(a) + val vb = ScalaVersion(b) + + assert(!(va == vb)) + assert(va != vb) + assert(!(va < vb)) + assert(!(va <= vb)) + assert(va > vb) + assert(va >= vb) + + assert(!(vb == va)) + assert(vb != va) + assert(vb < va) + assert(vb <= va) + assert(!(vb > va)) + assert(!(vb >= va)) + } + + def testEqual(a: String, b: String): Unit = { + val va = ScalaVersion(a) + val vb = ScalaVersion(b) + + assert(va == vb) + assert(!(va != vb)) + assert(!(va < vb)) + assert(va <= vb) + assert(!(va > vb)) + assert(va >= vb) + } + + testEqual("1.1.1", "1.1.1") + testEqual("1.2.3", "1.2.3") + testEqual("30.20.10", "30.20.10") + + testOrder("1.2.3", "1.0.0") + testOrder("1.2.1", "1.2.0") + testOrder("1.2.0", "1.1.9") + testOrder("2.12.12", "2.12.11") + testOrder("2.12.0", "2.1.50") + } + + test("macro works") { + // These are rather duplicative unfortunately as the person 
+ // who wrote the macro is not very smart + + // We use versions like 1.0.0 and 500.0.0 so that even + // as versions of scala change the test won't need to be updated + + // Note: this test unfortunately does not test that the min and max + // bounds are inclusive rather than exclusive, because this code has to + // compile across all supported scala versions and we can't get an + // inlineable constant with the version string. In theory there may + // be complex solutions such as making this a template file and + // inserting the version, but that seems rather overdifficult. + // + // As version-differing behavior should be tested in unit tests anyways, + // with their own version bounds checks, this seems an acceptable risk + // given the costs of fixing. + + // No bounds + { + var hit = false + ScalaVersion.conditional( + None, + None, + "hit = true" + ) + assert(hit) + } + + // Min bounds hit + { + var hit = false + ScalaVersion.conditional( + Some("1.0.0"), + None, + "hit = true" + ) + assert(hit) + } + + // Min bounds not hit + { + var hit = false + ScalaVersion.conditional( + Some("500.0.0"), + None, + "hit = true" + ) + assert(!hit) + } + + // Max bounds hit + { + var hit = false + ScalaVersion.conditional( + None, + Some("500.0.0"), + "hit = true" + ) + assert(hit) + } + + // Max bounds not hit + { + var hit = false + ScalaVersion.conditional( + None, + Some("1.0.0"), + "hit = true" + ) + assert(!hit) + } + + // Min-max bound hit + { + var hit = false + ScalaVersion.conditional( + Some("1.0.0"), + Some("500.0.0"), + "hit = true" + ) + assert(hit) + } + + // Min-max bound not hit + { + var hit = false + ScalaVersion.conditional( + Some("500.0.0"), + Some("1000.0.0"), + "hit = true" + ) + assert(!hit) + } + } +} diff --git a/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalacDependencyTest.scala b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalacDependencyTest.scala new file mode 
100644 index 000000000..6126d9c2e --- /dev/null +++ b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/ScalacDependencyTest.scala @@ -0,0 +1,109 @@ +package third_party.dependency_analyzer.src.test.io.bazel.rulesscala.dependencyanalyzer + +import java.nio.file.Files +import java.nio.file.Path +import java.util.UUID +import org.apache.commons.io.FileUtils +import org.scalatest._ +import third_party.utils.src.test.io.bazel.rulesscala.utils.JavaCompileUtil +import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil + +/** + * Test that the scalac compiler behaves how we expect it to around + * dependencies. That is, for given scenarios, we want to make sure + * that scalac requires the given set of dependencies; no more and + * no less. + * + * To clarify: these tests do not reflect the end result of strict/unused + * deps as we are considering alternatives of how to mitigate scalac's + * limitations. + */ +class ScalacDependencyTest extends FunSuite { + private def withSandbox(action: Sandbox => Unit): Unit = { + val tmpDir = Files.createTempDirectory("dependency_analyzer_test_temp") + val file = tmpDir.toFile + try { + action(new Sandbox(tmpDir)) + } finally { + FileUtils.deleteDirectory(file) + } + } + + private class Sandbox(tmpDir: Path) { + def compile( + code: String + ): Unit = { + val errors = + TestUtil.runCompiler( + code = code, + extraClasspath = List(tmpDir.toString), + outputPathOpt = Some(tmpDir) + ) + assert(errors.isEmpty) + } + + def compileJava( + className: String, + code: String + ): Unit = { + JavaCompileUtil.compile( + tmpDir = tmpDir.toString, + className = className, + code = code + ) + } + + def checkExactDepsNeeded( + code: String, + deps: List[String] + ): Unit = { + def doesCompileSucceed(usedDeps: List[String]): Boolean = { + val subdir = tmpDir.resolve(UUID.randomUUID().toString) + Files.createDirectory(subdir) + usedDeps.foreach { dep => + val name = s"$dep.class" + 
Files.copy(tmpDir.resolve(name), subdir.resolve(name)) + } + val errors = + TestUtil.runCompiler( + code = code, + extraClasspath = List(subdir.toString) + ) + errors.isEmpty + } + + assert(doesCompileSucceed(deps), s"Failed to compile with all deps") + + deps.foreach { toSkip => + val remaining = deps.filter(_ != toSkip) + // sanity check we removed exactly one item + assert(remaining.size + 1 == deps.size) + assert( + !doesCompileSucceed(remaining), + s"Compile succeeded even though $toSkip was missing") + } + } + } + + test("static annotation of superclass not needed") { + withSandbox { sandbox => + sandbox.compile("class A extends scala.annotation.StaticAnnotation") + sandbox.compile("@A class B") + sandbox.checkExactDepsNeeded( + code = "class C extends B", + deps = List("B") + ) + } + } + + test("superclass of superclass needed") { + withSandbox { sandbox => + sandbox.compile("class A") + sandbox.compile("class B extends A") + sandbox.checkExactDepsNeeded( + code = "class C extends B", + deps = List("A", "B") + ) + } + } +} diff --git a/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerTest.scala b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/StrictDepsTest.scala similarity index 62% rename from third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerTest.scala rename to third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/StrictDepsTest.scala index 7ec3d70df..1f837a1f4 100644 --- a/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/DependencyAnalyzerTest.scala +++ b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/StrictDepsTest.scala @@ -1,42 +1,34 @@ package third_party.dependency_analyzer.src.test.io.bazel.rulesscala.dependencyanalyzer -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 -import 
java.nio.file.Paths - +import org.scalatest._ +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.DependencyTrackingMethod +import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil._ -@RunWith(classOf[JUnit4]) -class DependencyAnalyzerTest { +class StrictDepsTest extends FunSuite { val pluginName = "dependency_analyzer" def compileWithDependencyAnalyzer(code: String, withDirect: List[String] = Nil, withIndirect: List[(String, String)] = Nil): List[String] = { - val toolboxPluginOptions: String = { - val jar = System.getProperty(s"plugin.jar.location") - val start= jar.indexOf(s"/third_party/$pluginName") - // this substring is needed due to issue: https://github.com/bazelbuild/bazel/issues/2475 - val jarInRelationToBaseDir = jar.substring(start, jar.length) - val pluginPath = Paths.get(baseDir, jarInRelationToBaseDir).toAbsolutePath - s"-Xplugin:$pluginPath -Jdummy=${pluginPath.toFile.lastModified}" - } - - val constructParam: (String, Iterable[String]) => String = constructPluginParam("dependency-analyzer") - val compileOptions = Seq( - constructParam("direct-jars", withDirect), - constructParam("indirect-jars", withIndirect.map(_._1)), - constructParam("indirect-targets", withIndirect.map(_._2)), - constructParam("current-target", Seq(defaultTarget)) - ).mkString(" ") - val extraClasspath = withDirect ++ withIndirect.map(_._1) - runCompiler(code, compileOptions, extraClasspath, toolboxPluginOptions) + TestUtil.runCompiler( + code = code, + extraClasspath = extraClasspath, + dependencyAnalyzerParamsOpt = + Some( + DependencyAnalyzerTestParams( + directJars = withDirect, + indirectJars = withIndirect.map(_._1), + indirectTargets = withIndirect.map(_._2), + strictDeps = true, + dependencyTrackingMethod = DependencyTrackingMethod.HighLevel + ) + ) + ) + .map(_.msg) } - - @Test - def `error on indirect dependency target`(): Unit = { + test("error on 
indirect dependency target") { val testCode = """object Foo { | org.apache.commons.lang3.ArrayUtils.EMPTY_BOOLEAN_ARRAY.length @@ -49,8 +41,7 @@ class DependencyAnalyzerTest { compileWithDependencyAnalyzer(testCode, withIndirect = indirect).expectErrorOn(commonsTarget) } - @Test - def `error on multiple indirect dependency targets`(): Unit = { + test("error on multiple indirect dependency targets") { val testCode = """object Foo { | org.apache.commons.lang3.ArrayUtils.EMPTY_BOOLEAN_ARRAY.length @@ -65,8 +56,7 @@ class DependencyAnalyzerTest { compileWithDependencyAnalyzer(testCode, withIndirect = indirect).expectErrorOn(commonsTarget, guavaTarget) } - @Test - def `do not give error on direct dependency target`(): Unit = { + test("do not give error on direct dependency target") { val testCode = """object Foo { | org.apache.commons.lang3.ArrayUtils.EMPTY_BOOLEAN_ARRAY.length @@ -77,7 +67,6 @@ class DependencyAnalyzerTest { val direct = List(apacheCommonsClasspath) val indirect = List(apacheCommonsClasspath -> commonsTarget) - val a = compileWithDependencyAnalyzer(testCode, withDirect = direct, withIndirect = indirect) compileWithDependencyAnalyzer(testCode, withDirect = direct, withIndirect = indirect).noErrorOn(commonsTarget) } @@ -96,11 +85,11 @@ class DependencyAnalyzerTest { def expectErrorOn(targets: String*): Unit = targets.foreach(target => assert( infos.exists(checkErrorContainsMessage(target)), - s"expected an error on $target to appear in errors (with buildozer command)!") + s"expected an error on $target to appear in errors (with buildozer command)! Errors: $info") ) def noErrorOn(target: String) = assert( !infos.exists(checkErrorContainsMessage(target)), - s"error on $target should not appear in errors!") + s"error on $target should not appear in errors! 
Errors: $info") } } diff --git a/third_party/unused_dependency_checker/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala similarity index 54% rename from third_party/unused_dependency_checker/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala rename to third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala index 3b928d983..97a5a650d 100644 --- a/third_party/unused_dependency_checker/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala +++ b/third_party/dependency_analyzer/src/test/io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala @@ -1,31 +1,28 @@ -package third_party.unused_dependency_checker.src.test.io.bazel.rulesscala.dependencyanalyzer - -import java.nio.file.Paths +package third_party.dependency_analyzer.src.test.io.bazel.rulesscala.dependencyanalyzer import org.scalatest._ +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.DependencyTrackingMethod +import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil import third_party.utils.src.test.io.bazel.rulesscala.utils.TestUtil._ class UnusedDependencyCheckerTest extends FunSuite { def compileWithUnusedDependencyChecker(code: String, withDirect: List[(String, String)] = Nil): List[String] = { - val toolboxPluginOptions: String = { - val jar = System.getProperty("plugin.jar.location") - val start = jar.indexOf("/third_party/unused_dependency_checker") - // this substring is needed due to issue: https://github.com/bazelbuild/bazel/issues/2475 - val jarInRelationToBaseDir = jar.substring(start, jar.length) - val pluginPath = Paths.get(baseDir, jarInRelationToBaseDir).toAbsolutePath - s"-Xplugin:$pluginPath -Jdummy=${pluginPath.toFile.lastModified}" - } - - val constructParam: (String, 
Iterable[String]) => String = constructPluginParam("unused-dependency-checker") - val compileOptions = List( - constructParam("direct-jars", withDirect.map(_._1)), - constructParam("direct-targets", withDirect.map(_._2)), - constructParam("current-target", Seq(defaultTarget)) - ).mkString(" ") - val extraClasspath = withDirect.map(_._1) - runCompiler(code, compileOptions, extraClasspath, toolboxPluginOptions) + TestUtil.runCompiler( + code = code, + extraClasspath = extraClasspath, + dependencyAnalyzerParamsOpt = + Some( + DependencyAnalyzerTestParams( + directJars = withDirect.map(_._1), + directTargets = withDirect.map(_._2), + unusedDeps = true, + dependencyTrackingMethod = DependencyTrackingMethod.HighLevel + ) + ) + ) + .map(_.msg) } test("error on unused direct dependencies") { diff --git a/third_party/test/strip_resource_external_workspace/WORKSPACE b/third_party/test/strip_resource_external_workspace/WORKSPACE new file mode 100644 index 000000000..e5d127dc1 --- /dev/null +++ b/third_party/test/strip_resource_external_workspace/WORKSPACE @@ -0,0 +1 @@ +workspace(name = "strip_resource_external_workspace") diff --git a/third_party/test/strip_resource_external_workspace/strip/BUILD.bazel b/third_party/test/strip_resource_external_workspace/strip/BUILD.bazel new file mode 100644 index 000000000..2e2167d71 --- /dev/null +++ b/third_party/test/strip_resource_external_workspace/strip/BUILD.bazel @@ -0,0 +1,8 @@ +load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library") + +scala_library( + name = "noSrcsWithResources", + resource_strip_prefix = "strip", + resources = ["nosrc_jar_resource.txt"], + visibility = ["//visibility:public"], +) diff --git a/third_party/test/strip_resource_external_workspace/strip/nosrc_jar_resource.txt b/third_party/test/strip_resource_external_workspace/strip/nosrc_jar_resource.txt new file mode 100644 index 000000000..b5bf914d1 --- /dev/null +++ b/third_party/test/strip_resource_external_workspace/strip/nosrc_jar_resource.txt @@ 
-0,0 +1 @@ +I am a text resource! diff --git a/third_party/unused_dependency_checker/LICENSE b/third_party/unused_dependency_checker/LICENSE deleted file mode 100644 index ec832b9a1..000000000 --- a/third_party/unused_dependency_checker/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -******************************************************************************* -* Classpath Shrinker: a scalac plugin to detect unused classpath entries -* Copyright (c) Scala Center -* All rights reserved. -* -* Redistribution and use in source and binary forms, with or without -* modification, are permitted provided that the following conditions -* are met: -* 1. Redistributions of source code must retain the above copyright -* notice, this list of conditions and the following disclaimer. -* 2. Redistributions in binary form must reproduce the above copyright -* notice, this list of conditions and the following disclaimer in the -* documentation and/or other materials provided with the distribution. -* 3. Neither the name of the copyright holders nor the names of its -* contributors may be used to endorse or promote products derived from -* this software without specific prior written permission. -* -* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF -* THE POSSIBILITY OF SUCH DAMAGE. 
-******************************************************************************* \ No newline at end of file diff --git a/third_party/unused_dependency_checker/src/main/BUILD b/third_party/unused_dependency_checker/src/main/BUILD deleted file mode 100644 index d28702892..000000000 --- a/third_party/unused_dependency_checker/src/main/BUILD +++ /dev/null @@ -1,16 +0,0 @@ -licenses(["notice"]) # 3-clause BSD - -load("//scala:scala.bzl", "scala_library_for_plugin_bootstrapping") - -scala_library_for_plugin_bootstrapping( - name = "unused_dependency_checker", - srcs = [ - "io/bazel/rulesscala/unuseddependencychecker/UnusedDependencyChecker.scala", - ], - resources = ["resources/scalac-plugin.xml"], - visibility = ["//visibility:public"], - deps = [ - "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", - "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", - ], -) diff --git a/third_party/unused_dependency_checker/src/main/io/bazel/rulesscala/unuseddependencychecker/UnusedDependencyChecker.scala b/third_party/unused_dependency_checker/src/main/io/bazel/rulesscala/unuseddependencychecker/UnusedDependencyChecker.scala deleted file mode 100644 index 469db9093..000000000 --- a/third_party/unused_dependency_checker/src/main/io/bazel/rulesscala/unuseddependencychecker/UnusedDependencyChecker.scala +++ /dev/null @@ -1,144 +0,0 @@ -package third_party.unused_dependency_checker.src.main.io.bazel.rulesscala.unused_dependency_checker - -import scala.reflect.io.AbstractFile -import scala.tools.nsc.plugins.{Plugin, PluginComponent} -import scala.tools.nsc.{Global, Phase} -import UnusedDependencyChecker._ - -class UnusedDependencyChecker(val global: Global) extends Plugin { self => - val name = "unused-dependency-checker" - val description = "Errors if there exists dependencies that are not used" - - val components: List[PluginComponent] = List[PluginComponent](Component) - - var direct: Map[String, String] = Map.empty - var ignoredTargets: Set[String] = 
Set.empty - var analyzerMode: AnalyzerMode = Error - var currentTarget: String = "NA" - - val isWindows: Boolean = System.getProperty("os.name").toLowerCase.contains("windows") - - override def init(options: List[String], error: (String) => Unit): Boolean = { - var directJars: Seq[String] = Seq.empty - var directTargets: Seq[String] = Seq.empty - - for (option <- options) { - option.split(":").toList match { - case "direct-jars" :: data => directJars = data.map(decodeTarget) - case "direct-targets" :: data => directTargets = data.map(decodeTarget) - case "ignored-targets" :: data => ignoredTargets = data.map(decodeTarget).toSet - case "current-target" :: target :: _ => currentTarget = decodeTarget(target) - case "mode" :: mode :: _ => parseAnalyzerMode(mode).foreach(analyzerMode = _) - case unknown :: _ => error(s"unknown param $unknown") - case Nil => - } - } - - direct = directJars.zip(directTargets).toMap - - true - } - - - private object Component extends PluginComponent { - val global: Global = self.global - - import global._ - - override val runsAfter = List("jvm") - - val phaseName: String = self.name - - private def warnOrError(messages: Set[String]): Unit = { - val reportFunction: String => Unit = analyzerMode match { - case Error => reporter.error(NoPosition, _) - case Warn => reporter.warning(NoPosition, _) - } - - messages.foreach(reportFunction) - } - - override def newPhase(prev: Phase): StdPhase = new StdPhase(prev) { - override def run(): Unit = { - super.run() - - warnOrError(unusedDependenciesFound) - } - - private def unusedDependenciesFound: Set[String] = { - val usedJars: Set[AbstractFile] = findUsedJars - val directJarPaths = direct.keys.toSet - val usedJarPaths = if (!isWindows) usedJars.map(_.path) else usedJars.map(_.path.replaceAll("\\\\", "/")) - - val usedTargets = usedJarPaths - .map(direct.get) - .collect { - case Some(target) => target - } - - val unusedTargets = directJarPaths - .filter(jar => !usedTargets.contains(direct(jar))) - 
.map(direct.get) - .collect { - case Some(target) if !ignoredTargets.contains(target) => target - } - - unusedTargets.map { target => - s"""Target '$target' is specified as a dependency to $currentTarget but isn't used, please remove it from the deps. - |You can use the following buildozer command: - |buildozer 'remove deps $target' $currentTarget - |""".stripMargin - } - } - - override def apply(unit: CompilationUnit): Unit = () - } - - def findUsedJars: Set[AbstractFile] = { - val jars = collection.mutable.Set[AbstractFile]() - - def walkTopLevels(root: Symbol): Unit = { - def safeInfo(sym: Symbol): Type = - if (sym.hasRawInfo && sym.rawInfo.isComplete) sym.info else NoType - - def packageClassOrSelf(sym: Symbol): Symbol = - if (sym.hasPackageFlag && !sym.isModuleClass) sym.moduleClass else sym - - for (x <- safeInfo(packageClassOrSelf(root)).decls) { - if (x == root) () - else if (x.hasPackageFlag) walkTopLevels(x) - else if (x.owner != root) { // exclude package class members - if (x.hasRawInfo && x.rawInfo.isComplete) { - val assocFile = x.associatedFile - if (assocFile.path.endsWith(".class") && assocFile.underlyingSource.isDefined) - assocFile.underlyingSource.foreach(jars += _) - } - } - } - } - - exitingTyper { - walkTopLevels(RootClass) - } - jars.toSet - } - } - -} - -object UnusedDependencyChecker { - - sealed trait AnalyzerMode - - case object Error extends AnalyzerMode - - case object Warn extends AnalyzerMode - - def parseAnalyzerMode(mode: String): Option[AnalyzerMode] = mode match { - case "error" => Some(Error) - case "warn" => Some(Warn) - case _ => None - } - - def decodeTarget(target: String): String = target.replace(";", ":") -} diff --git a/third_party/unused_dependency_checker/src/main/resources/scalac-plugin.xml b/third_party/unused_dependency_checker/src/main/resources/scalac-plugin.xml deleted file mode 100644 index ae5ddca8f..000000000 --- a/third_party/unused_dependency_checker/src/main/resources/scalac-plugin.xml +++ /dev/null @@ -1,4 
+0,0 @@ - - unused-dependency-checker - third_party.unused_dependency_checker.src.main.io.bazel.rulesscala.unused_dependency_checker.UnusedDependencyChecker - diff --git a/third_party/unused_dependency_checker/src/test/BUILD b/third_party/unused_dependency_checker/src/test/BUILD deleted file mode 100644 index f8574859f..000000000 --- a/third_party/unused_dependency_checker/src/test/BUILD +++ /dev/null @@ -1,25 +0,0 @@ -licenses(["notice"]) # 3-clause BSD - -load("//scala:scala.bzl", "scala_junit_test", "scala_test") - -scala_test( - name = "unused_dependency_checker_test", - size = "small", - srcs = [ - "io/bazel/rulesscala/dependencyanalyzer/UnusedDependencyCheckerTest.scala", - ], - jvm_flags = [ - "-Dplugin.jar.location=$(location //third_party/unused_dependency_checker/src/main:unused_dependency_checker)", - "-Dscala.library.location=$(location //external:io_bazel_rules_scala/dependency/scala/scala_library)", - "-Dapache.commons.jar.location=$(location @org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file)", - ], - unused_dependency_checker_mode = "off", - deps = [ - "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", - "//external:io_bazel_rules_scala/dependency/scala/scala_library", - "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", - "//third_party/unused_dependency_checker/src/main:unused_dependency_checker", - "//third_party/utils/src/test:test_util", - "@org_apache_commons_commons_lang_3_5_without_file//:linkable_org_apache_commons_commons_lang_3_5_without_file", - ], -) diff --git a/third_party/utils/src/test/BUILD b/third_party/utils/src/test/BUILD index 2dbce6ac2..9c3e8bafb 100644 --- a/third_party/utils/src/test/BUILD +++ b/third_party/utils/src/test/BUILD @@ -5,11 +5,13 @@ load("//scala:scala.bzl", "scala_library") scala_library( name = "test_util", srcs = [ + "io/bazel/rulesscala/utils/JavaCompileUtil.scala", "io/bazel/rulesscala/utils/TestUtil.scala", ], 
visibility = ["//visibility:public"], deps = [ "//external:io_bazel_rules_scala/dependency/scala/scala_compiler", "//external:io_bazel_rules_scala/dependency/scala/scala_reflect", + "//third_party/dependency_analyzer/src/main:dependency_analyzer", ], ) diff --git a/third_party/utils/src/test/io/bazel/rulesscala/utils/JavaCompileUtil.scala b/third_party/utils/src/test/io/bazel/rulesscala/utils/JavaCompileUtil.scala new file mode 100644 index 000000000..ed300f7f7 --- /dev/null +++ b/third_party/utils/src/test/io/bazel/rulesscala/utils/JavaCompileUtil.scala @@ -0,0 +1,52 @@ +package third_party.utils.src.test.io.bazel.rulesscala.utils + +import java.io.File +import java.io.IOException +import java.net.URI +import java.util +import java.util.Locale +import javax.tools.DiagnosticCollector +import javax.tools.JavaFileObject +import javax.tools.JavaFileObject.Kind +import javax.tools.SimpleJavaFileObject +import javax.tools.StandardLocation +import javax.tools.ToolProvider + +// Modified from +// https://myshittycode.com/2014/02/13/java-programmatically-compile-and-unit-test-generated-java-source-code/ +object JavaCompileUtil { + // in-memory Java file object + class InMemoryJavaFileObject(val className: String, val contents: String) extends SimpleJavaFileObject(URI.create("string:///" + className.replace('.', '/') + Kind.SOURCE.extension), Kind.SOURCE) { + @throws[IOException] + override def getCharContent(ignoreEncodingErrors: Boolean): CharSequence = contents + } + + /** + * Compile some java code + */ + def compile(tmpDir: String, className: String, code: String): Unit = { + val javaFileObject = new InMemoryJavaFileObject(className, code) + val compiler = ToolProvider.getSystemJavaCompiler + val fileManager = compiler.getStandardFileManager(null, null, null) + val files = util.Arrays.asList(new File(tmpDir)) + fileManager.setLocation(StandardLocation.CLASS_OUTPUT, files) + val diagnostics = new DiagnosticCollector[JavaFileObject] + val task = compiler.getTask(null, 
fileManager, diagnostics, null, null, util.Arrays.asList(javaFileObject)) + val success = task.call + fileManager.close() + // If there's a compilation error, display error messages and fail the test + if (!success) { + import scala.collection.JavaConversions._ + for (diagnostic <- diagnostics.getDiagnostics) { + println("Code: " + diagnostic.getCode) + println("Kind: " + diagnostic.getKind) + println("Position: " + diagnostic.getPosition) + println("Start Position: " + diagnostic.getStartPosition) + println("End Position: " + diagnostic.getEndPosition) + println("Source: " + diagnostic.getSource) + println("Message: " + diagnostic.getMessage(Locale.getDefault)) + } + throw new Exception("Compilation failed!") + } + } +} diff --git a/third_party/utils/src/test/io/bazel/rulesscala/utils/TestUtil.scala b/third_party/utils/src/test/io/bazel/rulesscala/utils/TestUtil.scala index a15f6a5c8..025564901 100644 --- a/third_party/utils/src/test/io/bazel/rulesscala/utils/TestUtil.scala +++ b/third_party/utils/src/test/io/bazel/rulesscala/utils/TestUtil.scala @@ -1,62 +1,141 @@ package third_party.utils.src.test.io.bazel.rulesscala.utils +import java.nio.file.Path import java.nio.file.Paths - import scala.reflect.internal.util.BatchSourceFile +import scala.reflect.io.AbstractFile +import scala.reflect.io.Directory +import scala.reflect.io.PlainDirectory import scala.reflect.io.VirtualDirectory import scala.tools.cmd.CommandLineParser +import scala.tools.nsc.CompilerCommand +import scala.tools.nsc.Global +import scala.tools.nsc.Settings import scala.tools.nsc.reporters.StoreReporter -import scala.tools.nsc.{CompilerCommand, Global, Settings} +import third_party.dependency_analyzer.src.main.io.bazel.rulesscala.dependencyanalyzer.DependencyTrackingMethod object TestUtil { - - import scala.language.postfixOps - final val defaultTarget = "//..." 
- def constructPluginParam(pluginName: String)(name: String, values: Iterable[String]): String = { + private def constructPluginParam(pluginName: String)(name: String, values: Iterable[String]): String = { if (values.isEmpty) "" else s"-P:$pluginName:$name:${values.mkString(":")}" } - def runCompiler(code: String, compileOptions: String, extraClasspath: List[String], toolboxPluginOptions: String): List[String] = { - val fullCompileOptions: String = getCompileOptions(code, compileOptions, extraClasspath, toolboxPluginOptions) - val reporter: StoreReporter = eval(code, fullCompileOptions) - reporter.infos.collect({ case msg if msg.severity == reporter.ERROR => msg.msg }).toList + private lazy val toolboxPluginOptions: String = { + val jar = System.getProperty(s"plugin.jar.location") + val start = jar.indexOf(s"/third_party/dependency_analyzer") + // this substring is needed due to issue: https://github.com/bazelbuild/bazel/issues/2475 + val jarInRelationToBaseDir = jar.substring(start, jar.length) + val pluginPath = Paths.get(baseDir, jarInRelationToBaseDir).toAbsolutePath + s"-Xplugin:$pluginPath -Jdummy=${pluginPath.toFile.lastModified}" } - private def getCompileOptions(code: String, compileOptions: String, extraClasspath: Seq[String], toolboxPluginOptions: String): String = { - val fullClasspath: String = { - val extraClasspathString = extraClasspath.mkString(":") - if (toolboxClasspath.isEmpty) extraClasspathString - else s"$toolboxClasspath:$extraClasspathString" + case class DependencyAnalyzerTestParams( + dependencyTrackingMethod: DependencyTrackingMethod, + strictDeps: Boolean = false, + unusedDeps: Boolean = false, + directJars: List[String] = List.empty, + directTargets: List[String] = List.empty, + indirectJars: List[String] = List.empty, + indirectTargets: List[String] = List.empty + ) + + private def getDependencyAnalyzerOptions(params: DependencyAnalyzerTestParams): String = { + val argsForAnalyzer = + List( + "dependency-tracking-method" -> 
Seq(params.dependencyTrackingMethod.name), + "current-target" -> Seq(TestUtil.defaultTarget), + "unused-deps-mode" -> (if (params.unusedDeps) { Seq("error") } else { Seq() }), + "strict-deps-mode" -> (if (params.strictDeps) { Seq("error") } else { Seq() }), + "direct-jars" -> params.directJars, + "direct-targets" -> params.directTargets, + "indirect-jars" -> params.indirectJars, + "indirect-targets" -> params.indirectTargets + ) + val constructParam = TestUtil.constructPluginParam("dependency-analyzer") _ + val argsForAnalyzerString = + argsForAnalyzer + .map { case (k, v) => + constructParam(k, v) + } + .mkString(" ") + s"$argsForAnalyzerString $toolboxPluginOptions" + } + + private def getClasspathArguments(extraClasspath: List[String]): String = { + val classpathEntries = { + val builtinClassPaths = builtinClasspaths.filterNot(_.isEmpty) + extraClasspath ++ builtinClassPaths + } + if (classpathEntries.isEmpty) { + "" + } else { + s"-classpath ${classpathEntries.mkString(":")}" } - val basicOptions = - createBasicCompileOptions(fullClasspath, toolboxPluginOptions) + } - s"$basicOptions $compileOptions" + /** + * Runs the compiler on a piece of code. 
+ * + * @param dependencyAnalyzerParamsOpt If set, includes the dependency analyzer + * plugin with the provided parameters + * @param outputPathOpt If non-None, a directory to output the files in + * @return list of errors returned by the compiler + */ + def runCompiler( + code: String, + extraClasspath: List[String] = List.empty, + dependencyAnalyzerParamsOpt: Option[DependencyAnalyzerTestParams] = None, + outputPathOpt: Option[Path] = None + ): List[StoreReporter#Info] = { + val dependencyAnalyzerOptions = + dependencyAnalyzerParamsOpt + .map(getDependencyAnalyzerOptions) + .getOrElse("") + val classPathOptions = getClasspathArguments(extraClasspath) + val compileOptions = s"$dependencyAnalyzerOptions $classPathOptions" + val output = + outputPathOpt + .map(output => new PlainDirectory(new Directory(output.toFile))) + .getOrElse(new VirtualDirectory("(memory)", None)) + eval(code = code, compileOptions = compileOptions, output = output) } - /** Evaluate using global instance instead of toolbox because toolbox seems - * to fail to typecheck code that comes from external dependencies. */ - private def eval(code: String, compileOptions: String = ""): StoreReporter = { + private def eval( + code: String, + compileOptions: String, + output: AbstractFile + ): List[StoreReporter#Info] = { // TODO: Optimize and cache global. val options = CommandLineParser.tokenize(compileOptions) val reporter = new StoreReporter() val settings = new Settings(println) val _ = new CompilerCommand(options, settings) - settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) + settings.outputDirs.setSingleOutput(output) + + // Evaluate using global instance instead of toolbox because toolbox seems + // to fail to typecheck code that comes from external dependencies. 
val global = new Global(settings, reporter) + val run = new global.Run - val toCompile = new BatchSourceFile("", code) + + // It is important that the source name when compiling code + // looks like a valid scala file - + // this causes the compiler to report positions correctly. And + // tests verify that positions are reported successfully. + val toCompile = new BatchSourceFile("CompiledCode.scala", code) run.compileSources(List(toCompile)) - reporter + reporter.infos.filter(_.severity == reporter.ERROR).toList } - lazy val baseDir = System.getProperty("user.dir") + private lazy val baseDir = System.getProperty("user.dir") - lazy val toolboxClasspath: String = - pathOf("scala.library.location") + private lazy val builtinClasspaths: Vector[String] = + Vector( + pathOf("scala.library.location"), + pathOf("scala.reflect.location") + ) lazy val guavaClasspath: String = pathOf("guava.jar.location") @@ -70,10 +149,6 @@ object TestUtil { libPath.toString } - private def createBasicCompileOptions(classpath: String, usePluginOptions: String) = - s"-classpath $classpath $usePluginOptions" - - def decodeLabel(targetLabel: String): String = targetLabel.replace(";", ":") def encodeLabel(targetLabel: String): String = targetLabel.replace(":", ";") diff --git a/tools/bazel b/tools/bazel index aad205806..8fc0f8ce1 100755 --- a/tools/bazel +++ b/tools/bazel @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -e -default_bazel_version='1.1.0' +default_bazel_version='2.0.0' if [ "$BUILDKITE" = true ]; then bazel_version='host' @@ -27,10 +27,6 @@ case "$bazel_version" in bazel_version=$("$BAZEL_REAL" version | awk '/Build label/ {print $3}' | cut -d '-' -f 1) bazel="$BAZEL_REAL" ;; - '1.1.0') - darwin_sha='1a552f4ce194860fbbd50eeb319f81788ddf50a849e92378eec72231cc64ef65' - linux_sha='14301099c87568db302d59a5d3585f5eb8a6250ac2c6bb0367c56e623ff6e65f' - ;; '2.0.0') darwin_sha='c675fa27d99a3114d681db10eb03ded547c40f702b2048c99b8f4ea8e89b9356' 
linux_sha='2fbdc9c0e3d376697caf0ee3673b7c9475214068c55a01b9744891e131f90b87' diff --git a/twitter_scrooge/twitter_scrooge.bzl b/twitter_scrooge/twitter_scrooge.bzl index 4680b3701..fa2482939 100644 --- a/twitter_scrooge/twitter_scrooge.bzl +++ b/twitter_scrooge/twitter_scrooge.bzl @@ -1,5 +1,6 @@ load( "//scala:scala_cross_version.bzl", + _default_maven_server_urls = "default_maven_server_urls", _default_scala_version = "default_scala_version", _extract_major_version = "extract_major_version", _scala_mvn_artifact = "scala_mvn_artifact", @@ -12,7 +13,14 @@ load( "//scala/private:common.bzl", "write_manifest_file", ) -load("//scala/private:rule_impls.bzl", "compile_scala") +load( + "//scala/private:dependency.bzl", + "legacy_unclear_dependency_info_for_protobuf_scrooge", +) +load( + "//scala/private:rule_impls.bzl", + "compile_scala", +) load("@io_bazel_rules_scala//thrift:thrift_info.bzl", "ThriftInfo") load( "@io_bazel_rules_scala//thrift:thrift.bzl", @@ -23,7 +31,7 @@ _jar_extension = ".jar" def twitter_scrooge( scala_version = _default_scala_version(), - maven_servers = ["https://repo.maven.apache.org/maven2"]): + maven_servers = _default_maven_server_urls()): major_version = _extract_major_version(scala_version) _scala_maven_import_external( @@ -238,6 +246,8 @@ def _compile_scala( expect_java_output = False, scalac_jvm_flags = [], scalac = ctx.attr._scalac, + dependency_info = legacy_unclear_dependency_info_for_protobuf_scrooge(ctx), + unused_dependency_checker_ignored_targets = [], ) return JavaInfo(